diff --git a/app/fixtures/github-packages/effect-4.0.0/bower.json b/app/fixtures/github-packages/effect-4.0.0/bower.json
index 3b520e6ae..bed5c5ab8 100644
--- a/app/fixtures/github-packages/effect-4.0.0/bower.json
+++ b/app/fixtures/github-packages/effect-4.0.0/bower.json
@@ -16,6 +16,7 @@
"package.json"
],
"dependencies": {
- "purescript-prelude": "^6.0.0"
+ "purescript-prelude": "^6.0.0",
+ "purescript-type-equality": "^4.0.0"
}
}
diff --git a/app/fixtures/registry-index/ty/pe/type-equality b/app/fixtures/registry-index/ty/pe/type-equality
new file mode 100644
index 000000000..8fbce8f14
--- /dev/null
+++ b/app/fixtures/registry-index/ty/pe/type-equality
@@ -0,0 +1 @@
+{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"dependencies":{}}
\ No newline at end of file
diff --git a/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz b/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz
new file mode 100644
index 000000000..ba7126b60
Binary files /dev/null and b/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz differ
diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json
new file mode 100644
index 000000000..68f250604
--- /dev/null
+++ b/app/fixtures/registry/metadata/type-equality.json
@@ -0,0 +1,15 @@
+{
+ "location": {
+ "githubOwner": "purescript",
+ "githubRepo": "purescript-type-equality"
+ },
+ "published": {
+ "4.0.1": {
+ "bytes": 2184,
+ "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=",
+ "publishedTime": "2022-04-27T18:00:18.000Z",
+ "ref": "v4.0.1"
+ }
+ },
+ "unpublished": {}
+}
diff --git a/app/src/App/API.purs b/app/src/App/API.purs
index ecb4907a8..22a567719 100644
--- a/app/src/App/API.purs
+++ b/app/src/App/API.purs
@@ -1,16 +1,15 @@
module Registry.App.API
- ( PackageSetUpdateEffects
- , packageSetUpdate
+ ( AuthenticatedEffects
+ , PackageSetUpdateEffects
, PublishEffects
- , publish
- , AuthenticatedEffects
, authenticated
- , packagingTeam
- -- The below are exported for tests, but aren't otherwise intended for use
- -- outside this module.
+ , copyPackageSourceFiles
, formatPursuitResolutions
+ , packageSetUpdate
+ , packagingTeam
+ , parseInstalledModulePath
+ , publish
, removeIgnoredTarballFiles
- , copyPackageSourceFiles
) where
import Registry.App.Prelude
@@ -27,8 +26,10 @@ import Data.FoldableWithIndex (foldMapWithIndex)
import Data.Map as Map
import Data.Newtype (over, unwrap)
import Data.Number.Format as Number.Format
+import Data.Set as Set
import Data.Set.NonEmpty as NonEmptySet
import Data.String as String
+import Data.String.CodeUnits as String.CodeUnits
import Data.String.NonEmpty (fromString) as NonEmptyString
import Data.String.NonEmpty.Internal (toString) as NonEmptyString
import Data.String.Regex as Regex
@@ -39,6 +40,10 @@ import Node.FS.Stats as FS.Stats
import Node.FS.Sync as FS.Sync
import Node.Library.Execa as Execa
import Node.Path as Path
+import Parsing as Parsing
+import Parsing.Combinators as Parsing.Combinators
+import Parsing.Combinators.Array as Parsing.Combinators.Array
+import Parsing.String as Parsing.String
import Registry.App.Auth as Auth
import Registry.App.CLI.Purs (CompilerFailure(..))
import Registry.App.CLI.Purs as Purs
@@ -82,6 +87,8 @@ import Registry.Operation.Validation as Operation.Validation
import Registry.Owner as Owner
import Registry.PackageName as PackageName
import Registry.PackageSet as PackageSet
+import Registry.PursGraph (ModuleName(..))
+import Registry.PursGraph as PursGraph
import Registry.Range as Range
import Registry.Sha256 as Sha256
import Registry.Solver as Solver
@@ -90,7 +97,7 @@ import Run (AFF, EFFECT, Run)
import Run as Run
import Run.Except (EXCEPT)
import Run.Except as Except
-import Spago.Core.Config as Spago
+import Spago.Core.Config as Spago.Config
import Spago.Core.Prelude as Spago.Prelude
import Spago.Log as Spago.Log
@@ -388,70 +395,68 @@ publish source payload = do
-- - if it's a legacy import then we can try to infer as much info as possible to make a manifest
let packagePursJson = Path.concat [ packageDirectory, "purs.json" ]
hadPursJson <- Run.liftEffect $ FS.Sync.exists packagePursJson
- unless hadPursJson do
- let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ]
- hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml
- case hasSpagoYaml of
- true -> do
- Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..."
- -- Need to make a Spago log env first, disable the logging
- let spagoEnv = { logOptions: { color: false, verbosity: Spago.Log.LogQuiet } }
- maybeConfig <- Spago.Prelude.runSpago spagoEnv (Spago.readConfig packageSpagoYaml)
- case maybeConfig of
- Left err' -> Except.throw $ String.joinWith "\n"
- [ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:"
- , err'
- ]
- Right { yaml: config } -> do
- -- Once we have the config we are still not entirely sure it fits into a Manifest
- -- E.g. need to make sure all the ranges are present
- case spagoToManifest config of
- Left err -> Except.throw $ String.joinWith "\n"
- [ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:"
- , err
- ]
- Right manifest -> do
- Log.debug "Successfully converted a spago.yaml into a purs.json manifest"
- Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest
- false -> do
- Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..."
- address <- case existingMetadata.location of
- Git _ -> Except.throw "Legacy packages can only come from GitHub."
- GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir."
- GitHub { owner, repo } -> pure { owner, repo }
-
- version <- case LenientVersion.parse payload.ref of
- Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used."
- Right result -> pure $ LenientVersion.version result
-
- Legacy.Manifest.fetchLegacyManifest payload.name address (RawVersion payload.ref) >>= case _ of
- Left manifestError -> do
- let formatError { error, reason } = reason <> " " <> Legacy.Manifest.printLegacyManifestError error
- Except.throw $ String.joinWith "\n"
- [ "Could not publish your package because there were issues converting its spago.dhall and/or bower.json files into a purs.json manifest:"
- , formatError manifestError
- ]
- Right legacyManifest -> do
- Log.debug $ "Successfully produced a legacy manifest from the package source. Writing it to the package source..."
- let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest
- Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest
- Log.debug "A valid purs.json is available in the package source."
+ let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ]
+ hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml
- Manifest manifest <- Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of
- Left error -> do
- Log.error $ "Could not read purs.json from path " <> packagePursJson <> ": " <> Aff.message error
- Except.throw $ "Could not find a purs.json file in the package source."
- Right string -> Env.askResourceEnv >>= \{ dhallTypes } -> Run.liftAff (jsonToDhallManifest dhallTypes string) >>= case _ of
- Left error -> do
- Log.error $ "Manifest does not typecheck: " <> error
- Except.throw $ "Found a valid purs.json file in the package source, but it does not typecheck."
- Right _ -> case parseJson Manifest.codec string of
- Left err -> do
- Log.error $ "Failed to parse manifest: " <> CA.printJsonDecodeError err
- Except.throw $ "Found a purs.json file in the package source, but it could not be decoded."
- Right manifest -> do
- Log.debug $ "Read a valid purs.json manifest from the package source:\n" <> stringifyJson Manifest.codec manifest
+ Manifest manifest <-
+ if hadPursJson then
+ Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of
+ Left error -> do
+ Except.throw $ "Could not read purs.json from path " <> packagePursJson <> ": " <> Aff.message error
+ Right string -> Env.askResourceEnv >>= \{ dhallTypes } -> Run.liftAff (jsonToDhallManifest dhallTypes string) >>= case _ of
+ Left error -> do
+ Log.error $ "Manifest does not typecheck: " <> error
+ Except.throw $ "Found a valid purs.json file in the package source, but it does not typecheck."
+ Right _ -> case parseJson Manifest.codec string of
+ Left err -> do
+ Log.error $ "Failed to parse manifest: " <> CA.printJsonDecodeError err
+ Except.throw $ "Found a purs.json file in the package source, but it could not be decoded."
+ Right manifest -> do
+ Log.debug $ "Read a valid purs.json manifest from the package source:\n" <> stringifyJson Manifest.codec manifest
+ pure manifest
+
+ else if hasSpagoYaml then do
+ Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..."
+ -- Need to make a Spago log env first, disable the logging
+ let spagoEnv = { logOptions: { color: false, verbosity: Spago.Log.LogQuiet } }
+ Spago.Prelude.runSpago spagoEnv (Spago.Config.readConfig packageSpagoYaml) >>= case _ of
+ Left readErr -> Except.throw $ String.joinWith "\n"
+ [ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:"
+ , readErr
+ ]
+ Right { yaml: config } -> do
+ -- Once we have the config we are still not entirely sure it fits into a Manifest
+ -- E.g. need to make sure all the ranges are present
+ case spagoToManifest config of
+ Left err -> Except.throw $ String.joinWith "\n"
+ [ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:"
+ , err
+ ]
+ Right manifest -> do
+ Log.debug "Successfully converted a spago.yaml into a purs.json manifest"
+ pure manifest
+ else do
+ Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..."
+ address <- case existingMetadata.location of
+ Git _ -> Except.throw "Legacy packages can only come from GitHub."
+ GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir."
+ GitHub { owner, repo } -> pure { owner, repo }
+
+ version <- case LenientVersion.parse payload.ref of
+ Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used."
+ Right result -> pure $ LenientVersion.version result
+
+ Legacy.Manifest.fetchLegacyManifest payload.name address (RawVersion payload.ref) >>= case _ of
+ Left manifestError -> do
+ let formatError { error, reason } = reason <> " " <> Legacy.Manifest.printLegacyManifestError error
+ Except.throw $ String.joinWith "\n"
+ [ "Could not publish your package because there were issues converting its spago.dhall and/or bower.json files into a purs.json manifest:"
+ , formatError manifestError
+ ]
+ Right legacyManifest -> do
+ Log.debug $ "Successfully produced a legacy manifest from the package source."
+ let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest
pure manifest
Comment.comment "Verifying package..."
@@ -527,15 +532,139 @@ publish source payload = do
, url
]
- publishRegistry
- { source
- , manifest: Manifest manifest
- , metadata: Metadata metadata
- , payload
- , publishedTime
- , tmp
- , packageDirectory
- }
+ -- Now that we've verified the package we can write the manifest to the source
+ -- directory and then publish it.
+ if hadPursJson then do
+ -- No need to verify the generated manifest because nothing was generated,
+    -- and no need to write a file (it's already in the package source).
+ publishRegistry
+ { source
+ , manifest: Manifest manifest
+ , metadata: Metadata metadata
+ , payload
+ , publishedTime
+ , tmp
+ , packageDirectory
+ }
+
+ else if hasSpagoYaml then do
+ -- We need to write the generated purs.json file, but because spago-next
+ -- already does unused dependency checks and supports explicit test-only
+ -- dependencies we can skip those checks.
+ Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest)
+ publishRegistry
+ { source
+ , manifest: Manifest manifest
+ , metadata: Metadata metadata
+ , payload
+ , publishedTime
+ , tmp
+ , packageDirectory
+ }
+
+ -- Otherwise this is a legacy package, generated from a combination of bower,
+ -- spago.dhall, and package set files, so we need to verify the generated
+ -- manifest does not contain unused dependencies before writing it.
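+  -- To do that we solve the manifest, install the resulting build plan, run
+  -- 'purs graph' over the package source and installed dependencies, collect
+  -- every module reachable from the package source, map those modules back to
+  -- package names, and drop any manifest dependency that is never reached.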
+ else do
+ Log.debug "Pruning unused dependencies from legacy package manifest..."
+
+ Log.debug "Solving manifest to get all transitive dependencies."
+ resolutions <- verifyResolutions (Manifest manifest) payload.resolutions
+
+ Log.debug "Installing dependencies."
+ tmpDepsDir <- Tmp.mkTmpDir
+ installBuildPlan resolutions tmpDepsDir
+
+ Log.debug "Discovering used dependencies from source."
+ let srcGlobs = Path.concat [ packageDirectory, "src", "**", "*.purs" ]
+ let depGlobs = Path.concat [ tmpDepsDir, "*", "src", "**", "*.purs" ]
+ let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] }
+ -- We need to use the minimum compiler version that supports 'purs graph'
+ let minGraphCompiler = unsafeFromRight (Version.parse "0.13.8")
+ let callCompilerVersion = if payload.compiler >= minGraphCompiler then payload.compiler else minGraphCompiler
+ Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of
+ Left err -> do
+ let prefix = "Failed to discover unused dependencies because purs graph failed: "
+ Log.error $ prefix <> case err of
+ UnknownError str -> str
+ CompilationError errs -> Purs.printCompilerErrors errs
+ MissingCompiler -> "missing compiler " <> Version.print payload.compiler
+ -- We allow legacy packages through even if we couldn't run purs graph,
+ -- because we can't be sure we chose the correct compiler version.
+ if source == LegacyPackage then
+ Comment.comment "Failed to prune dependencies for legacy package, continuing anyway..."
+ else do
+ Except.throw "purs graph failed; cannot verify unused dependencies."
+ Right output -> case Argonaut.Parser.jsonParser output of
+ Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr
+ Right json -> case CA.decode PursGraph.pursGraphCodec json of
+ Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CA.printJsonDecodeError decodeErr
+ Right graph -> do
+ Log.debug "Got a valid graph of source and dependencies. Removing install dir and associating discovered modules with their packages..."
+ FS.Extra.remove tmpDepsDir
+
+ let
+ -- We need access to a graph that _doesn't_ include the package
+ -- source, because we only care about dependencies of the package.
+ noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph
+
+ pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ }
+
+ case PursGraph.associateModules pathParser noSrcGraph of
+ Left errs ->
+ Except.throw $ String.joinWith "\n"
+ [ "Failed to associate modules with packages while finding unused dependencies:"
+ , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } ->
+ " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n"
+ ]
+ Right modulePackageMap -> do
+ Log.debug "Associated modules with their package names. Finding all modules used in package source..."
+ -- The modules used in the package source code are any that have
+ -- a path beginning with the package source directory. We only
+                  -- care about the dependencies of these modules.
+ let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph
+
+ Log.debug "Found all modules used in package source. Finding all modules used by those modules..."
+ -- We find all dependencies of each module used in the source,
+ -- which results in a set containing every module name reachable
+ -- by the source code.
+ let allReachableModules = Set.fromFoldable $ Array.fold $ Array.mapMaybe (flip PursGraph.allDependencies graph) $ Set.toUnfoldable sourceModules
+
+ -- Then we can associate each reachable module with its package
+ -- name to get the full set of used package names.
+ let allUsedPackages = Set.mapMaybe (flip Map.lookup modulePackageMap) allReachableModules
+
+ -- Finally, we can use this to find the unused dependencies.
+ Log.debug "Found all packages reachable by the project source code. Determining unused dependencies..."
+ case Operation.Validation.getUnusedDependencies (Manifest manifest) resolutions allUsedPackages of
+ Nothing -> do
+ Log.debug "No unused dependencies! This manifest is good to go."
+ Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest)
+ publishRegistry
+ { source
+ , manifest: Manifest manifest
+ , metadata: Metadata metadata
+ , payload
+ , publishedTime
+ , tmp
+ , packageDirectory
+ }
+ Just isUnused -> do
+ let printed = String.joinWith ", " (PackageName.print <$> NonEmptySet.toUnfoldable isUnused)
+ Log.debug $ "Found unused dependencies: " <> printed
+ Comment.comment $ "Generated legacy manifest contains unused dependencies which will be removed: " <> printed
+ let verified = manifest { dependencies = Map.filterKeys (not <<< flip NonEmptySet.member isUnused) manifest.dependencies }
+ Log.debug "Writing updated, pruned manifest."
+ Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified)
+ publishRegistry
+ { source
+ , manifest: Manifest verified
+ , metadata: Metadata metadata
+ , payload
+ , publishedTime
+ , tmp
+ , packageDirectory
+ }
type PublishRegistry =
{ source :: PackageSource
@@ -735,6 +864,7 @@ compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do
tmp <- Tmp.mkTmpDir
let dependenciesDir = Path.concat [ tmp, ".registry" ]
FS.Extra.ensureDirectory dependenciesDir
+
let
globs =
if Map.isEmpty resolutions then
@@ -744,23 +874,8 @@ compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do
, Path.concat [ dependenciesDir, "*/src/**/*.purs" ]
]
- -- We fetch every dependency at its resolved version, unpack the tarball, and
- -- store the resulting source code in a specified directory for dependencies.
- forWithIndex_ resolutions \name version -> do
- let
- -- This filename uses the format the directory name will have once
- -- unpacked, ie. package-name-major.minor.patch
- filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
- filepath = Path.concat [ dependenciesDir, filename ]
- Storage.download name version filepath
- Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
- Left error -> do
- Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
- Except.throw "Failed to unpack dependency tarball, cannot continue."
- Right _ ->
- Log.debug $ "Unpacked " <> filename
- Run.liftAff $ FS.Aff.unlink filepath
- Log.debug $ "Installed " <> formatPackageVersion name version
+ Log.debug "Installing build plan..."
+ installBuildPlan resolutions dependenciesDir
Log.debug "Compiling..."
compilerOutput <- Run.liftAff $ Purs.callCompiler
@@ -789,6 +904,49 @@ compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do
]
Right _ -> pure dependenciesDir
+-- | Install all dependencies indicated by the build plan to the specified
+-- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
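+-- |
+-- | For example, a dependency resolved as foo@1.0.0 is downloaded to
+-- | 'dir/foo-1.0.0.tar.gz', extracted to 'dir/foo-1.0.0', and the tarball is
+-- | removed afterwards.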
+installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
+installBuildPlan resolutions dependenciesDir = do
+ -- We fetch every dependency at its resolved version, unpack the tarball, and
+ -- store the resulting source code in a specified directory for dependencies.
+ forWithIndex_ resolutions \name version -> do
+ let
+ -- This filename uses the format the directory name will have once
+ -- unpacked, ie. package-name-major.minor.patch
+ filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
+ filepath = Path.concat [ dependenciesDir, filename ]
+ Storage.download name version filepath
+ Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
+ Left error -> do
+ Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
+ Except.throw "Failed to unpack dependency tarball, cannot continue."
+ Right _ ->
+ Log.debug $ "Unpacked " <> filename
+ Run.liftAff $ FS.Aff.unlink filepath
+ Log.debug $ "Installed " <> formatPackageVersion name version
+
+-- | Parse the name and version from a path to a module installed in the standard
+-- | form: '<dir>/<package-name>-<version>/...'
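+-- |
+-- | For example, given the prefix '/tmp' and the path
+-- | '/tmp/my-package-1.0.0/src/ModuleA.purs', this parses the package name
+-- | 'my-package' at version '1.0.0'.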
+parseInstalledModulePath :: { prefix :: FilePath, path :: FilePath } -> Either String { name :: PackageName, version :: Version }
+parseInstalledModulePath { prefix, path } = do
+ packageVersion <- lmap Parsing.parseErrorMessage $ Parsing.runParser path do
+ _ <- Parsing.String.string prefix
+ _ <- Parsing.Combinators.optional (Parsing.Combinators.try (Parsing.String.string Path.sep))
+ Tuple packageVersionChars _ <- Parsing.Combinators.Array.manyTill_ Parsing.String.anyChar (Parsing.String.string Path.sep)
+ pure $ String.CodeUnits.fromCharArray (Array.fromFoldable packageVersionChars)
+
+ -- Then we can drop everything after the last hyphen (the
+ -- version number) and join the rest back together.
+ case NonEmptyArray.fromArray $ String.split (String.Pattern "-") packageVersion of
+ Nothing -> Left $ "Could not parse package name and version from install path: " <> path
+ Just separated -> do
+ let packageString = String.joinWith "-" (NonEmptyArray.dropEnd 1 separated)
+ name <- PackageName.parse packageString
+ let versionString = NonEmptyArray.last separated
+ version <- Version.parse versionString
+ pure { name, version }
+
type PublishToPursuit =
{ packageSourceDir :: FilePath
, dependenciesDir :: FilePath
@@ -999,9 +1157,9 @@ getPacchettiBotti = do
packagingTeam :: Team
packagingTeam = { org: "purescript", team: "packaging" }
-spagoToManifest :: Spago.Config -> Either String Manifest
+spagoToManifest :: Spago.Config.Config -> Either String Manifest
spagoToManifest config = do
- package@{ name, description, dependencies: Spago.Dependencies deps } <- note "Did not find a package in the config" config.package
+ package@{ name, description, dependencies: Spago.Config.Dependencies deps } <- note "Did not find a package in the config" config.package
publishConfig@{ version, license } <- note "Did not find a `publish` section in the package config" package.publish
let includeFiles = NonEmptyArray.fromArray =<< (Array.mapMaybe NonEmptyString.fromString <$> publishConfig.include)
let excludeFiles = NonEmptyArray.fromArray =<< (Array.mapMaybe NonEmptyString.fromString <$> publishConfig.exclude)
diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs
index 130c3016d..38fa0f19c 100644
--- a/app/src/App/CLI/Purs.purs
+++ b/app/src/App/CLI/Purs.purs
@@ -96,12 +96,14 @@ data PursCommand
= Version
| Compile { globs :: Array FilePath }
| Publish { resolutions :: FilePath }
+ | Graph { globs :: Array FilePath }
printCommand :: PursCommand -> Array String
printCommand = case _ of
Version -> [ "--version" ]
Compile { globs } -> [ "compile" ] <> globs <> [ "--json-errors" ]
Publish { resolutions } -> [ "publish", "--manifest", "purs.json", "--resolutions", resolutions ]
+ Graph { globs } -> [ "graph" ] <> globs <> [ "--json-errors" ]
-- | Call a specific version of the PureScript compiler
callCompiler :: CompilerArgs -> Aff (Either CompilerFailure String)
@@ -118,7 +120,8 @@ callCompiler compilerArgs = do
$ Version.print version
errorsCodec = CA.Record.object "CompilerErrors"
- { errors: CA.array compilerErrorCodec }
+ { errors: CA.array compilerErrorCodec
+ }
result <- _.result =<< Execa.execa purs (printCommand compilerArgs.command) (_ { cwd = compilerArgs.cwd })
pure case result of
diff --git a/app/test/App/API.purs b/app/test/App/API.purs
index 7c2cd2baf..827ed7aa4 100644
--- a/app/test/App/API.purs
+++ b/app/test/App/API.purs
@@ -6,6 +6,7 @@ import Data.Array.NonEmpty as NonEmptyArray
import Data.Foldable (traverse_)
import Data.Map as Map
import Data.Set as Set
+import Data.String as String
import Data.String.NonEmpty as NonEmptyString
import Effect.Aff as Aff
import Effect.Ref as Ref
@@ -13,6 +14,7 @@ import Node.FS.Aff as FS.Aff
import Node.Path as Path
import Node.Process as Process
import Registry.App.API as API
+import Registry.App.CLI.Tar as Tar
import Registry.App.Effect.Env as Env
import Registry.App.Effect.Log as Log
import Registry.App.Effect.Pursuit as Pursuit
@@ -23,7 +25,10 @@ import Registry.Constants as Constants
import Registry.Foreign.FSExtra as FS.Extra
import Registry.Foreign.FastGlob as FastGlob
import Registry.Foreign.Tmp as Tmp
+import Registry.Internal.Codec as Internal.Codec
+import Registry.Manifest as Manifest
import Registry.PackageName as PackageName
+import Registry.Range as Range
import Registry.Test.Assert as Assert
import Registry.Test.Assert.Run as Assert.Run
import Registry.Test.Utils as Utils
@@ -52,11 +57,24 @@ spec = do
removeIgnoredTarballFiles
copySourceFiles
+ Spec.describe "Parses installed paths" do
+ Spec.it "Parses install path /my-package-1.0.0/..." do
+ tmp <- Tmp.mkTmpDir
+ let moduleA = Path.concat [ tmp, "my-package-1.0.0", "src", "ModuleA.purs" ]
+ case API.parseInstalledModulePath { prefix: tmp, path: moduleA } of
+ Left err -> Assert.fail $ "Expected to parse " <> moduleA <> " but got error: " <> err
+ Right { name, version } -> do
+ Assert.shouldEqual name (Utils.unsafePackageName "my-package")
+ Assert.shouldEqual version (Utils.unsafeVersion "1.0.0")
+ FS.Extra.remove tmp
+
Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do
- Spec.it "Publish" \{ workdir, index, metadata, storageDir, githubDir } -> do
+ Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do
let testEnv = { workdir, index, metadata, username: "jon", storage: storageDir, github: githubDir }
Assert.Run.runTestEffects testEnv do
- -- We'll publish effect@4.0.0
+ -- We'll publish effect@4.0.0 from the fixtures/github-packages
+ -- directory, which has an unnecessary dependency on 'type-equality'
+ -- inserted into it.
let
name = Utils.unsafePackageName "effect"
version = Utils.unsafeVersion "4.0.0"
@@ -83,11 +101,28 @@ spec = do
unless (Set.member version versions) do
Except.throw $ "Expected " <> formatPackageVersion name version <> " to be published to registry storage."
+ -- Let's verify the manifest does not include the unnecessary
+ -- 'type-equality' dependency...
+ Storage.download name version "effect-result"
+ Tar.extract { cwd: workdir, archive: "effect-result" }
+ Run.liftAff (readJsonFile Manifest.codec (Path.concat [ "effect-4.0.0", "purs.json" ])) >>= case _ of
+ Left err -> Except.throw $ "Expected effect@4.0.0 to be downloaded to effect-4.0.0 with a purs.json but received error " <> err
+ Right (Manifest manifest) -> do
+ let expectedDeps = Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeRange ">=6.0.0 <7.0.0")
+ when (manifest.dependencies /= expectedDeps) do
+ Except.throw $ String.joinWith "\n"
+ [ "Expected effect@4.0.0 to have dependencies"
+ , printJson (Internal.Codec.packageMap Range.codec) expectedDeps
+ , "\nbut got"
+ , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies
+ ]
+
-- Finally, we can verify that publishing the package again should fail
-- since it already exists.
Except.runExcept (API.publish CurrentPackage publishArgs) >>= case _ of
Left _ -> pure unit
Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail."
+
where
withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit
withCleanEnv action = do
diff --git a/app/test/App/CLI/Purs.purs b/app/test/App/CLI/Purs.purs
index bf2d37be1..d3add8698 100644
--- a/app/test/App/CLI/Purs.purs
+++ b/app/test/App/CLI/Purs.purs
@@ -2,12 +2,17 @@ module Test.Registry.App.CLI.Purs (spec) where
import Registry.App.Prelude
+import Data.Argonaut.Parser as Argonaut.Parser
+import Data.Codec.Argonaut as CA
import Data.Foldable (traverse_)
+import Data.Map as Map
import Node.FS.Aff as FS.Aff
import Node.Path as Path
import Registry.App.CLI.Purs (CompilerFailure(..))
import Registry.App.CLI.Purs as Purs
import Registry.Foreign.Tmp as Tmp
+import Registry.PursGraph (ModuleName(..))
+import Registry.PursGraph as PursGraph
import Registry.Test.Assert as Assert
import Registry.Test.Utils as Utils
import Registry.Version as Version
@@ -17,7 +22,8 @@ spec :: Spec.Spec Unit
spec = do
traverse_ (testVersion <<< Utils.unsafeVersion) [ "0.13.0", "0.14.0", "0.14.7", "0.15.4" ]
traverse_ (testMissingVersion <<< Utils.unsafeVersion) [ "0.13.1", "0.13.7", "0.15.1", "0.12.0", "0.14.12345" ]
- traverse_ testCompilationError [ Just (Utils.unsafeVersion "0.13.0"), Just (Utils.unsafeVersion "0.13.8"), Just (Utils.unsafeVersion "0.14.0"), Just (Utils.unsafeVersion "0.15.0"), Nothing ]
+ traverse_ (testCompilationError <<< map Utils.unsafeVersion) [ Just "0.13.0", Just "0.13.8", Just "0.14.0", Just "0.15.0", Nothing ]
+ traverse_ (testGraph <<< map Utils.unsafeVersion) [ Just "0.14.0", Just "0.15.0", Nothing ]
where
testVersion version =
Spec.it ("Calls compiler version " <> Version.print version) do
@@ -49,3 +55,29 @@ spec = do
Left (CompilationError [ { position: { startLine: 1, startColumn: 1 } } ]) ->
pure unit
_ -> Assert.fail "Should have failed with CompilationError"
+
+ testGraph version =
+ Spec.it ("Produces a graph for " <> maybe "latest" Version.print version) do
+ tmp <- Tmp.mkTmpDir
+ let moduleA = Path.concat [ tmp, "ModuleA.purs" ]
+ let moduleB = Path.concat [ tmp, "ModuleB.purs" ]
+ FS.Aff.writeTextFile UTF8 moduleA "module ModuleA where\n\nimport ModuleB\n"
+ FS.Aff.writeTextFile UTF8 moduleB "module ModuleB where\n"
+ result <- Purs.callCompiler { command: Purs.Graph { globs: [ moduleA, moduleB ] }, cwd: Nothing, version }
+ case result of
+ Left runErr -> Assert.fail $ case runErr of
+ CompilationError errs -> Purs.printCompilerErrors errs
+ UnknownError str -> str
+ MissingCompiler -> "MissingCompiler"
+ Right str -> case Argonaut.Parser.jsonParser str of
+ Left parseErr -> Assert.fail $ "Failed to parse output as JSON: " <> parseErr
+ Right json -> case CA.decode PursGraph.pursGraphCodec json of
+ Left decodeErr -> Assert.fail $ "Failed to decode JSON: " <> CA.printJsonDecodeError decodeErr
+ Right graph -> do
+ let
+ expected = Map.fromFoldable
+ [ Tuple (ModuleName "ModuleA") { path: moduleA, depends: [ ModuleName "ModuleB" ] }
+ , Tuple (ModuleName "ModuleB") { path: moduleB, depends: [] }
+ ]
+
+ graph `Assert.shouldEqual` expected
diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs
index 7f8b71815..0dc31e283 100644
--- a/lib/src/Operation/Validation.purs
+++ b/lib/src/Operation/Validation.purs
@@ -97,6 +97,23 @@ getUnresolvedDependencies (Manifest { dependencies }) resolutions =
| not (Range.includes dependencyRange version) -> Just $ Right $ dependencyName /\ dependencyRange /\ version
| otherwise -> Nothing
+-- | Discovers dependencies listed in the manifest that are not actually used
+-- | by the package source, given the resolved dependencies and the set of
+-- | packages discovered from the module graph. This should not produce an
+-- | error, but it indicates an over-constrained manifest.
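+-- |
+-- | For example, if the manifest lists 'prelude' and 'type-equality', both are
+-- | present in the resolutions, but only 'prelude' is in the set of discovered
+-- | packages, then 'type-equality' is reported as unused.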
+getUnusedDependencies :: Manifest -> Map PackageName Version -> Set PackageName -> Maybe (NonEmptySet PackageName)
+getUnusedDependencies (Manifest { dependencies }) resolutions discovered = do
+ let
+ -- There may be too many resolved dependencies because the manifest includes
+ -- e.g. test dependencies, so we start by only considering resolved deps
+ -- that are actually used.
+ inUse = Set.filter (flip Set.member discovered) (Map.keys resolutions)
+
+ -- Next, we can determine which dependencies are unused by looking at the
+ -- difference between the manifest dependencies and the resolved packages
+ unused = Set.filter (not <<< flip Set.member inUse) (Map.keys dependencies)
+
+ NonEmptySet.fromSet unused
+
data TarballSizeResult
= ExceedsMaximum Number
| WarnPackageSize Number
diff --git a/lib/src/PursGraph.purs b/lib/src/PursGraph.purs
new file mode 100644
index 000000000..4dc003d59
--- /dev/null
+++ b/lib/src/PursGraph.purs
@@ -0,0 +1,90 @@
+-- | A module describing the output of 'purs graph' along with some helper
+-- | functions for working with the graph.
+module Registry.PursGraph where
+
+import Prelude
+
+import Data.Array as Array
+import Data.Array.NonEmpty (NonEmptyArray)
+import Data.Array.NonEmpty as NonEmptyArray
+import Data.Bifunctor (bimap)
+import Data.Codec.Argonaut (JsonCodec)
+import Data.Codec.Argonaut as CA
+import Data.Codec.Argonaut.Record as CA.Record
+import Data.Either (Either(..))
+import Data.Map (Map)
+import Data.Map as Map
+import Data.Maybe (Maybe(..))
+import Data.Newtype (class Newtype, un)
+import Data.Profunctor as Profunctor
+import Data.Set (Set)
+import Data.Set as Set
+import Data.Traversable (traverse)
+import Data.Tuple (Tuple(..))
+import Node.Path (FilePath)
+import Registry.Internal.Codec as Internal.Codec
+import Registry.PackageName (PackageName)
+
+-- | A graph of the dependencies between modules, discovered by the purs
+-- | compiler from a set of source files.
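+-- |
+-- | In the compiler's JSON output this is an object keyed by module name, for
+-- | example: { "Type.Equality": { "depends": [], "path": "type-equality/src/Type/Equality.purs" } }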
+type PursGraph = Map ModuleName PursGraphNode
+
+pursGraphCodec :: JsonCodec PursGraph
+pursGraphCodec = Internal.Codec.strMap "PursGraph" (Just <<< ModuleName) (un ModuleName) pursGraphNodeCodec
+
+type PursGraphNode =
+ { depends :: Array ModuleName
+ , path :: FilePath
+ }
+
+pursGraphNodeCodec :: JsonCodec PursGraphNode
+pursGraphNodeCodec = CA.Record.object "PursGraphNode"
+ { depends: CA.array moduleNameCodec
+ , path: CA.string
+ }
+
+-- | A module name string from a 'purs graph' invocation.
+newtype ModuleName = ModuleName String
+
+derive instance Newtype ModuleName _
+derive instance Eq ModuleName
+derive instance Ord ModuleName
+
+moduleNameCodec :: JsonCodec ModuleName
+moduleNameCodec = Profunctor.wrapIso ModuleName CA.string
+
+type AssociatedError = { module :: ModuleName, path :: FilePath, error :: String }
+
+-- | Given a function to parse the `path` component of `purs graph`, associate
+-- | all modules in the graph with their package names.
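+-- |
+-- | For example, a parse function that takes the first path segment as the
+-- | package name would associate the module at
+-- | 'type-equality/src/Type/Equality.purs' with the package 'type-equality'.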
+associateModules :: (FilePath -> Either String PackageName) -> PursGraph -> Either (NonEmptyArray AssociatedError) (Map ModuleName PackageName)
+associateModules parse graph = do
+ let
+ parsed :: Array (Either AssociatedError (Tuple ModuleName PackageName))
+ parsed = Map.toUnfoldableUnordered graph # map \(Tuple moduleName { path }) -> parse path # bimap
+ (\error -> { module: moduleName, path, error })
+ (\packageName -> Tuple moduleName packageName)
+
+ separated :: { errors :: Array AssociatedError, values :: Array (Tuple ModuleName PackageName) }
+ separated = parsed # Array.foldMap case _ of
+ Left err -> { errors: [ err ], values: [] }
+ Right tup -> { errors: [], values: [ tup ] }
+
+ case NonEmptyArray.fromArray separated.errors of
+ Nothing -> pure $ Map.fromFoldable separated.values
+ Just errors -> Left errors
+
+-- | Find direct dependencies of the given module, according to the given graph.
+directDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName)
+directDependencies name = map (Set.fromFoldable <<< _.depends) <<< Map.lookup name
+
+-- | Find all dependencies of the given module, according to the given graph.
+allDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName)
+allDependencies start graph = map Set.fromFoldable (getDependencies start)
+ where
+ getDependencies name =
+ map _.depends (Map.lookup name graph) >>= case _ of
+ [] -> pure []
+ directs -> do
+ let nextDeps = map Array.concat (traverse getDependencies directs)
+ nextDeps <> Just directs
diff --git a/lib/test/Registry.purs b/lib/test/Registry.purs
index f08ddf310..10551afb6 100644
--- a/lib/test/Registry.purs
+++ b/lib/test/Registry.purs
@@ -12,6 +12,7 @@ import Test.Registry.Operation as Test.Operation
import Test.Registry.Operation.Validation as Test.Operation.Validation
import Test.Registry.PackageName as Test.PackageName
import Test.Registry.PackageSet as Test.PackageSet
+import Test.Registry.PursGraph as Test.PursGraph
import Test.Registry.Range as Test.Range
import Test.Registry.SSH as Test.SSH
import Test.Registry.Sha256 as Test.Sha256
@@ -40,3 +41,4 @@ main = Aff.launchAff_ $ Spec.Runner.runSpec [ Spec.Reporter.consoleReporter ] do
Spec.describe "ManifestIndex" Test.ManifestIndex.spec
Spec.describe "Solver" Test.Solver.spec
Spec.describe "Operation Validation" Test.Operation.Validation.spec
+ Spec.describe "Purs Graph" Test.PursGraph.spec
diff --git a/lib/test/Registry/PursGraph.purs b/lib/test/Registry/PursGraph.purs
new file mode 100644
index 000000000..7d592ad1d
--- /dev/null
+++ b/lib/test/Registry/PursGraph.purs
@@ -0,0 +1,632 @@
+module Test.Registry.PursGraph (spec) where
+
+import Prelude
+
+import Data.Argonaut.Parser as Argonaut.Parser
+import Data.Array as Array
+import Data.Array.NonEmpty as NonEmptyArray
+import Data.Codec.Argonaut as CA
+import Data.Either (Either(..))
+import Data.Foldable (for_)
+import Data.Foldable as Foldable
+import Data.Map as Map
+import Data.Maybe (Maybe(..))
+import Data.Maybe as Maybe
+import Data.Set as Set
+import Data.String as String
+import Node.Path as Path
+import Registry.PackageName as PackageName
+import Registry.PursGraph (ModuleName(..))
+import Registry.PursGraph as PursGraph
+import Registry.Test.Assert as Assert
+import Registry.Test.Utils (unsafePackageName, unsafeStringify)
+import Safe.Coerce (coerce)
+import Test.Spec as Spec
+
+spec :: Spec.Spec Unit
+spec = do
+ let
+ parse raw = case Argonaut.Parser.jsonParser raw of
+ Left err -> Left $ "Failed to parse graph as JSON:\n\n" <> raw <> "\n\n due to an error:\n\n" <> err
+ Right json -> case CA.decode PursGraph.pursGraphCodec json of
+ Left err -> Left $ "Failed to decode graph JSON:\n\n" <> CA.printJsonDecodeError err
+ Right result -> pure result
+
+ Spec.describe "type-equality" do
+ case parse typeEquality of
+ Left err ->
+ Spec.it "Parses the graph" do
+ Assert.fail err
+
+ Right graph -> do
+ Spec.it "Has the correct number of modules" do
+ Map.size graph `Assert.shouldEqual` 1
+
+ Spec.it "Parses packages from paths" do
+ case PursGraph.associateModules (PackageName.parse <<< Maybe.fromMaybe "" <<< Array.head <<< String.split (String.Pattern Path.sep)) graph of
+ Left errs -> Assert.fail $ "Failed to parse package names:\n\n" <> NonEmptyArray.foldMap1 (\{ path, error } -> path <> ": " <> error) errs
+ Right names | Map.size names > 1 -> Assert.fail "Expected only one package name"
+ Right names ->
+ unless (Foldable.elem (unsafePackageName "type-equality") (Map.values names)) do
+ Assert.fail $ "Expected to find package name 'type-equality': " <> unsafeStringify names
+
+ Spec.it "Has correct direct dependencies for Type.Equality" do
+ PursGraph.directDependencies (ModuleName "Type.Equality") graph `Assert.shouldEqual` (Just Set.empty)
+
+ Spec.it "Has correct all dependencies for Type.Equality" do
+ PursGraph.allDependencies (ModuleName "Type.Equality") graph `Assert.shouldEqual` (Just Set.empty)
+
+ Spec.describe "prelude" do
+ case parse prelude of
+ Left err ->
+ Spec.it "Parses the graph" do
+ Assert.fail err
+
+ Right graph -> do
+ -- purs graph ... | jq length
+ let size = 50
+
+ Spec.it "Has the correct number of modules" do
+ Map.size graph `Assert.shouldEqual` size
+
+ Spec.it "Contains all expected modules" do
+ let
+ -- purs graph ... | jq keys
+ expected =
+ [ "Control.Applicative"
+ , "Control.Apply"
+ , "Control.Bind"
+ , "Control.Category"
+ , "Control.Monad"
+ , "Control.Semigroupoid"
+ , "Data.Boolean"
+ , "Data.BooleanAlgebra"
+ , "Data.Bounded"
+ , "Data.Bounded.Generic"
+ , "Data.CommutativeRing"
+ , "Data.DivisionRing"
+ , "Data.Eq"
+ , "Data.Eq.Generic"
+ , "Data.EuclideanRing"
+ , "Data.Field"
+ , "Data.Function"
+ , "Data.Functor"
+ , "Data.Generic.Rep"
+ , "Data.HeytingAlgebra"
+ , "Data.HeytingAlgebra.Generic"
+ , "Data.Monoid"
+ , "Data.Monoid.Additive"
+ , "Data.Monoid.Conj"
+ , "Data.Monoid.Disj"
+ , "Data.Monoid.Dual"
+ , "Data.Monoid.Endo"
+ , "Data.Monoid.Generic"
+ , "Data.Monoid.Multiplicative"
+ , "Data.NaturalTransformation"
+ , "Data.Ord"
+ , "Data.Ord.Generic"
+ , "Data.Ordering"
+ , "Data.Reflectable"
+ , "Data.Ring"
+ , "Data.Ring.Generic"
+ , "Data.Semigroup"
+ , "Data.Semigroup.First"
+ , "Data.Semigroup.Generic"
+ , "Data.Semigroup.Last"
+ , "Data.Semiring"
+ , "Data.Semiring.Generic"
+ , "Data.Show"
+ , "Data.Show.Generic"
+ , "Data.Symbol"
+ , "Data.Unit"
+ , "Data.Void"
+ , "Prelude"
+ , "Record.Unsafe"
+ , "Type.Proxy"
+ ]
+
+ if Array.length expected == size then
+ for_ expected \name -> case Map.lookup (coerce name) graph of
+ Nothing -> Assert.fail $ "Missing expected module name " <> name
+ Just _ -> pure unit
+ else
+ Assert.fail "Expected and actual module counts do not match; please update the test."
+
+ -- This one was worked by hand.
+ let workedModule = "Data.Field"
+ Spec.it ("Has correct direct dependencies for " <> workedModule) do
+ let
+ expected :: Array ModuleName
+ expected = coerce
+ [ "Data.CommutativeRing"
+ , "Data.DivisionRing"
+ , "Data.EuclideanRing"
+ , "Data.Ring"
+ , "Data.Semiring"
+ ]
+
+ case PursGraph.directDependencies (ModuleName workedModule) graph of
+ Nothing -> Assert.fail "Expected allDependencies to return a result"
+ Just deps -> Assert.shouldEqual (Set.toUnfoldable deps) expected
+
+ Spec.it ("Has correct all dependencies for " <> workedModule) do
+ let
+ expected :: Array ModuleName
+ expected = Array.sort $ coerce
+ [ -- directs
+ "Data.CommutativeRing"
+ , "Data.DivisionRing"
+ , "Data.EuclideanRing"
+ , "Data.Ring"
+ , "Data.Semiring"
+
+ -- transitive via Data.CommutativeRing
+ , "Data.Symbol"
+ , "Data.Unit"
+ , "Type.Proxy"
+
+ -- transitive via Data.EuclideanRing
+ , "Data.BooleanAlgebra"
+ , "Data.Eq"
+
+ -- transitive via Data.Ring
+ , "Record.Unsafe"
+
+ -- transitive via Data.BooleanAlgebra
+ , "Data.HeytingAlgebra"
+
+ -- transitive via Data.Eq
+ , "Data.Void"
+ ]
+
+ case PursGraph.allDependencies (ModuleName workedModule) graph of
+ Nothing -> Assert.fail "Expected allDependencies to return a result"
+ Just deps -> Assert.shouldEqual (Set.toUnfoldable deps) expected
+
+typeEquality :: String
+typeEquality =
+ """
+ {
+ "Type.Equality": {
+ "depends": [],
+ "path": "type-equality/src/Type/Equality.purs"
+ }
+ }
+ """
+
+prelude :: String
+prelude =
+ """
+ {
+ "Control.Applicative": {
+ "depends": [
+ "Control.Apply",
+ "Data.Functor",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Control/Applicative.purs"
+ },
+ "Control.Apply": {
+ "depends": [
+ "Data.Functor",
+ "Data.Function",
+ "Control.Category",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Control/Apply.purs"
+ },
+ "Control.Bind": {
+ "depends": [
+ "Control.Applicative",
+ "Control.Apply",
+ "Control.Category",
+ "Data.Function",
+ "Data.Functor",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Control/Bind.purs"
+ },
+ "Control.Category": {
+ "depends": [
+ "Control.Semigroupoid"
+ ],
+ "path": "prelude/src/Control/Category.purs"
+ },
+ "Control.Monad": {
+ "depends": [
+ "Control.Applicative",
+ "Control.Apply",
+ "Control.Bind",
+ "Data.Functor",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Control/Monad.purs"
+ },
+ "Control.Semigroupoid": {
+ "depends": [],
+ "path": "prelude/src/Control/Semigroupoid.purs"
+ },
+ "Data.Boolean": {
+ "depends": [],
+ "path": "prelude/src/Data/Boolean.purs"
+ },
+ "Data.BooleanAlgebra": {
+ "depends": [
+ "Data.HeytingAlgebra",
+ "Data.Symbol",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/BooleanAlgebra.purs"
+ },
+ "Data.Bounded": {
+ "depends": [
+ "Data.Ord",
+ "Data.Symbol",
+ "Data.Unit",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Bounded.purs"
+ },
+ "Data.Bounded.Generic": {
+ "depends": [
+ "Data.Generic.Rep",
+ "Data.Bounded"
+ ],
+ "path": "prelude/src/Data/Bounded/Generic.purs"
+ },
+ "Data.CommutativeRing": {
+ "depends": [
+ "Data.Ring",
+ "Data.Semiring",
+ "Data.Symbol",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/CommutativeRing.purs"
+ },
+ "Data.DivisionRing": {
+ "depends": [
+ "Data.EuclideanRing",
+ "Data.Ring",
+ "Data.Semiring"
+ ],
+ "path": "prelude/src/Data/DivisionRing.purs"
+ },
+ "Data.Eq": {
+ "depends": [
+ "Data.HeytingAlgebra",
+ "Data.Symbol",
+ "Data.Unit",
+ "Data.Void",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Eq.purs"
+ },
+ "Data.Eq.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Eq/Generic.purs"
+ },
+ "Data.EuclideanRing": {
+ "depends": [
+ "Data.BooleanAlgebra",
+ "Data.CommutativeRing",
+ "Data.Eq",
+ "Data.Ring",
+ "Data.Semiring"
+ ],
+ "path": "prelude/src/Data/EuclideanRing.purs"
+ },
+ "Data.Field": {
+ "depends": [
+ "Data.DivisionRing",
+ "Data.CommutativeRing",
+ "Data.EuclideanRing",
+ "Data.Ring",
+ "Data.Semiring"
+ ],
+ "path": "prelude/src/Data/Field.purs"
+ },
+ "Data.Function": {
+ "depends": [
+ "Control.Category",
+ "Data.Boolean",
+ "Data.Ord",
+ "Data.Ring"
+ ],
+ "path": "prelude/src/Data/Function.purs"
+ },
+ "Data.Functor": {
+ "depends": [
+ "Data.Function",
+ "Data.Unit",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Functor.purs"
+ },
+ "Data.Generic.Rep": {
+ "depends": [
+ "Data.Semigroup",
+ "Data.Show",
+ "Data.Symbol",
+ "Data.Void",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Generic/Rep.purs"
+ },
+ "Data.HeytingAlgebra": {
+ "depends": [
+ "Data.Symbol",
+ "Data.Unit",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/HeytingAlgebra.purs"
+ },
+ "Data.HeytingAlgebra.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep",
+ "Data.HeytingAlgebra"
+ ],
+ "path": "prelude/src/Data/HeytingAlgebra/Generic.purs"
+ },
+ "Data.Monoid": {
+ "depends": [
+ "Data.Boolean",
+ "Data.Eq",
+ "Data.EuclideanRing",
+ "Data.Ord",
+ "Data.Ordering",
+ "Data.Semigroup",
+ "Data.Symbol",
+ "Data.Unit",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Monoid.purs"
+ },
+ "Data.Monoid.Additive": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Monoid/Additive.purs"
+ },
+ "Data.Monoid.Conj": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.HeytingAlgebra",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Monoid/Conj.purs"
+ },
+ "Data.Monoid.Disj": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.HeytingAlgebra",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Monoid/Disj.purs"
+ },
+ "Data.Monoid.Dual": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Monoid/Dual.purs"
+ },
+ "Data.Monoid.Endo": {
+ "depends": [
+ "Prelude"
+ ],
+ "path": "prelude/src/Data/Monoid/Endo.purs"
+ },
+ "Data.Monoid.Generic": {
+ "depends": [
+ "Data.Monoid",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Monoid/Generic.purs"
+ },
+ "Data.Monoid.Multiplicative": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Monoid/Multiplicative.purs"
+ },
+ "Data.NaturalTransformation": {
+ "depends": [],
+ "path": "prelude/src/Data/NaturalTransformation.purs"
+ },
+ "Data.Ord": {
+ "depends": [
+ "Data.Eq",
+ "Data.Symbol",
+ "Data.Ordering",
+ "Data.Ring",
+ "Data.Unit",
+ "Data.Void",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Ord.purs"
+ },
+ "Data.Ord.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Ord/Generic.purs"
+ },
+ "Data.Ordering": {
+ "depends": [
+ "Data.Eq",
+ "Data.Semigroup",
+ "Data.Show"
+ ],
+ "path": "prelude/src/Data/Ordering.purs"
+ },
+ "Data.Reflectable": {
+ "depends": [
+ "Data.Ord",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Reflectable.purs"
+ },
+ "Data.Ring": {
+ "depends": [
+ "Data.Semiring",
+ "Data.Symbol",
+ "Data.Unit",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Ring.purs"
+ },
+ "Data.Ring.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Ring/Generic.purs"
+ },
+ "Data.Semigroup": {
+ "depends": [
+ "Data.Symbol",
+ "Data.Unit",
+ "Data.Void",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Semigroup.purs"
+ },
+ "Data.Semigroup.First": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Semigroup/First.purs"
+ },
+ "Data.Semigroup.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Semigroup/Generic.purs"
+ },
+ "Data.Semigroup.Last": {
+ "depends": [
+ "Prelude",
+ "Data.Eq",
+ "Data.Ord"
+ ],
+ "path": "prelude/src/Data/Semigroup/Last.purs"
+ },
+ "Data.Semiring": {
+ "depends": [
+ "Data.Symbol",
+ "Data.Unit",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Semiring.purs"
+ },
+ "Data.Semiring.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep"
+ ],
+ "path": "prelude/src/Data/Semiring/Generic.purs"
+ },
+ "Data.Show": {
+ "depends": [
+ "Data.Semigroup",
+ "Data.Symbol",
+ "Data.Unit",
+ "Data.Void",
+ "Record.Unsafe",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Show.purs"
+ },
+ "Data.Show.Generic": {
+ "depends": [
+ "Prelude",
+ "Data.Generic.Rep",
+ "Data.Symbol",
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Show/Generic.purs"
+ },
+ "Data.Symbol": {
+ "depends": [
+ "Type.Proxy"
+ ],
+ "path": "prelude/src/Data/Symbol.purs"
+ },
+ "Data.Unit": {
+ "depends": [],
+ "path": "prelude/src/Data/Unit.purs"
+ },
+ "Data.Void": {
+ "depends": [],
+ "path": "prelude/src/Data/Void.purs"
+ },
+ "Prelude": {
+ "depends": [
+ "Control.Applicative",
+ "Control.Apply",
+ "Control.Bind",
+ "Control.Category",
+ "Control.Monad",
+ "Control.Semigroupoid",
+ "Data.Boolean",
+ "Data.BooleanAlgebra",
+ "Data.Bounded",
+ "Data.CommutativeRing",
+ "Data.DivisionRing",
+ "Data.Eq",
+ "Data.EuclideanRing",
+ "Data.Field",
+ "Data.Function",
+ "Data.Functor",
+ "Data.HeytingAlgebra",
+ "Data.Monoid",
+ "Data.NaturalTransformation",
+ "Data.Ord",
+ "Data.Ordering",
+ "Data.Ring",
+ "Data.Semigroup",
+ "Data.Semiring",
+ "Data.Show",
+ "Data.Unit",
+ "Data.Void"
+ ],
+ "path": "prelude/src/Prelude.purs"
+ },
+ "Record.Unsafe": {
+ "depends": [],
+ "path": "prelude/src/Record/Unsafe.purs"
+ },
+ "Type.Proxy": {
+ "depends": [],
+ "path": "prelude/src/Type/Proxy.purs"
+ }
+ }
+ """