diff --git a/.github/workflows/ci-test-jit.yaml b/.github/workflows/ci-test-jit.yaml index 467be5f69f..64e4254c21 100644 --- a/.github/workflows/ci-test-jit.yaml +++ b/.github/workflows/ci-test-jit.yaml @@ -77,7 +77,7 @@ jobs: key: jit-test-results.dist-exe_${{ hashFiles(env.jit_dist_rel_exe) }}.tests_${{ env.runtime_tests_causalhash }}.yaml_${{ hashFiles('**/ci-test-jit.yaml') }} - name: install libb2 (linux) - uses: awalsh128/cache-apt-pkgs-action@latest + uses: awalsh128/cache-apt-pkgs-action@5902b33ae29014e6ca012c5d8025d4346556bd40 #v1.4.3 if: runner.os == 'Linux' && steps.cache-jit-test-results.outputs.cache-hit != 'true' with: packages: libb2-1 diff --git a/.gitignore b/.gitignore index 9af3e43c04..a2fb3975a1 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,7 @@ dist-newstyle *.prof.html *.hp *.ps +*.profiterole.* /.direnv/ /.envrc diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index ce07a487fb..2a2300329f 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -101,7 +101,7 @@ data BranchLocalIds' t d p c = LocalIds branchPatchLookup :: Vector p, branchChildLookup :: Vector c } - deriving (Show) + deriving (Show, Eq) -- | Bytes encoding a LocalBranch newtype LocalBranchBytes = LocalBranchBytes ByteString @@ -110,6 +110,7 @@ newtype LocalBranchBytes = LocalBranchBytes ByteString data SyncBranchFormat' parent text defn patch child = SyncFull (BranchLocalIds' text defn patch child) LocalBranchBytes | SyncDiff parent (BranchLocalIds' text defn patch child) LocalBranchBytes + deriving (Eq, Show) type SyncBranchFormat = SyncBranchFormat' BranchObjectId TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs index 582bfc65a3..87f532bf25 100644 --- 
a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs @@ -22,5 +22,6 @@ data SyncCausalFormat' causalHash valueHash = SyncCausalFormat { valueHash :: valueHash, parents :: Vector causalHash } + deriving stock (Eq, Show) type SyncCausalFormat = SyncCausalFormat' CausalHashId BranchHashId diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs index 5a6f401964..5752d2dd87 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs @@ -36,9 +36,11 @@ type SyncDeclFormat = data SyncDeclFormat' t d = SyncDecl (SyncLocallyIndexedComponent' t d) + deriving stock (Eq, Show) newtype SyncLocallyIndexedComponent' t d = SyncLocallyIndexedComponent (Vector (LocalIds' t d, ByteString)) + deriving stock (Eq, Show) -- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Entity.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Entity.hs index 3b93fd4b16..92cbb58828 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Entity.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Entity.hs @@ -24,6 +24,7 @@ data SyncEntity' text hash defn patch branchh branch causal | N (Namespace.SyncBranchFormat' branch text defn patch (branch, causal)) | P (Patch.SyncPatchFormat' patch text hash defn) | C (Causal.SyncCausalFormat' causal branchh) + deriving stock (Eq, Show) entityType :: SyncEntity' text hash defn patch branchh branch causal -> TempEntityType entityType = \case diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs index d8645b81ae..f68016de78 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs @@ 
-15,7 +15,7 @@ data LocalIds' t h = LocalIds { textLookup :: Vector t, defnLookup :: Vector h } - deriving (Functor, Show) + deriving stock (Functor, Show, Eq) type LocalIds = LocalIds' TextId ObjectId diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs index 7defa50234..452df27904 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs @@ -40,6 +40,7 @@ data PatchLocalIds' t h d = LocalIds patchHashLookup :: Vector h, patchDefnLookup :: Vector d } + deriving stock (Eq, Show) type SyncPatchFormat = SyncPatchFormat' PatchObjectId TextId HashId ObjectId @@ -47,6 +48,7 @@ data SyncPatchFormat' parent text hash defn = SyncFull (PatchLocalIds' text hash defn) ByteString | -- | p is the identity of the thing that the diff is relative to SyncDiff parent (PatchLocalIds' text hash defn) ByteString + deriving stock (Eq, Show) -- | Apply a list of patch diffs to a patch, left to right. 
applyPatchDiffs :: Patch -> [PatchDiff] -> Patch diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 033efb8655..043fd697c7 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -111,6 +111,7 @@ module U.Codebase.Sqlite.Queries loadProjectByName, expectProject, loadAllProjects, + loadAllProjectsByRecentlyAccessed, loadAllProjectsBeginningWith, insertProject, renameProject, @@ -256,6 +257,7 @@ module U.Codebase.Sqlite.Queries addCurrentProjectPathTable, addProjectBranchReflogTable, addProjectBranchCausalHashIdColumn, + addProjectBranchLastAccessedColumn, -- ** schema version currentSchemaVersion, @@ -420,7 +422,7 @@ type TextPathSegments = [Text] -- * main squeeze currentSchemaVersion :: SchemaVersion -currentSchemaVersion = 17 +currentSchemaVersion = 18 runCreateSql :: Transaction () runCreateSql = @@ -486,6 +488,10 @@ addProjectBranchCausalHashIdColumn :: Transaction () addProjectBranchCausalHashIdColumn = executeStatements $(embedProjectStringFile "sql/014-add-project-branch-causal-hash-id.sql") +addProjectBranchLastAccessedColumn :: Transaction () +addProjectBranchLastAccessedColumn = + executeStatements $(embedProjectStringFile "sql/015-add-project-branch-last-accessed.sql") + schemaVersion :: Transaction SchemaVersion schemaVersion = queryOneCol @@ -2274,32 +2280,6 @@ globEscape = ']' -> "[]]" c -> Text.singleton c --- | Escape special characters for "LIKE" matches. --- --- Prepared statements prevent sql injection, but it's still possible some user --- may be able to craft a query using a fake "hash" that would let them see more than they --- ought to. --- --- You still need to provide the escape char in the sql query, E.g. --- --- @@ --- SELECT * FROM table --- WHERE txt LIKE ? 
ESCAPE '\' --- @@ --- --- >>> likeEscape '\\' "Nat.%" --- "Nat.\%" -likeEscape :: Char -> Text -> Text -likeEscape '%' _ = error "Can't use % or _ as escape characters" -likeEscape '_' _ = error "Can't use % or _ as escape characters" -likeEscape escapeChar pat = - flip Text.concatMap pat \case - '%' -> Text.pack [escapeChar, '%'] - '_' -> Text.pack [escapeChar, '_'] - c - | c == escapeChar -> Text.pack [escapeChar, escapeChar] - | otherwise -> Text.singleton c - -- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this -- is only true on Share. -- @@ -3623,6 +3603,17 @@ loadAllProjects = ORDER BY name ASC |] +-- | Load all projects, ordered by their most recently accessed branch first (never-accessed last). +loadAllProjectsByRecentlyAccessed :: Transaction [Project] +loadAllProjectsByRecentlyAccessed = + queryListRow + [sql| + SELECT project.id, project.name + FROM project + JOIN project_branch ON project.id = project_branch.project_id GROUP BY project.id + ORDER BY MAX(project_branch.last_accessed) DESC NULLS LAST, project.name ASC + |] + -- | Load all projects whose name matches a prefix. 
loadAllProjectsBeginningWith :: Maybe Text -> Transaction [Project] loadAllProjectsBeginningWith mayPrefix = do @@ -3761,7 +3752,7 @@ loadAllProjectBranchNamePairs = FROM project JOIN project_branch ON project.id = project_branch.project_id - ORDER BY project.name ASC, project_branch.name ASC + ORDER BY project_branch.last_accessed DESC NULLS LAST, project.name ASC, project_branch.name ASC |] <&> fmap \(projectName, branchName, projectId, branchId) -> ( ProjectAndBranch projectName branchName, @@ -4484,6 +4475,13 @@ setCurrentProjectPath projId branchId path = do INSERT INTO current_project_path(project_id, branch_id, path) VALUES (:projId, :branchId, :jsonPath) |] + execute + [sql| + UPDATE project_branch + SET last_accessed = strftime('%s', 'now') + WHERE project_id = :projId + AND branch_id = :branchId + |] where jsonPath :: Text jsonPath = diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs index e50d215ecf..f06fc70ec3 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs @@ -49,6 +49,7 @@ newtype LocallyIndexedComponent' t d = LocallyIndexedComponent newtype SyncLocallyIndexedComponent' t d = SyncLocallyIndexedComponent (Vector (LocalIds' t d, ByteString)) + deriving stock (Eq, Show) {- message = "hello, world" -> ABT { ... 
{ Term.F.Text "hello, world" } } -> hashes to (#abc, 0) @@ -127,6 +128,7 @@ data TermFormat' t d = Term (LocallyIndexedComponent' t d) type SyncTermFormat = SyncTermFormat' TextId ObjectId data SyncTermFormat' t d = SyncTerm (SyncLocallyIndexedComponent' t d) + deriving stock (Eq, Show) data WatchResultFormat = WatchResult WatchLocalIds Term diff --git a/codebase2/codebase-sqlite/sql/015-add-project-branch-last-accessed.sql b/codebase2/codebase-sqlite/sql/015-add-project-branch-last-accessed.sql new file mode 100644 index 0000000000..dae33a8e87 --- /dev/null +++ b/codebase2/codebase-sqlite/sql/015-add-project-branch-last-accessed.sql @@ -0,0 +1,3 @@ +-- Add a new column to the project_branch table to store the last time that project branch was accessed. +-- This column is stored as a unix epoch time. +ALTER TABLE project_branch ADD COLUMN last_accessed INTEGER NULL; diff --git a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal index 2641df87cd..ac075bccfe 100644 --- a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal +++ b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.36.0. +-- This file has been generated from package.yaml by hpack version 0.37.0. -- -- see: https://github.com/sol/hpack @@ -24,6 +24,7 @@ extra-source-files: sql/012-add-current-project-path-table.sql sql/013-add-project-branch-reflog-table.sql sql/014-add-project-branch-causal-hash-id.sql + sql/015-add-project-branch-last-accessed.sql sql/create.sql source-repository head diff --git a/docs/release-steps.md b/docs/release-steps.md index dcb3d2ea17..02951d72a3 100644 --- a/docs/release-steps.md +++ b/docs/release-steps.md @@ -19,7 +19,11 @@ Edit `releases._.README` to include `Release: `. .basedev.release> push git(git@github.com:unisonweb/base) ``` -## 2. Run Release script +## 2. 
Check or run cloud client tests + +https://github.com/unisoncomputing/cloud-client-tests/actions/workflows/cloud-client-tests.yml + +## 3. Run Release script * **Milestone Release**: Look up the most recent release; bump the number and remove any trailing letters, e.g. `./scripts/make-release release/M5 trunk` * **Minor Release**: Increment the trailing letter of the previous release, or add an `a` to the previous milestone release, e.g. `./scripts/make-release release/M5a trunk` @@ -40,11 +44,11 @@ Including: After successfully executing the script you just have to sit tight and wait for all the jobs to complete. -## 3 +## 4 Smoke test of the new release. Try `brew upgrade unison-language`, launch it, launch `ui`. -## 4 +## 5 Write up release notes, template below. diff --git a/hie.yaml b/hie.yaml index 811a7099ff..6b28f83ee0 100644 --- a/hie.yaml +++ b/hie.yaml @@ -140,6 +140,9 @@ cradle: - path: "unison-share-api/src" component: "unison-share-api:lib" + - path: "unison-share-api/tests" + component: "unison-share-api:test:unison-share-api-tests" + - path: "unison-share-projects-api/src" component: "unison-share-projects-api:lib" diff --git a/lib/unison-prelude/src/Unison/Prelude.hs b/lib/unison-prelude/src/Unison/Prelude.hs index 374f4a1812..ef48bc2556 100644 --- a/lib/unison-prelude/src/Unison/Prelude.hs +++ b/lib/unison-prelude/src/Unison/Prelude.hs @@ -25,6 +25,7 @@ module Unison.Prelude whenJustM, eitherToMaybe, maybeToEither, + eitherToThese, altSum, altMap, hoistMaybe, @@ -82,6 +83,7 @@ import Data.Text as X (Text) import Data.Text qualified as Text import Data.Text.Encoding as X (decodeUtf8, encodeUtf8) import Data.Text.IO qualified as Text +import Data.These (These (..)) import Data.Traversable as X (for) import Data.Typeable as X (Typeable) import Data.Void as X (Void) @@ -205,6 +207,9 @@ throwEitherM = throwEitherMWith id throwEitherMWith :: forall e e' m a. 
(MonadIO m, Exception e') => (e -> e') -> m (Either e a) -> m a throwEitherMWith f action = throwExceptT . withExceptT f $ (ExceptT action) +eitherToThese :: Either a b -> These a b +eitherToThese = either This That + tShow :: (Show a) => a -> Text tShow = Text.pack . show diff --git a/lib/unison-prelude/src/Unison/Util/Set.hs b/lib/unison-prelude/src/Unison/Util/Set.hs index 4e3c6ef9b9..ac5bb12454 100644 --- a/lib/unison-prelude/src/Unison/Util/Set.hs +++ b/lib/unison-prelude/src/Unison/Util/Set.hs @@ -5,6 +5,7 @@ module Unison.Util.Set mapMaybe, symmetricDifference, Unison.Util.Set.traverse, + Unison.Util.Set.for, flatMap, filterM, forMaybe, @@ -51,6 +52,9 @@ forMaybe xs f = traverse :: (Applicative f, Ord b) => (a -> f b) -> Set a -> f (Set b) traverse f = fmap Set.fromList . Prelude.traverse f . Set.toList +for :: (Ord b, Applicative f) => Set a -> (a -> f b) -> f (Set b) +for = flip Unison.Util.Set.traverse + flatMap :: (Ord b) => (a -> Set b) -> Set a -> Set b flatMap f = Set.unions . fmap f . 
Set.toList diff --git a/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs b/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs index 6f04fc1976..9c5d1f8d08 100644 --- a/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs +++ b/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs @@ -695,8 +695,8 @@ column2UnzippedM bottomPadding left right = column3sep :: (LL.ListLike s Char, IsString s) => Pretty s -> [(Pretty s, Pretty s, Pretty s)] -> Pretty s column3sep sep rows = - let bc = align [(b, sep <> c) | (_, b, c) <- rows] - abc = group <$> align [(a, sep <> bc) | ((a, _, _), bc) <- rows `zip` bc] + let bc = align $ [(b, indent sep c) | (_, b, c) <- rows] + abc = group <$> align [(a, indent sep bc) | ((a, _, _), bc) <- rows `zip` bc] in lines abc -- | Creates an aligned table with an arbitrary number of columns separated by `sep` diff --git a/lib/unison-sqlite/src/Unison/Sqlite.hs b/lib/unison-sqlite/src/Unison/Sqlite.hs index eec974d6ed..8c33b3242b 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite.hs @@ -19,6 +19,7 @@ module Unison.Sqlite Transaction, runTransaction, runTransactionWithRollback, + runTransactionExceptT, runReadOnlyTransaction, runWriteTransaction, cacheTransaction, @@ -55,6 +56,9 @@ module Unison.Sqlite queryOneRowCheck, queryOneColCheck, + -- * Utilities + likeEscape, + -- * Rows modified rowsModified, @@ -118,6 +122,7 @@ import Unison.Sqlite.Exception import Unison.Sqlite.JournalMode (JournalMode (..), SetJournalModeException (..), trySetJournalMode) import Unison.Sqlite.Sql (Sql, sql) import Unison.Sqlite.Transaction +import Unison.Sqlite.Utils (likeEscape) -- $query-naming-convention -- diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs b/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs index b44a04b0fa..5bf735b917 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs @@ -3,6 +3,7 @@ module Unison.Sqlite.Transaction 
Transaction, runTransaction, runTransactionWithRollback, + runTransactionExceptT, runReadOnlyTransaction, runWriteTransaction, cacheTransaction, @@ -44,6 +45,7 @@ where import Control.Concurrent (threadDelay) import Control.Exception (Exception (fromException), onException, throwIO) +import Control.Monad.Trans.Except (ExceptT, runExceptT) import Control.Monad.Trans.Reader (ReaderT (..)) import Data.Text qualified as Text import Data.Unique (Unique, newUnique) @@ -130,6 +132,13 @@ runTransactionWithRollback conn transaction = liftIO do Right x -> pure x {-# SPECIALIZE runTransactionWithRollback :: Connection -> ((forall void. a -> Transaction void) -> Transaction a) -> IO a #-} +-- | Run a transaction wrapped in an 'ExceptT'. If the ExceptT fails, the transaction is rolled back. +runTransactionExceptT :: (MonadIO m, HasCallStack) => Connection -> ExceptT e Transaction a -> m (Either e a) +runTransactionExceptT conn transaction = runTransactionWithRollback conn \rollback -> do + runExceptT transaction >>= \case + Left e -> rollback (Left e) + Right a -> pure (Right a) + -- | Run a transaction that is known to only perform reads. -- -- The action is provided a function that peels off the 'Transaction' newtype without sending the corresponding diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Utils.hs b/lib/unison-sqlite/src/Unison/Sqlite/Utils.hs new file mode 100644 index 0000000000..84744b56b0 --- /dev/null +++ b/lib/unison-sqlite/src/Unison/Sqlite/Utils.hs @@ -0,0 +1,30 @@ +module Unison.Sqlite.Utils (likeEscape) where + +import Data.Text (Text) +import Data.Text qualified as Text + +-- | Escape special characters for "LIKE" matches. +-- +-- Prepared statements prevent sql injection, but it's still possible some user +-- may be able to craft a query using a fake "hash" that would let them see more than they +-- ought to. +-- +-- You still need to provide the escape char in the sql query, E.g. +-- +-- @@ +-- SELECT * FROM table +-- WHERE txt LIKE ? 
ESCAPE '\' +-- @@ +-- +-- >>> likeEscape '\\' "Nat.%" +-- "Nat.\%" +likeEscape :: Char -> Text -> Text +likeEscape '%' _ = error "Can't use % or _ as escape characters" +likeEscape '_' _ = error "Can't use % or _ as escape characters" +likeEscape escapeChar pat = + flip Text.concatMap pat \case + '%' -> Text.pack [escapeChar, '%'] + '_' -> Text.pack [escapeChar, '_'] + c + | c == escapeChar -> Text.pack [escapeChar, escapeChar] + | otherwise -> Text.singleton c diff --git a/lib/unison-sqlite/unison-sqlite.cabal b/lib/unison-sqlite/unison-sqlite.cabal index 28ea0f7c4f..3db0980a7c 100644 --- a/lib/unison-sqlite/unison-sqlite.cabal +++ b/lib/unison-sqlite/unison-sqlite.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.36.0. +-- This file has been generated from package.yaml by hpack version 0.37.0. -- -- see: https://github.com/sol/hpack @@ -27,6 +27,7 @@ library Unison.Sqlite.Exception Unison.Sqlite.JournalMode Unison.Sqlite.Sql + Unison.Sqlite.Utils hs-source-dirs: src default-extensions: diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index 1fcb0e5c7c..e8cb24e84e 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -99,6 +99,7 @@ module Unison.Codebase -- * Direct codebase access runTransaction, runTransactionWithRollback, + runTransactionExceptT, withConnection, withConnectionIO, @@ -112,6 +113,7 @@ module Unison.Codebase ) where +import Control.Monad.Except (ExceptT) import Data.Map qualified as Map import Data.Set qualified as Set import U.Codebase.Branch qualified as V2Branch @@ -174,6 +176,14 @@ runTransactionWithRollback :: runTransactionWithRollback Codebase {withConnection} action = withConnection \conn -> Sqlite.runTransactionWithRollback conn action +runTransactionExceptT :: + (MonadIO m) => + Codebase m v a -> + ExceptT e Sqlite.Transaction b -> + m (Either e b) +runTransactionExceptT 
Codebase {withConnection} action = + withConnection \conn -> Sqlite.runTransactionExceptT conn action + getShallowCausalAtPathFromRootHash :: -- Causal to start at, if Nothing use the codebase's root branch. CausalHash -> diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs index 9052e5511a..32527471df 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs @@ -84,7 +84,8 @@ migrations regionVar getDeclType termBuffer declBuffer rootCodebasePath = sqlMigration 14 Q.addSquashResultTable, sqlMigration 15 Q.addSquashResultTableIfNotExists, sqlMigration 16 Q.cdToProjectRoot, - (17 {- This migration takes a raw sqlite connection -}, \conn -> migrateSchema16To17 conn) + (17 {- This migration takes a raw sqlite connection -}, \conn -> migrateSchema16To17 conn), + sqlMigration 18 Q.addProjectBranchLastAccessedColumn ] where runT :: Sqlite.Transaction () -> Sqlite.Connection -> IO () diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema16To17.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema16To17.hs index 7771c08291..a565f808b1 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema16To17.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema16To17.hs @@ -5,9 +5,11 @@ module Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema16To17 (migrateSch import Control.Lens import Data.Aeson qualified as Aeson +import Data.Aeson.Text qualified as Aeson import Data.Map qualified as Map import Data.Text qualified as Text import Data.Text.Encoding qualified as Text +import Data.Text.Lazy qualified as Text.Lazy import Data.UUID (UUID) import Data.UUID qualified as UUID import U.Codebase.Branch.Type qualified as V2Branch @@ -76,8 +78,8 @@ 
migrateSchema16To17 conn = withDisabledForeignKeys $ do case mayRecentProjectBranch of Just (projectId, branchId) -> - Q.setCurrentProjectPath projectId branchId [] - Nothing -> Q.setCurrentProjectPath scratchMain.projectId scratchMain.branchId [] + initializeCurrentProjectPath projectId branchId [] + Nothing -> initializeCurrentProjectPath scratchMain.projectId scratchMain.branchId [] Debug.debugLogM Debug.Migration "Done migrating to version 17" Q.setSchemaVersion 17 where @@ -89,6 +91,19 @@ migrateSchema16To17 conn = withDisabledForeignKeys $ do let enable = Connection.execute conn [Sqlite.sql| PRAGMA foreign_keys=ON |] let action = Sqlite.runWriteTransaction conn \run -> run $ m UnsafeIO.bracket disable (const enable) (const action) + initializeCurrentProjectPath :: ProjectId -> ProjectBranchId -> [NameSegment] -> Sqlite.Transaction () + initializeCurrentProjectPath projId branchId path = do + Sqlite.execute + [Sqlite.sql| DELETE FROM current_project_path |] + Sqlite.execute + [Sqlite.sql| + INSERT INTO current_project_path(project_id, branch_id, path) + VALUES (:projId, :branchId, :jsonPath) + |] + where + jsonPath :: Text + jsonPath = + Text.Lazy.toStrict (Aeson.encodeToLazyText $ NameSegment.toUnescapedText <$> path) data ForeignKeyFailureException = ForeignKeyFailureException diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs index 050d7f5fda..cef8475cbf 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs @@ -96,6 +96,7 @@ createSchema = do Q.addCurrentProjectPathTable Q.addProjectBranchReflogTable Q.addProjectBranchCausalHashIdColumn + Q.addProjectBranchLastAccessedColumn (_, emptyCausalHashId) <- emptyCausalHash (_, ProjectBranch {projectId, branchId}) <- insertProjectAndBranch scratchProjectName scratchBranchName emptyCausalHashId Q.setCurrentProjectPath 
projectId branchId [] diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 098c48f302..d3d48f2c8a 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -20,6 +20,7 @@ library: - condition: "!os(windows)" dependencies: unix dependencies: + - attoparsec - Diff - IntervalMap - ListLike @@ -32,7 +33,10 @@ library: - co-log-core - code-page - concurrent-output + - conduit - containers >= 0.6.3 + - conduit + - conduit-extra - cryptonite - either - errors @@ -65,8 +69,10 @@ library: - recover-rtti - regex-tdfa - semialign + - serialise - servant - servant-client + - servant-conduit - stm - temporary - text-ansi diff --git a/unison-cli/src/Unison/Cli/MonadUtils.hs b/unison-cli/src/Unison/Cli/MonadUtils.hs index 39e8cfb4e2..bca9e1e0d0 100644 --- a/unison-cli/src/Unison/Cli/MonadUtils.hs +++ b/unison-cli/src/Unison/Cli/MonadUtils.hs @@ -49,6 +49,7 @@ module Unison.Cli.MonadUtils stepManyAtM, updateProjectBranchRoot, updateProjectBranchRoot_, + setProjectBranchRootToCausalHash, updateAtM, updateAt, updateAndStepAt, @@ -454,6 +455,13 @@ updateProjectBranchRoot projectBranch reason f = do liftIO (env.lspCheckForChanges projectPathIds) pure result +setProjectBranchRootToCausalHash :: ProjectBranch -> Text -> CausalHash -> Cli () +setProjectBranchRootToCausalHash projectBranch reason targetCH = do + Cli.time "setProjectBranchRootToCausalHash" do + Cli.runTransaction $ do + targetCHID <- Q.expectCausalHashIdByCausalHash targetCH + Q.setProjectBranchHead reason (projectBranch ^. #projectId) (projectBranch ^. 
#branchId) targetCHID + updateProjectBranchRoot_ :: ProjectBranch -> Text -> (Branch IO -> Branch IO) -> Cli () updateProjectBranchRoot_ projectBranch reason f = do updateProjectBranchRoot projectBranch reason (\b -> pure (f b, ())) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 558f610150..3dd88fd7e8 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -59,8 +59,8 @@ import Unison.Codebase.Editor.HandleInput.DeleteBranch (handleDeleteBranch) import Unison.Codebase.Editor.HandleInput.DeleteNamespace (getEndangeredDependents, handleDeleteNamespace) import Unison.Codebase.Editor.HandleInput.DeleteProject (handleDeleteProject) import Unison.Codebase.Editor.HandleInput.Dependents (handleDependents) -import Unison.Codebase.Editor.HandleInput.EditNamespace (handleEditNamespace) import Unison.Codebase.Editor.HandleInput.EditDependents (handleEditDependents) +import Unison.Codebase.Editor.HandleInput.EditNamespace (handleEditNamespace) import Unison.Codebase.Editor.HandleInput.FindAndReplace (handleStructuredFindI, handleStructuredFindReplaceI, handleTextFindI) import Unison.Codebase.Editor.HandleInput.FormatFile qualified as Format import Unison.Codebase.Editor.HandleInput.Global qualified as Global @@ -73,6 +73,7 @@ import Unison.Codebase.Editor.HandleInput.MoveAll (handleMoveAll) import Unison.Codebase.Editor.HandleInput.MoveBranch (doMoveBranch) import Unison.Codebase.Editor.HandleInput.MoveTerm (doMoveTerm) import Unison.Codebase.Editor.HandleInput.MoveType (doMoveType) +import Unison.Codebase.Editor.HandleInput.Names (handleNames) import Unison.Codebase.Editor.HandleInput.NamespaceDependencies (handleNamespaceDependencies) import Unison.Codebase.Editor.HandleInput.NamespaceDiffUtils (diffHelper) import Unison.Codebase.Editor.HandleInput.ProjectClone (handleClone) @@ -87,6 +88,7 @@ import 
Unison.Codebase.Editor.HandleInput.ReleaseDraft (handleReleaseDraft) import Unison.Codebase.Editor.HandleInput.Run (handleRun) import Unison.Codebase.Editor.HandleInput.RuntimeUtils qualified as RuntimeUtils import Unison.Codebase.Editor.HandleInput.ShowDefinition (handleShowDefinition) +import Unison.Codebase.Editor.HandleInput.SyncV2 qualified as SyncV2 import Unison.Codebase.Editor.HandleInput.TermResolution (resolveMainRef) import Unison.Codebase.Editor.HandleInput.Tests qualified as Tests import Unison.Codebase.Editor.HandleInput.Todo (handleTodo) @@ -497,29 +499,8 @@ loop e = do fixupOutput :: Path.HQSplit -> HQ.HashQualified Name fixupOutput = HQ'.toHQ . Path.nameFromHQSplit - NamesI global query -> do - hqLength <- Cli.runTransaction Codebase.hashLength - let searchNames names = do - let pped = PPED.makePPED (PPE.hqNamer 10 names) (PPE.suffixifyByHash names) - unsuffixifiedPPE = PPED.unsuffixifiedPPE pped - terms = Names.lookupHQTerm Names.IncludeSuffixes query names - types = Names.lookupHQType Names.IncludeSuffixes query names - terms' :: [(Referent, [HQ'.HashQualified Name])] - terms' = map (\r -> (r, PPE.allTermNames unsuffixifiedPPE r)) (Set.toList terms) - types' :: [(Reference, [HQ'.HashQualified Name])] - types' = map (\r -> (r, PPE.allTypeNames unsuffixifiedPPE r)) (Set.toList types) - pure (terms', types') - if global - then do - Global.forAllProjectBranches \(projBranchNames, _ids) branch -> do - let names = Branch.toNames . 
Branch.head $ branch - (terms, types) <- searchNames names - when (not (null terms) || not (null types)) do - Cli.respond $ GlobalListNames projBranchNames hqLength types terms - else do - names <- Cli.currentNames - (terms, types) <- searchNames names - Cli.respond $ ListNames hqLength types terms + NamesI global queries -> do + mapM_ (handleNames global) queries DocsI srcs -> do for_ srcs docsI CreateAuthorI authorNameSegment authorFullName -> do @@ -688,6 +669,13 @@ loop e = do Cli.respond Success PullI sourceTarget pullMode -> handlePull sourceTarget pullMode PushRemoteBranchI pushRemoteBranchInput -> handlePushRemoteBranch pushRemoteBranchInput + SyncToFileI syncFileDest projectBranchName -> SyncV2.handleSyncToFile syncFileDest projectBranchName + SyncFromFileI syncFileSrc projectBranchName -> do + description <- inputDescription input + SyncV2.handleSyncFromFile description syncFileSrc projectBranchName + SyncFromCodebaseI srcCodebasePath srcBranch destBranch -> do + description <- inputDescription input + SyncV2.handleSyncFromCodebase description srcCodebasePath srcBranch destBranch ListDependentsI hq -> handleDependents hq ListDependenciesI hq -> handleDependencies hq NamespaceDependenciesI path -> handleNamespaceDependencies path @@ -1018,6 +1006,11 @@ inputDescription input = ProjectsI -> wat PullI {} -> wat PushRemoteBranchI {} -> wat + SyncToFileI {} -> wat + SyncFromFileI fp pab -> + pure $ "sync.from-file " <> into @Text fp <> " " <> into @Text pab + SyncFromCodebaseI fp srcBranch destBranch -> do + pure $ "sync.from-codebase " <> into @Text fp <> " " <> into @Text srcBranch <> " " <> into @Text destBranch QuitI {} -> wat ReleaseDraftI {} -> wat ShowDefinitionI {} -> wat diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs index 72920b190d..8dc383882e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs +++ 
b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs @@ -78,6 +78,7 @@ import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Name (Name) import Unison.NameSegment (NameSegment) import Unison.NameSegment qualified as NameSegment +import Unison.Names (Names) import Unison.Parser.Ann (Ann) import Unison.Prelude import Unison.Project @@ -241,6 +242,22 @@ doMerge info = do let blob0 = Merge.makeMergeblob0 nametrees3 libdeps3 + names3 :: Merge.ThreeWay Names <- do + let causalHashes = + Merge.TwoOrThreeWay + { alice = info.alice.causalHash, + bob = info.bob.causalHash, + lca = info.lca.causalHash + } + branches <- for causalHashes \ch -> do + liftIO (Codebase.getBranchForHash env.codebase ch) >>= \case + Nothing -> done (Output.CouldntLoadBranch ch) + Just b -> pure b + let names = fmap (Branch.toNames . Branch.head) branches + pure Merge.ThreeWay {alice = names.alice, bob = names.bob, lca = fromMaybe mempty names.lca} + + respondRegion (Output.Literal "Loading definitions...") + -- Hydrate hydratedDefns :: Merge.ThreeWay @@ -260,14 +277,14 @@ doMerge info = do in bimap f g <$> blob0.defns ) - respondRegion (Output.Literal "Computing diff between branches...") + respondRegion (Output.Literal "Computing diffs...") blob1 <- - Merge.makeMergeblob1 blob0 hydratedDefns & onLeft \case + Merge.makeMergeblob1 blob0 names3 hydratedDefns & onLeft \case Merge.Alice reason -> done (Output.IncoherentDeclDuringMerge mergeTarget reason) Merge.Bob reason -> done (Output.IncoherentDeclDuringMerge mergeSource reason) - liftIO (debugFunctions.debugDiffs blob1.diffs) + liftIO (debugFunctions.debugDiffs blob1.diffsFromLCA) liftIO (debugFunctions.debugCombinedDiff blob1.diff) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Names.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Names.hs new file mode 100644 index 0000000000..1cd94f31c9 --- /dev/null +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Names.hs @@ -0,0 +1,70 @@ +module 
Unison.Codebase.Editor.HandleInput.Names (handleNames) where + +import Control.Monad (when) +import Data.Set qualified as Set +import Unison.Cli.Monad (Cli) +import Unison.Cli.Monad qualified as Cli +import Unison.Cli.NamesUtils qualified as Cli +import Unison.Codebase qualified as Codebase +import Unison.Codebase.Branch qualified as Branch +import Unison.Codebase.Branch.Names qualified as Branch +import Unison.Codebase.Editor.HandleInput.Global qualified as Global +import Unison.Codebase.Editor.Input (ErrorMessageOrName, RawQuery) +import Unison.Codebase.Editor.Output (Output (..)) +import Unison.HashQualifiedPrime qualified as HQ' +import Unison.Name (Name) +import Unison.NamesWithHistory qualified as Names +import Unison.PrettyPrintEnv qualified as PPE +import Unison.PrettyPrintEnv.Names qualified as PPE +import Unison.PrettyPrintEnvDecl qualified as PPED +import Unison.PrettyPrintEnvDecl.Names qualified as PPED +import Unison.Reference (Reference) +import Unison.Referent (Referent) +import Unison.Util.Pretty qualified as P + +-- | Handles a single @NamesI@ input query returning terms that match a given name. +-- +-- Parameters: +-- +-- * @global :: Bool@ +-- ** If @True@, search all projects and branches. +-- ** If @False@, search only the current branch. +-- +-- * @query :: (RawQuery, ErrorMessageOrName)@ +-- ** The first member is the raw @nameQuery@ being handled. +-- ** The second member is the parsed @nameQuery@ that is either an error message +-- to be printed or a name that can be looked up in the codebase. 
+handleNames :: + Bool -> + (RawQuery, ErrorMessageOrName) -> + Cli () +handleNames _ (nameQuery, Left errMsg) = do + Cli.respond $ + PrintMessage $ + P.lines [prettyNameQuery, errMsg] + where + prettyNameQuery = + P.red (P.bold $ P.string nameQuery) <> ":" +handleNames global (nameQuery, Right query) = do + hqLength <- Cli.runTransaction Codebase.hashLength + let searchNames names = do + let pped = PPED.makePPED (PPE.hqNamer 10 names) (PPE.suffixifyByHash names) + unsuffixifiedPPE = PPED.unsuffixifiedPPE pped + terms = Names.lookupHQTerm Names.IncludeSuffixes query names + types = Names.lookupHQType Names.IncludeSuffixes query names + terms' :: [(Referent, [HQ'.HashQualified Name])] + terms' = map (\r -> (r, PPE.allTermNames unsuffixifiedPPE r)) (Set.toList terms) + types' :: [(Reference, [HQ'.HashQualified Name])] + types' = map (\r -> (r, PPE.allTypeNames unsuffixifiedPPE r)) (Set.toList types) + pure (terms', types') + if global + then do + Global.forAllProjectBranches \(projBranchNames, _ids) branch -> do + let names = Branch.toNames . 
Branch.head $ branch + (terms, types) <- searchNames names + when (not (null terms) || not (null types)) do + Cli.respond $ GlobalListNames nameQuery projBranchNames hqLength types terms + else do + names <- Cli.currentNames + (terms, types) <- searchNames names + Cli.respond $ ListNames nameQuery hqLength types terms diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs new file mode 100644 index 0000000000..f34a64302a --- /dev/null +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs @@ -0,0 +1,71 @@ +module Unison.Codebase.Editor.HandleInput.SyncV2 + ( handleSyncToFile, + handleSyncFromFile, + handleSyncFromCodebase, + ) +where + +import Control.Lens +import Control.Monad.Reader (MonadReader (..)) +import U.Codebase.Sqlite.Queries qualified as Q +import Unison.Cli.Monad (Cli) +import Unison.Cli.Monad qualified as Cli +import Unison.Cli.MonadUtils qualified as Cli +import Unison.Cli.ProjectUtils qualified as Project +import Unison.Codebase (CodebasePath) +import Unison.Codebase qualified as Codebase +import Unison.Codebase.Editor.Output qualified as Output +import Unison.Codebase.Init qualified as Init +import Unison.Codebase.SqliteCodebase qualified as SqliteCodebase +import Unison.Prelude +import Unison.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName) +import Unison.Share.SyncV2 qualified as SyncV2 +import Unison.SyncV2.Types (BranchRef) + +handleSyncToFile :: FilePath -> ProjectAndBranch (Maybe ProjectName) (Maybe ProjectBranchName) -> Cli () +handleSyncToFile destSyncFile branchToSync = do + pp <- Cli.getCurrentProjectPath + projectBranch <- Project.resolveProjectBranchInProject (pp ^. #project) branchToSync + causalHash <- Cli.runTransaction $ Project.getProjectBranchCausalHash (projectBranch ^. #branch) + let branchRef = into @BranchRef $ ProjectAndBranch (projectBranch ^. #project . #name) (projectBranch ^. #branch . 
#name) + Cli.Env {codebase} <- ask + liftIO (SyncV2.syncToFile codebase causalHash (Just branchRef) destSyncFile) >>= \case + Left err -> Cli.respond (Output.SyncPullError err) + Right _ -> pure () + +handleSyncFromFile :: Text -> FilePath -> ProjectAndBranch (Maybe ProjectName) ProjectBranchName -> Cli () +handleSyncFromFile description srcSyncFile branchToSync = do + pp <- Cli.getCurrentProjectPath + projectBranch <- Project.resolveProjectBranchInProject (pp ^. #project) (over #branch Just branchToSync) + let shouldValidate = True + SyncV2.syncFromFile shouldValidate srcSyncFile >>= \case + Left err -> Cli.respond (Output.SyncPullError err) + Right causalHash -> do + Cli.setProjectBranchRootToCausalHash (projectBranch ^. #branch) description causalHash + +handleSyncFromCodebase :: Text -> CodebasePath -> ProjectAndBranch ProjectName ProjectBranchName -> ProjectAndBranch (Maybe ProjectName) ProjectBranchName -> Cli () +handleSyncFromCodebase description srcCodebasePath srcBranch destBranch = do + Cli.Env {codebase} <- ask + pp <- Cli.getCurrentProjectPath + projectBranch <- Project.resolveProjectBranchInProject (pp ^. #project) (over #branch Just destBranch) + r <- liftIO $ Init.withOpenCodebase SqliteCodebase.init "sync-src" srcCodebasePath Init.DontLock (Init.MigrateAfterPrompt Init.Backup Init.Vacuum) \srcCodebase -> do + Codebase.withConnection srcCodebase \srcConn -> do + maySrcCausalHash <- Codebase.runTransaction srcCodebase $ do + let ProjectAndBranch srcProjName srcBranchName = srcBranch + runMaybeT do + project <- MaybeT (Q.loadProjectByName srcProjName) + branch <- MaybeT (Q.loadProjectBranchByName (project ^. #projectId) srcBranchName) + lift $ Project.getProjectBranchCausalHash branch + case maySrcCausalHash of + Nothing -> pure $ Left (Output.SyncFromCodebaseMissingProjectBranch srcBranch) + Just srcCausalHash -> do + let shouldValidate = True + Right . 
fmap (const srcCausalHash) <$> liftIO (SyncV2.syncFromCodebase shouldValidate srcConn codebase srcCausalHash) + + case r of + Left openCodebaseErr -> Cli.respond (Output.OpenCodebaseError srcCodebasePath openCodebaseErr) + Right (Left errOutput) -> Cli.respond errOutput + Right (Right (Right causalHash)) -> do + Cli.setProjectBranchRootToCausalHash (projectBranch ^. #branch) description causalHash + Right (Right (Left syncErr)) -> do + Cli.respond (Output.SyncPullError syncErr) diff --git a/unison-cli/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs index da06a5fb8e..acaacd23c9 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -27,6 +27,10 @@ module Unison.Codebase.Editor.Input IsGlobal, DeleteOutput (..), DeleteTarget (..), + + -- * Type aliases + ErrorMessageOrName, + RawQuery, ) where @@ -61,6 +65,12 @@ type SourceName = Text -- "foo.u" or "buffer 7" type PatchPath = Path.Split' +type ErrorMessageOrValue a = Either (P.Pretty P.ColorText) a + +type ErrorMessageOrName = ErrorMessageOrValue (HQ.HashQualified Name) + +type RawQuery = String + data OptionalPatch = NoPatch | DefaultPatch | UsePatch PatchPath deriving (Eq, Ord, Show) @@ -126,6 +136,9 @@ data Input | DiffNamespaceI BranchId2 BranchId2 -- old new | PullI !PullSourceTarget !PullMode | PushRemoteBranchI PushRemoteBranchInput + | SyncToFileI FilePath (ProjectAndBranch (Maybe ProjectName) (Maybe ProjectBranchName)) + | SyncFromFileI FilePath UnresolvedProjectBranch + | SyncFromCodebaseI FilePath (ProjectAndBranch ProjectName ProjectBranchName) UnresolvedProjectBranch | ResetI (BranchId2 {- namespace to reset it to -}) (Maybe UnresolvedProjectBranch {- ProjectBranch to reset -}) | -- | used in Welcome module to give directions to user -- @@ -141,7 +154,8 @@ data Input -- > names .foo.bar -- > names .foo.bar#asdflkjsdf -- > names #sdflkjsdfhsdf - NamesI IsGlobal (HQ.HashQualified Name) + -- > names foo.bar 
foo.baz #sdflkjsdfhsdf + NamesI IsGlobal [(RawQuery, ErrorMessageOrName)] | AliasTermI !Bool HashOrHQSplit' Path.Split' -- bool = force? | AliasTypeI !Bool HashOrHQSplit' Path.Split' -- bool = force? | AliasManyI [Path.HQSplit] Path' diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index fd6291553f..f27bb6855c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -35,6 +35,7 @@ import U.Codebase.Sqlite.ProjectReflog qualified as ProjectReflog import Unison.Auth.Types (CredentialFailure) import Unison.Cli.MergeTypes (MergeSourceAndTarget, MergeSourceOrTarget) import Unison.Cli.Share.Projects.Types qualified as Share +import Unison.Codebase (CodebasePath) import Unison.Codebase.Editor.Input import Unison.Codebase.Editor.Output.BranchDiff (BranchDiffOutput) import Unison.Codebase.Editor.Output.BranchDiff qualified as BD @@ -43,6 +44,7 @@ import Unison.Codebase.Editor.RemoteRepo import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) import Unison.Codebase.Editor.SlurpResult qualified as SR import Unison.Codebase.Editor.StructuredArgument (StructuredArgument) +import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError) import Unison.Codebase.IntegrityCheck (IntegrityResult (..)) import Unison.Codebase.Path (Path') import Unison.Codebase.Path qualified as Path @@ -80,6 +82,7 @@ import Unison.Share.Sync.Types qualified as Sync import Unison.ShortHash (ShortHash) import Unison.Symbol (Symbol) import Unison.Sync.Types qualified as Share (DownloadEntitiesError, UploadEntitiesError) +import Unison.SyncV2.Types qualified as SyncV2 import Unison.Syntax.Parser qualified as Parser import Unison.Term (Term) import Unison.Type (Type) @@ -264,10 +267,12 @@ data Output | MovedOverExistingBranch Path' | DeletedEverything | ListNames + String -- input namesQuery for which this output is being produced Int -- hq length to print References [(Reference, 
[HQ'.HashQualified Name])] -- type match, type names [(Referent, [HQ'.HashQualified Name])] -- term match, term names | GlobalListNames + String -- input namesQuery for which this output is being produced (ProjectAndBranch ProjectName ProjectBranchName) Int -- hq length to print References [(Reference, [HQ'.HashQualified Name])] -- type match, type names @@ -442,6 +447,9 @@ data Output | -- | A literal output message. Use this if it's too cumbersome to create a new Output constructor, e.g. for -- ephemeral progress messages that are just simple strings like "Loading branch..." Literal !(P.Pretty P.ColorText) + | SyncPullError (Sync.SyncError SyncV2.PullError) + | SyncFromCodebaseMissingProjectBranch (ProjectAndBranch ProjectName ProjectBranchName) + | OpenCodebaseError CodebasePath OpenCodebaseError data MoreEntriesThanShown = MoreEntriesThanShown | AllEntriesShown deriving (Eq, Show) @@ -549,7 +557,7 @@ isFailure o = case o of MoveRootBranchConfirmation -> False MovedOverExistingBranch {} -> False DeletedEverything -> False - ListNames _ tys tms -> null tms && null tys + ListNames _ _ tys tms -> null tms && null tys GlobalListNames {} -> False ListOfDefinitions _ _ _ ds -> null ds GlobalFindBranchResults _ _ _ _ -> False @@ -682,6 +690,9 @@ isFailure o = case o of IncoherentDeclDuringMerge {} -> True IncoherentDeclDuringUpdate {} -> True Literal _ -> False + SyncPullError {} -> True + SyncFromCodebaseMissingProjectBranch {} -> True + OpenCodebaseError {} -> True isNumberedFailure :: NumberedOutput -> Bool isNumberedFailure = \case diff --git a/unison-cli/src/Unison/CommandLine/FZFResolvers.hs b/unison-cli/src/Unison/CommandLine/FZFResolvers.hs index d72e6db9bd..21ed49793b 100644 --- a/unison-cli/src/Unison/CommandLine/FZFResolvers.hs +++ b/unison-cli/src/Unison/CommandLine/FZFResolvers.hs @@ -164,7 +164,7 @@ projectNameResolver = FZFResolver {getOptions = projectNameOptions} -- E.g. 
'@unison/base' projectNameOptions :: OptionFetcher projectNameOptions codebase _projCtx _searchBranch0 = do - fmap (into @Text . SqliteProject.name) <$> Codebase.runTransaction codebase Q.loadAllProjects + fmap (into @Text . SqliteProject.name) <$> Codebase.runTransaction codebase Q.loadAllProjectsByRecentlyAccessed -- | All possible local project/branch names. -- E.g. '@unison/base/main' @@ -172,17 +172,12 @@ projectBranchOptions :: OptionFetcher projectBranchOptions codebase projCtx _searchBranch0 = do projs <- Codebase.runTransaction codebase Q.loadAllProjectBranchNamePairs projs - & foldMap - ( \(names, projIds) -> - if projIds.project == projCtx.project.projectId - then -- If the branch is in the current project, put a shortened version of the branch name first, - -- then the long-form name at the end of the list (in case the user still types the full name) - [(0 :: Int, "/" <> into @Text names.branch), (2, into @Text names)] - else [(1, into @Text names)] + & filter + ( \(_names, projIds) -> + -- If it's the same as the current branch, just omit it. + projIds.branch /= projCtx.branch.branchId ) - -- Put branches in this project first. - & List.sort - & fmap snd + & fmap (into @Text . fst) & pure -- | All possible local branch names within the current project. diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 982b756490..c43a1873df 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -97,6 +97,9 @@ module Unison.CommandLine.InputPatterns pushCreate, pushExhaustive, pushForce, + syncToFile, + syncFromFile, + syncFromCodebase, quit, releaseDraft, renameBranch, @@ -288,9 +291,13 @@ formatStructuredArgument schLength = \case else "." <> s pathArgStr = Text.pack $ show pathArg --- | Converts an arbitrary argument to a `String`. This is for cases where the +-- | Converts an arbitrary argument to a `String`. 
+-- +-- This is for cases where the -- command /should/ accept a structured argument of some type, but currently -- wants a `String`. +-- +-- This can also be used where the input argument needs to be included in the output. unifyArgument :: I.Argument -> String unifyArgument = either id (Text.unpack . formatStructuredArgument Nothing) @@ -745,6 +752,55 @@ handleProjectAndBranchNamesArg = SA.ProjectBranch (ProjectAndBranch mproj branch) -> pure $ maybe That These mproj branch otherNumArg -> Left $ wrongStructuredArgument "a project or branch" otherNumArg +handleOptionalProjectAndBranch :: I.Argument -> Either (P.Pretty CT.ColorText) (ProjectAndBranch (Maybe ProjectName) (Maybe ProjectBranchName)) +handleOptionalProjectAndBranch = + either + (\str -> fmap intoProjectAndBranch . first (const $ expectedButActually' "a project or branch" str) . tryInto @(These ProjectName ProjectBranchName) $ Text.pack str) + $ \case + SA.Project project -> pure $ ProjectAndBranch (Just project) Nothing + SA.ProjectBranch (ProjectAndBranch mproj branch) -> pure $ ProjectAndBranch mproj (Just branch) + otherNumArg -> Left $ wrongStructuredArgument "a project or branch" otherNumArg + where + intoProjectAndBranch :: These ProjectName ProjectBranchName -> ProjectAndBranch (Maybe ProjectName) (Maybe ProjectBranchName) + intoProjectAndBranch = \case + This project -> ProjectAndBranch (Just project) Nothing + That branch -> ProjectAndBranch Nothing (Just branch) + These project branch -> ProjectAndBranch (Just project) (Just branch) + +handleBranchWithOptionalProject :: I.Argument -> Either (P.Pretty CT.ColorText) (ProjectAndBranch (Maybe ProjectName) ProjectBranchName) +handleBranchWithOptionalProject = + either + ( \str -> + Text.pack str + & tryInto @(These ProjectName ProjectBranchName) + & first (const $ expectedButActually' "a project branch" str) + >>= \case + These project branch -> pure $ ProjectAndBranch (Just project) branch + That branch -> pure $ ProjectAndBranch Nothing branch + 
This _project -> Left $ expectedButActually' "a project branch" str + ) + ( \case + SA.ProjectBranch (ProjectAndBranch mproj branch) -> pure $ ProjectAndBranch mproj branch + otherNumArg -> Left $ wrongStructuredArgument "a project branch" otherNumArg + ) + +handleBranchWithProject :: I.Argument -> Either (P.Pretty CT.ColorText) (ProjectAndBranch ProjectName ProjectBranchName) +handleBranchWithProject = + either + ( \str -> + Text.pack str + & tryInto @(These ProjectName ProjectBranchName) + & first (const $ expectedButActually' "a project branch" str) + >>= \case + These project branch -> pure $ ProjectAndBranch project branch + That _branch -> Left $ expectedButActually' "a project branch" str + This _project -> Left $ expectedButActually' "a project branch" str + ) + ( \case + SA.ProjectBranch (ProjectAndBranch (Just proj) branch) -> pure $ ProjectAndBranch proj branch + otherNumArg -> Left $ wrongStructuredArgument "a project branch" otherNumArg + ) + mergeBuiltins :: InputPattern mergeBuiltins = InputPattern @@ -2092,6 +2148,96 @@ pushExhaustive = branchInclusion = AllBranches } +syncToFile :: InputPattern +syncToFile = + InputPattern + { patternName = "sync.to-file", + aliases = [], + visibility = I.Hidden, + params = + Parameters [("file-path", filePathArg)] $ + Optional [("branch", projectAndBranchNamesArg suggestionsConfig)] Nothing, + help = + ( P.wrapColumn2 + [ ( makeExample syncToFile ["./branch.usync"], + "saves the current branch to the file `branch.usync`." + ), + ( makeExample syncToFile ["./main.usync", "/main"], + "saves the main branch to the file `main.usync`." 
+ ) + ] + ), + parse = \case + [filePath, branch] -> Input.SyncToFileI <$> unsupportedStructuredArgument makeStandalone "a file name" filePath <*> handleOptionalProjectAndBranch branch + [filePath] -> Input.SyncToFileI <$> unsupportedStructuredArgument makeStandalone "a file name" filePath <*> pure (ProjectAndBranch Nothing Nothing) + args -> wrongArgsLength "one or two arguments" args + } + where + suggestionsConfig = + ProjectBranchSuggestionsConfig + { showProjectCompletions = True, + projectInclusion = AllProjects, + branchInclusion = AllBranches + } + +syncFromFile :: InputPattern +syncFromFile = + InputPattern + { patternName = "sync.from-file", + aliases = [], + visibility = I.Hidden, + params = + Parameters [("file-path", filePathArg), ("destination branch", projectAndBranchNamesArg suggestionsConfig)] $ + Optional [] Nothing, + help = + ( P.wrapColumn2 + [ ( makeExample syncFromFile ["./feature.usync", "/feature"], + "Sets the /feature branch to the contents of the file `feature.usync`." 
+ ) + ] + ), + parse = \case + [filePath, branch] -> Input.SyncFromFileI <$> unsupportedStructuredArgument makeStandalone "a file name" filePath <*> handleBranchWithOptionalProject branch + args -> wrongArgsLength "exactly two arguments" args + } + where + suggestionsConfig = + ProjectBranchSuggestionsConfig + { showProjectCompletions = True, + projectInclusion = AllProjects, + branchInclusion = AllBranches + } + +syncFromCodebase :: InputPattern +syncFromCodebase = + InputPattern + { patternName = "sync.from-codebase", + aliases = [], + visibility = I.Hidden, + params = + Parameters + [ ("codebase-location", filePathArg), + ("branch-to-sync", projectAndBranchNamesArg suggestionsConfig), + ("destination-branch", projectAndBranchNamesArg suggestionsConfig) + ] + $ Optional [] Nothing, + help = + ( P.wrapColumn2 + [ (makeExample syncFromCodebase ["./codebase", "/feature", "/main"], "Sets the /feature branch to the contents of the codebase at ./codebase.") + ] + ), + parse = \case + [codebaseLocation, branchToSync, destinationBranch] -> Input.SyncFromCodebaseI <$> unsupportedStructuredArgument makeStandalone "a file name" codebaseLocation <*> handleBranchWithProject branchToSync <*> handleBranchWithOptionalProject destinationBranch + args -> wrongArgsLength "exactly three arguments" args + } + where + suggestionsConfig = + ProjectBranchSuggestionsConfig + { showProjectCompletions = True, + projectInclusion = AllProjects, + branchInclusion = AllBranches + } + mergeOldSquashInputPattern :: InputPattern mergeOldSquashInputPattern = InputPattern @@ -2685,16 +2831,30 @@ names isGlobal = cmdName [] I.Visible - (Parameters [("name or hash", definitionQueryArg)] $ Optional [] Nothing) - (P.wrap $ makeExample (names isGlobal) ["foo"] <> description) - \case - [thing] -> Input.NamesI isGlobal <$> handleHashQualifiedNameArg thing - args -> wrongArgsLength "exactly one argument" args - where + (Parameters [] $ OnePlus ("name or hash", definitionQueryArg)) description - | isGlobal 
= "Iteratively search across all projects and branches for names matching `foo`. Note that this is expected to be quite slow and is primarily for debugging issues with your codebase." - | otherwise = "List all known names for `foo` in the current branch." + $ \case + [] -> wrongArgsLength "at least one argument" [] + [rawArg] -> do + let arg = handleArg rawArg + case arg of + (_, Left errMsg) -> Left errMsg + (argString, Right name) -> pure $ Input.NamesI isGlobal [(argString, Right name)] + rawArgs -> do + let args = handleArg <$> rawArgs + pure $ Input.NamesI isGlobal args + where + isGlobalPreamble = "Iteratively search names or hashes across all projects and branches." + isNotGlobalPreamble = "Search names or hashes in the current branch." cmdName = if isGlobal then "debug.names.global" else "names" + description = + P.lines + [ if isGlobal then isGlobalPreamble else isNotGlobalPreamble, + P.wrap $ makeExample (names isGlobal) ["foo"] <> "List all known names for `foo`.", + P.wrap $ makeExample (names isGlobal) ["foo", "#bar"] <> "List all known names for the name `foo` and for the hash `#bar`.", + P.wrap $ makeExample (names isGlobal) [] <> "without arguments invokes a search to select names/hashes to list, which requires that `fzf` can be found within your PATH." 
+ ] + handleArg arg = (unifyArgument arg, handleHashQualifiedNameArg arg) dependents, dependencies :: InputPattern dependents = @@ -3653,6 +3813,9 @@ validInputs = pushCreate, pushExhaustive, pushForce, + syncToFile, + syncFromFile, + syncFromCodebase, quit, releaseDraft, renameBranch, diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index f347c33f8c..dfd09ee67d 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -14,6 +14,7 @@ import Data.List qualified as List import Data.List.Extra (notNull, nubOrd, nubOrdOn) import Data.List.NonEmpty qualified as NEList import Data.Map qualified as Map +import Data.Ord (comparing) import Data.Sequence qualified as Seq import Data.Set qualified as Set import Data.Set.NonEmpty (NESet) @@ -65,6 +66,7 @@ import Unison.Codebase.Editor.Output.PushPull qualified as PushPull import Unison.Codebase.Editor.SlurpResult qualified as SlurpResult import Unison.Codebase.Editor.StructuredArgument (StructuredArgument) import Unison.Codebase.Editor.StructuredArgument qualified as SA +import Unison.Codebase.Init.OpenCodebaseError qualified as CodebaseInit import Unison.Codebase.IntegrityCheck (IntegrityResult (..), prettyPrintIntegrityErrors) import Unison.Codebase.Patch qualified as Patch import Unison.Codebase.Path qualified as Path @@ -117,7 +119,9 @@ import Unison.Server.Backend (ShallowListEntry (..), TypeEntry (..)) import Unison.Server.Backend qualified as Backend import Unison.Server.SearchResultPrime qualified as SR' import Unison.Share.Sync.Types qualified as Share (CodeserverTransportError (..), GetCausalHashByPathError (..), PullError (..)) +import Unison.Share.Sync.Types qualified as Sync import Unison.Sync.Types qualified as Share +import Unison.SyncV2.Types qualified as SyncV2 import Unison.Syntax.DeclPrinter qualified as DeclPrinter import Unison.Syntax.HashQualified qualified as HQ 
(toText, unsafeFromVar) import Unison.Syntax.Name qualified as Name (toText) @@ -138,7 +142,6 @@ import Unison.Term (Term) import Unison.Term qualified as Term import Unison.Type (Type) import Unison.UnisonFile qualified as UF -import Unison.Util.ColorText qualified import Unison.Util.Conflicted (Conflicted (..)) import Unison.Util.Defn (Defn (..)) import Unison.Util.Defns (Defns (..)) @@ -863,23 +866,16 @@ notifyUser dir = \case "", output ] - ListNames len types terms -> - listOfNames len types terms - GlobalListNames projectBranchName len types terms -> do - output <- listOfNames len types terms + ListNames namesQuery len types terms -> + listOfNames namesQuery len types terms + GlobalListNames namesQuery projectBranchName len types terms -> do + output <- listOfNames namesQuery len types terms pure $ P.lines [ P.wrap $ "Found results in " <> P.text (into @Text projectBranchName), "", output ] - -- > names foo - -- Terms: - -- Hash: #asdflkjasdflkjasdf - -- Names: .util.frobnicate foo blarg.mcgee - -- - -- Term (with hash #asldfkjsdlfkjsdf): .util.frobnicate, foo, blarg.mcgee - -- Types (with hash #hsdflkjsdfsldkfj): Optional, Maybe, foo ListShallow buildPPE entries -> do let needPPE = entries @@ -972,6 +968,7 @@ notifyUser dir = \case -- defs in the codebase. In some cases it's fine for bindings to -- shadow codebase names, but you don't want it to capture them in -- the decompiled output. + let prettyBindings = P.bracket . P.lines $ P.wrap "The watch expression(s) reference these definitions:" @@ -2260,6 +2257,35 @@ notifyUser dir = \case <> "it. Then try the update again." ] Literal message -> pure message + SyncPullError syncErr -> + case syncErr of + Sync.TransportError te -> pure (prettyTransportError te) + Sync.SyncError pullErr -> pure (prettyPullV2Error pullErr) + SyncFromCodebaseMissingProjectBranch projectBranch -> + pure . 
P.wrap $ + "I couldn't sync from the codebase because the project branch" + <> prettyProjectAndBranchName projectBranch + <> "doesn't exist." + OpenCodebaseError codebasePath err -> case err of + CodebaseInit.OpenCodebaseDoesntExist -> + pure . P.wrap $ "I couldn't find a valid codebase at " <> prettyFilePath codebasePath + CodebaseInit.OpenCodebaseUnknownSchemaVersion schemaVersion -> + pure . P.wrap . P.lines $ + [ "I couldn't open the codebase at " <> prettyFilePath codebasePath <> ".", + "The schema version appears to be newer than the current UCM version can support.", + "You may need to upgrade UCM. The codebase is at schema version: " <> P.shown schemaVersion + ] + CodebaseInit.OpenCodebaseFileLockFailed -> do + pure . P.wrap . P.lines $ + [ "I couldn't open the codebase at " <> prettyFilePath codebasePath, + "It appears another process is using that codebase, please close other UCM instances and try again." + ] + CodebaseInit.OpenCodebaseRequiresMigration currentSV requiredSV -> + pure . P.wrap . P.lines $ + [ "I couldn't open the codebase at " <> prettyFilePath codebasePath, + "The codebase is at schema version " <> P.shown currentSV <> " but UCM requires schema version " <> P.shown requiredSV <> ".", + "Please open the other codebase with UCM directly to upgrade it to the latest version, then try again." 
+ ] prettyShareError :: ShareError -> Pretty prettyShareError = @@ -2279,6 +2305,26 @@ prettyDownloadEntitiesError = \case Share.DownloadEntitiesProjectNotFound project -> shareProjectNotFound project Share.DownloadEntitiesEntityValidationFailure err -> prettyEntityValidationFailure err +prettyBranchRef :: SyncV2.BranchRef -> Pretty +prettyBranchRef (SyncV2.BranchRef txt) = P.blue (P.text txt) + +prettyDownloadEntitiesErrorV2 :: SyncV2.DownloadEntitiesError -> Pretty +prettyDownloadEntitiesErrorV2 = \case + SyncV2.DownloadEntitiesNoReadPermission branchRef -> prettyBranchRef branchRef + SyncV2.DownloadEntitiesUserNotFound userHandle -> shareUserNotFound (Share.RepoInfo userHandle) + SyncV2.DownloadEntitiesProjectNotFound project -> shareProjectNotFound project + SyncV2.DownloadEntitiesEntityValidationFailure err -> prettyEntityValidationFailure err + SyncV2.DownloadEntitiesInvalidBranchRef msg ref -> prettyInvalidBranchRef msg ref + +prettyInvalidBranchRef :: Text -> SyncV2.BranchRef -> Pretty +prettyInvalidBranchRef msg (SyncV2.BranchRef txt) = + P.wrap $ + "The server sent an invalid branch reference." 
+ <> "The error was:" + <> P.text msg + <> "The branch reference was:" + <> P.text txt + prettyGetCausalHashByPathError :: Share.GetCausalHashByPathError -> Pretty prettyGetCausalHashByPathError = \case Share.GetCausalHashByPathErrorNoReadPermission sharePath -> noReadPermissionForPath sharePath @@ -2292,6 +2338,38 @@ prettyPullError = \case Share.PullError'NoHistoryAtPath sharePath -> P.wrap $ P.text "The server didn't find anything at" <> prettySharePath sharePath +prettyPullV2Error :: SyncV2.PullError -> Pretty +prettyPullV2Error = \case + SyncV2.PullError'DownloadEntities err -> prettyDownloadEntitiesErrorV2 err + SyncV2.PullError'Sync syncErr -> prettySyncErrorV2 syncErr + +prettySyncErrorV2 :: SyncV2.SyncError -> Pretty +prettySyncErrorV2 = \case + SyncV2.SyncErrorExpectedResultNotInMain hash -> + P.wrap $ + "The sync finished, but I'm missing an entity I expected." + <> "The missing hash is:" + <> prettyCausalHash hash + SyncV2.SyncErrorDeserializationFailure failure -> + P.wrap $ + "Failed to decode a response from the server." + <> "The error was:" + <> P.shown failure + SyncV2.SyncErrorMissingInitialChunk -> + P.wrap "The server didn't send the initial chunk of the response." + SyncV2.SyncErrorMisplacedInitialChunk -> + P.wrap "The server sent the initial chunk of the response in the wrong place." + SyncV2.SyncErrorStreamFailure msg -> + P.wrap $ + "Failed to stream data from the server." + <> "The error was:" + <> P.text msg + SyncV2.SyncErrorUnsupportedVersion version -> + P.wrap $ + "The server sent a response with an unsupported version." + <> "The version was:" + <> P.shown version + prettyUploadEntitiesError :: Share.UploadEntitiesError -> Pretty prettyUploadEntitiesError = \case Share.UploadEntitiesError'EntityValidationFailure validationFailureErr -> prettyEntityValidationFailure validationFailureErr @@ -2365,8 +2443,13 @@ prettyTransportError = \case Share.Timeout -> "The code server timed-out when responding to your request. 
Please try again later or report an issue if the problem persists." Share.UnexpectedResponse resp -> unexpectedServerResponse resp + Share.StreamingError err -> + P.lines + [ ("We encountered an error while streaming data from the code server: " <> P.text err), + P.red (P.text err) + ] -unexpectedServerResponse :: Servant.ResponseF LazyByteString.ByteString -> P.Pretty Unison.Util.ColorText.ColorText +unexpectedServerResponse :: Servant.ResponseF LazyByteString.ByteString -> Pretty unexpectedServerResponse resp = (P.lines . catMaybes) [ Just @@ -2918,44 +3001,63 @@ listOfDefinitions :: listOfDefinitions fscope ppe detailed results = pure $ listOfDefinitions' fscope ppe detailed results -listOfNames :: Int -> [(Reference, [HQ'.HashQualified Name])] -> [(Referent, [HQ'.HashQualified Name])] -> IO Pretty -listOfNames len types terms = do +listOfNames :: String -> Int -> [(Reference, [HQ'.HashQualified Name])] -> [(Referent, [HQ'.HashQualified Name])] -> IO Pretty +listOfNames namesQuery len types terms = do if null types && null terms then - pure . P.callout "😶" $ - P.sepNonEmpty "\n\n" $ - [ P.wrap "I couldn't find anything by that name." + pure + . P.sepNonEmpty "\n" + $ [ P.red prettyQuery, + P.string "😶", + P.wrap "I couldn't find anything by that name." ] else - pure . P.sepNonEmpty "\n\n" $ - [ formatTypes types, - formatTerms terms + pure . P.sepNonEmpty "\n" $ + [ P.green prettyQuery, + makeTable prettyRows ] where - formatTerms tms = - P.lines . P.nonEmpty $ P.plural tms (P.blue "Term") : List.intersperse "" (go <$> tms) + prettyQuery = P.singleQuoted' (P.string namesQuery) ":" + + makeTable = + P.column3Header "Hash" "Kind" "Names" + + prettyRows = makePrettyRows $ List.sortBy compareRows rows + makePrettyRows = + fmap + ( \(ref, kind, hqs) -> + ( P.syntaxToColor ref, + P.blue kind, + P.group $ + P.commas $ + P.bold . P.syntaxToColor . 
prettyHashQualified' + <$> hqs + ) + ) + + -- Compare rows by their list of names, first by comparing each name in the list + -- then by the length of the list of they share the same prefix + compareRows :: (a, b, [HQ'.HashQualified Name]) -> (a, b, [HQ'.HashQualified Name]) -> Ordering + compareRows (_, _, hqs1) (_, _, hqs2) = + Name.compareAlphabetical hqs1 hqs2 <> comparing length hqs1 hqs2 + + rows = termRows terms ++ typeRows types + + termRows terms = + makeSortedRow "Term" <$> prettyTerms where - go (ref, hqs) = - P.column2 - [ ("Hash:", P.syntaxToColor (prettyReferent len ref)), - ( "Names: ", - P.group $ - P.spaced $ - P.bold . P.syntaxToColor . prettyHashQualified' <$> List.sortBy Name.compareAlphabetical hqs - ) - ] - formatTypes types = - P.lines . P.nonEmpty $ P.plural types (P.blue "Type") : List.intersperse "" (go <$> types) + prettyTerms = terms & over (mapped . _1) (prettyReferent len) + + typeRows types = + makeSortedRow "Type" <$> prettyTypes where - go (ref, hqs) = - P.column2 - [ ("Hash:", P.syntaxToColor (prettyReference len ref)), - ( "Names:", - P.group $ - P.spaced $ - P.bold . P.syntaxToColor . prettyHashQualified' <$> List.sortBy Name.compareAlphabetical hqs - ) - ] + prettyTypes = types & over (mapped . _1) (prettyReference len) + + makeSortedRow kind (ref, hqs) = + ( ref, + kind, + List.sortBy Name.compareAlphabetical hqs + ) data ShowNumbers = ShowNumbers | HideNumbers diff --git a/unison-cli/src/Unison/Share/Sync/Types.hs b/unison-cli/src/Unison/Share/Sync/Types.hs index a53d14acbb..1d7066688c 100644 --- a/unison-cli/src/Unison/Share/Sync/Types.hs +++ b/unison-cli/src/Unison/Share/Sync/Types.hs @@ -39,6 +39,8 @@ data CodeserverTransportError | Unauthenticated Servant.BaseUrl | UnexpectedResponse Servant.Response | UnreachableCodeserver Servant.BaseUrl + | -- I wish Servant gave us more detail, but it's just Text. I don't think we ever hit these errors though. 
+ StreamingError Text deriving stock (Show) deriving anyclass (Exception) diff --git a/unison-cli/src/Unison/Share/SyncV2.hs b/unison-cli/src/Unison/Share/SyncV2.hs new file mode 100644 index 0000000000..bbce0d95e6 --- /dev/null +++ b/unison-cli/src/Unison/Share/SyncV2.hs @@ -0,0 +1,395 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TypeOperators #-} + +module Unison.Share.SyncV2 + ( syncFromFile, + syncToFile, + syncFromCodebase, + ) +where + +import Codec.Serialise qualified as CBOR +import Conduit (ConduitT) +import Conduit qualified as C +import Control.Lens +import Control.Monad.Except +import Control.Monad.Reader (ask) +import Control.Monad.ST (ST, stToIO) +import Control.Monad.State +import Data.Attoparsec.ByteString qualified as A +import Data.Attoparsec.ByteString.Char8 qualified as A8 +import Data.ByteString qualified as BS +import Data.ByteString.Lazy qualified as BL +import Data.Conduit.Attoparsec qualified as C +import Data.Conduit.List qualified as C +import Data.Conduit.Zlib qualified as C +import Data.Graph qualified as Graph +import Data.Map qualified as Map +import Data.Set qualified as Set +import Data.Text.IO qualified as Text +import Servant.Conduit () +import System.Console.Regions qualified as Console.Regions +import U.Codebase.HashTags (CausalHash) +import U.Codebase.Sqlite.Queries qualified as Q +import U.Codebase.Sqlite.TempEntity (TempEntity) +import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle) +import Unison.Cli.Monad (Cli) +import Unison.Cli.Monad qualified as Cli +import Unison.Codebase qualified as Codebase +import Unison.Debug qualified as Debug +import Unison.Hash32 (Hash32) +import Unison.Prelude +import Unison.Share.ExpectedHashMismatches (expectedCausalHashMismatches, expectedComponentHashMismatches) +import Unison.Share.Sync.Types +import Unison.Sqlite qualified as Sqlite +import Unison.Sync.Common (causalHashToHash32, hash32ToCausalHash, tempEntityToEntity) +import Unison.Sync.Common qualified as Sync +import 
Unison.Sync.EntityValidation qualified as EV +import Unison.Sync.Types qualified as Share +import Unison.Sync.Types qualified as Sync +import Unison.SyncV2.Types (CBORBytes) +import Unison.SyncV2.Types qualified as SyncV2 +import Unison.Util.Servant.CBOR qualified as CBOR +import Unison.Util.Timing qualified as Timing +import UnliftIO qualified as IO + +type Stream i o = ConduitT i o StreamM () + +type SyncErr = SyncError SyncV2.PullError + +type StreamM = (ExceptT SyncErr (C.ResourceT IO)) + +batchSize :: Int +batchSize = 5000 + +------------------------------------------------------------------------------------------------------------------------ +-- Download entities + +validateAndSave :: Bool -> (Codebase.Codebase IO v a) -> [(Hash32, TempEntity)] -> StreamM () +validateAndSave shouldValidate codebase entities = do + let validateEntities = + runExceptT $ when shouldValidate (batchValidateEntities entities) + -- Validation is slow, run it in parallel with insertion, but don't commit the transaction until we're done + -- validation. + ExceptT . liftIO $ IO.withAsync validateEntities \validationTask -> do + Timing.time "Inserting entities" $ Codebase.runTransactionExceptT codebase do + for_ entities \(hash, entity) -> do + void . lift $ Q.saveTempEntityInMain v2HashHandle hash entity + lift (Sqlite.unsafeIO (IO.wait validationTask)) >>= \case + Left err -> throwError err + Right _ -> pure () + +-- | Syncs a stream which could send entities in any order. 
+syncUnsortedStream :: + Bool -> + (Codebase.Codebase IO v a) -> + Stream () SyncV2.EntityChunk -> + StreamM () +syncUnsortedStream shouldValidate codebase stream = do + Debug.debugLogM Debug.Temp $ "Syncing unsorted stream" + allResults <- C.runConduit $ stream C..| C.sinkList + allEntities <- ExceptT $ Timing.time "Unpacking chunks" $ liftIO $ Codebase.runTransactionExceptT codebase $ do unpackChunks allResults + let sortedEntities = sortDependencyFirst allEntities + validateAndSave shouldValidate codebase sortedEntities + +-- | Syncs a stream which sends entities which are already sorted in dependency order. +syncSortedStream :: + Bool -> + (Codebase.Codebase IO v a) -> + Stream () SyncV2.EntityChunk -> + StreamM () +syncSortedStream shouldValidate codebase stream = do + Debug.debugLogM Debug.Temp $ "Syncing sorted stream" + let handler :: Stream [SyncV2.EntityChunk] o + handler = C.mapM_C \chunkBatch -> do + entityBatch <- mapExceptT lift . ExceptT $ Codebase.runTransactionExceptT codebase do for chunkBatch unpackChunk + validateAndSave shouldValidate codebase (catMaybes entityBatch) + C.runConduit $ stream C..| C.chunksOf batchSize C..| handler + +unpackChunk :: SyncV2.EntityChunk -> ExceptT SyncErr Sqlite.Transaction (Maybe (Hash32, TempEntity)) +unpackChunk = \case + SyncV2.EntityChunk {hash, entityCBOR = entityBytes} -> do + -- Only want entities we don't already have + lift (Q.entityLocation hash) >>= \case + Just Q.EntityInMainStorage -> pure Nothing + _ -> do + (Just . (hash,)) <$> unpackEntity entityBytes + where + unpackEntity :: (CBORBytes TempEntity) -> ExceptT SyncErr Sqlite.Transaction TempEntity + unpackEntity entityBytes = do + case CBOR.deserialiseOrFailCBORBytes entityBytes of + Left err -> do throwError $ (SyncError . 
SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err) + Right entity -> pure entity + +unpackChunks :: [SyncV2.EntityChunk] -> ExceptT SyncErr Sqlite.Transaction [(Hash32, TempEntity)] +unpackChunks xs = do + for xs unpackChunk + <&> catMaybes + +batchValidateEntities :: [(Hash32, TempEntity)] -> ExceptT SyncErr IO () +batchValidateEntities entities = do + mismatches <- fmap catMaybes $ liftIO $ IO.pooledForConcurrently entities \(hash, entity) -> do + IO.evaluate $ EV.validateTempEntity hash entity + for_ mismatches \case + err@(Share.EntityHashMismatch et (Share.HashMismatchForEntity {supplied, computed})) -> + let expectedMismatches = case et of + Share.TermComponentType -> expectedComponentHashMismatches + Share.DeclComponentType -> expectedComponentHashMismatches + Share.CausalType -> expectedCausalHashMismatches + _ -> mempty + in case Map.lookup supplied expectedMismatches of + Just expected + | expected == computed -> pure () + _ -> do + throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err + err -> do + throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err + +streamIntoCodebase :: Bool -> Codebase.Codebase IO v a -> SyncV2.StreamInitInfo -> Stream () SyncV2.EntityChunk -> StreamM () +streamIntoCodebase shouldValidate codebase SyncV2.StreamInitInfo {version, entitySorting, numEntities = numEntities} stream = ExceptT do + withStreamProgressCallback (fromIntegral <$> numEntities) \countC -> runExceptT do + let stream' = stream C..| countC + case version of + (SyncV2.Version 1) -> pure () + v -> throwError . SyncError . 
SyncV2.PullError'Sync $ SyncV2.SyncErrorUnsupportedVersion v + + case entitySorting of + SyncV2.DependenciesFirst -> syncSortedStream shouldValidate codebase stream' + SyncV2.Unsorted -> syncUnsortedStream shouldValidate codebase stream' + +afterSyncChecks :: Codebase.Codebase IO v a -> Hash32 -> ExceptT (SyncError SyncV2.PullError) IO () +afterSyncChecks codebase hash = do + lift (didCausalSuccessfullyImport codebase hash) >>= \case + False -> do + throwError (SyncError (SyncV2.PullError'Sync . SyncV2.SyncErrorExpectedResultNotInMain . hash32ToCausalHash $ hash)) + True -> pure () + void $ liftIO (Codebase.withConnection codebase Sqlite.vacuum) + where + -- Verify that the expected hash made it into main storage. + didCausalSuccessfullyImport :: Codebase.Codebase IO v a -> Hash32 -> IO Bool + didCausalSuccessfullyImport codebase hash = do + let expectedHash = hash32ToCausalHash hash + isJust <$> (Codebase.runTransaction codebase $ Q.loadCausalByCausalHash expectedHash) + +-- | Topologically sort entities based on their dependencies. 
+sortDependencyFirst :: [(Hash32, TempEntity)] -> [(Hash32, TempEntity)] +sortDependencyFirst entities = do + let adjList = entities <&> \(hash32, entity) -> ((hash32, entity), hash32, Set.toList $ Share.entityDependencies (tempEntityToEntity entity)) + (graph, vertexInfo, _vertexForKey) = Graph.graphFromEdges adjList + in Graph.reverseTopSort graph <&> \v -> (view _1 $ vertexInfo v) + +syncFromFile :: + Bool -> + -- | Location of the sync-file + FilePath -> + Cli (Either (SyncError SyncV2.PullError) CausalHash) +syncFromFile shouldValidate syncFilePath = do + Cli.Env {codebase} <- ask + runExceptT do + Debug.debugLogM Debug.Temp $ "Kicking off sync" + mapExceptT liftIO $ Timing.time "File Sync" $ do + header <- mapExceptT C.runResourceT $ do + let stream = C.sourceFile syncFilePath C..| C.ungzip C..| decodeUnframedEntities + (header, rest) <- initializeStream stream + streamIntoCodebase shouldValidate codebase header rest + pure header + afterSyncChecks codebase (SyncV2.rootCausalHash header) + pure . hash32ToCausalHash $ SyncV2.rootCausalHash header + +syncFromCodebase :: + Bool -> + -- | The codebase to sync from. + Sqlite.Connection -> + (Codebase.Codebase IO v a) -> + -- | The hash to sync. + CausalHash -> + IO (Either (SyncError SyncV2.PullError) ()) +syncFromCodebase shouldValidate srcConn destCodebase causalHash = do + liftIO . C.runResourceT . 
runExceptT $ withEntityStream srcConn causalHash Nothing \_total entityStream -> do + (header, rest) <- initializeStream entityStream + streamIntoCodebase shouldValidate destCodebase header rest + mapExceptT liftIO (afterSyncChecks destCodebase (causalHashToHash32 causalHash)) + +withEntityStream :: + (MonadIO m) => + Sqlite.Connection -> + CausalHash -> + Maybe SyncV2.BranchRef -> + (Int -> Stream () SyncV2.DownloadEntitiesChunk -> m r) -> + m r +withEntityStream conn rootHash mayBranchRef callback = do + entities <- liftIO $ withEntityLoadingCallback $ \counter -> do + Sqlite.runTransaction conn (depsForCausal rootHash counter) + liftIO $ Text.hPutStrLn IO.stderr $ "Finished loading entities, writing sync-file." + let totalEntities = fromIntegral $ Map.size entities + let initialChunk = + SyncV2.InitialC + ( SyncV2.StreamInitInfo + { rootCausalHash = causalHashToHash32 rootHash, + version = SyncV2.Version 1, + entitySorting = SyncV2.DependenciesFirst, + numEntities = Just $ fromIntegral totalEntities, + rootBranchRef = mayBranchRef + } + ) + let contents = + entities + & fmap (Sync.entityToTempEntity id) + & Map.toList + & sortDependencyFirst + & ( fmap \(hash, entity) -> + let entityCBOR = (CBOR.serialiseCBORBytes entity) + in SyncV2.EntityC (SyncV2.EntityChunk {hash, entityCBOR}) + ) + & (initialChunk :) + let stream = C.yieldMany contents + callback totalEntities stream + +syncToFile :: + Codebase.Codebase IO v a -> + CausalHash -> + Maybe SyncV2.BranchRef -> + FilePath -> + IO (Either SyncErr ()) +syncToFile codebase rootHash mayBranchRef destFilePath = do + liftIO $ Codebase.withConnection codebase \conn -> do + C.runResourceT $ + withEntityStream conn rootHash mayBranchRef \mayTotal stream -> do + withStreamProgressCallback (Just mayTotal) \countC -> runExceptT do + C.runConduit $ stream C..| countC C..| C.map (BL.toStrict . CBOR.serialise) C..| C.transPipe liftIO C.gzip C..| C.sinkFile destFilePath + +-- | Collect all dependencies of a given causal hash. 
+depsForCausal :: CausalHash -> (Int -> IO ()) -> Sqlite.Transaction (Map Hash32 (Sync.Entity Text Hash32 Hash32)) +depsForCausal causalHash counter = do + flip execStateT mempty $ expandEntities (causalHashToHash32 causalHash) + where + expandEntities :: Hash32 -> ((StateT (Map Hash32 (Sync.Entity Text Hash32 Hash32)) Sqlite.Transaction)) () + expandEntities hash32 = do + gets (Map.member hash32) >>= \case + True -> pure () + False -> do + entity <- lift $ Sync.expectEntity hash32 + modify (Map.insert hash32 entity) + lift . Sqlite.unsafeIO $ counter 1 + traverseOf_ Sync.entityHashes_ expandEntities entity + +-- | Gets the framed chunks from a NetString framed stream. +_unNetString :: ConduitT ByteString ByteString StreamM () +_unNetString = do + bs <- C.sinkParser $ do + len <- A8.decimal + _ <- A8.char ':' + bs <- A.take len + _ <- A8.char ',' + pure bs + C.yield bs + +_decodeFramedEntity :: ByteString -> StreamM SyncV2.DownloadEntitiesChunk +_decodeFramedEntity bs = do + case CBOR.deserialiseOrFail (BL.fromStrict bs) of + Left err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err + Right chunk -> pure chunk + +-- Expects a stream of tightly-packed CBOR entities without any framing/separators. +decodeUnframedEntities :: ConduitT ByteString SyncV2.DownloadEntitiesChunk StreamM () +decodeUnframedEntities = C.transPipe (mapExceptT (lift . stToIO)) $ do + C.await >>= \case + Nothing -> pure () + Just bs -> do + d <- newDecoder + loop bs d + where + newDecoder :: ConduitT ByteString SyncV2.DownloadEntitiesChunk (ExceptT SyncErr (ST s)) (Maybe ByteString -> ST s (CBOR.IDecode s (SyncV2.DownloadEntitiesChunk))) + newDecoder = do + (lift . lift) CBOR.deserialiseIncremental >>= \case + CBOR.Done _ _ _ -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorStreamFailure "Invalid initial decoder" + CBOR.Fail _ _ err -> throwError . SyncError . 
SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err + CBOR.Partial k -> pure k + loop :: ByteString -> (Maybe ByteString -> ST s (CBOR.IDecode s (SyncV2.DownloadEntitiesChunk))) -> ConduitT ByteString SyncV2.DownloadEntitiesChunk (ExceptT SyncErr (ST s)) () + loop bs k = do + (lift . lift) (k (Just bs)) >>= \case + CBOR.Fail _ _ err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err + CBOR.Partial k' -> do + -- We need more input, try to get some + nextBS <- C.await + case nextBS of + Nothing -> do + -- No more input, try to finish up the decoder. + (lift . lift) (k' Nothing) >>= \case + CBOR.Done _ _ a -> C.yield a + CBOR.Fail _ _ err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err + CBOR.Partial _ -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorStreamFailure "Unexpected end of input" + Just bs' -> + -- Have some input, keep going. + loop bs' k' + CBOR.Done rem _ a -> do + C.yield a + if BS.null rem + then do + -- If we had no leftovers, we can check if there's any input left. + C.await >>= \case + Nothing -> pure () + Just bs'' -> do + -- If we have input left, start up a new decoder. + k <- newDecoder + loop bs'' k + else do + -- We have leftovers, start a new decoder and use those. + k <- newDecoder + loop rem k + +-- | Peel the header off the stream and parse the remaining entity chunks. +initializeStream :: Stream () SyncV2.DownloadEntitiesChunk -> StreamM (SyncV2.StreamInitInfo, Stream () SyncV2.EntityChunk) +initializeStream stream = do + (streamRemainder, init) <- stream C.$$+ C.headC + Debug.debugM Debug.Temp "Got initial chunk: " init + case init of + Nothing -> throwError . SyncError . 
SyncV2.PullError'Sync $ SyncV2.SyncErrorMissingInitialChunk + Just chunk -> do + case chunk of + SyncV2.InitialC info -> do + let entityStream = C.unsealConduitT streamRemainder C..| C.mapM parseEntity + pure $ (info, entityStream) + SyncV2.EntityC _ -> do + Debug.debugLogM Debug.Temp $ "Got unexpected entity chunk" + throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorMissingInitialChunk + SyncV2.ErrorC (SyncV2.ErrorChunk err) -> throwError . SyncError . SyncV2.PullError'DownloadEntities $ err + where + parseEntity :: SyncV2.DownloadEntitiesChunk -> StreamM SyncV2.EntityChunk + parseEntity = \case + SyncV2.EntityC chunk -> pure chunk + SyncV2.ErrorC (SyncV2.ErrorChunk err) -> throwError . SyncError $ SyncV2.PullError'DownloadEntities err + SyncV2.InitialC {} -> throwError . SyncError $ SyncV2.PullError'Sync SyncV2.SyncErrorMisplacedInitialChunk + +-- Provide the given action a callback that display to the terminal. +withStreamProgressCallback :: (MonadIO m, MonadUnliftIO n) => Maybe Int -> (ConduitT i i m () -> n a) -> n a +withStreamProgressCallback total action = do + entitiesDownloadedVar <- IO.newTVarIO (0 :: Int) + IO.withRunInIO \toIO -> do + Console.Regions.displayConsoleRegions do + Console.Regions.withConsoleRegion Console.Regions.Linear \region -> do + Console.Regions.setConsoleRegion region do + entitiesDownloaded <- IO.readTVar entitiesDownloadedVar + pure $ + "\n Processed " + <> tShow entitiesDownloaded + <> maybe "" (\total -> " / " <> tShow total) total + <> " entities...\n\n" + toIO $ action $ C.awaitForever \i -> do + liftIO $ IO.atomically (IO.modifyTVar' entitiesDownloadedVar (+ 1)) + C.yield i + +withEntityLoadingCallback :: (MonadUnliftIO m) => ((Int -> m ()) -> m a) -> m a +withEntityLoadingCallback action = do + counterVar <- IO.newTVarIO (0 :: Int) + IO.withRunInIO \toIO -> do + Console.Regions.displayConsoleRegions do + Console.Regions.withConsoleRegion Console.Regions.Linear \region -> do + Console.Regions.setConsoleRegion 
region do + processed <- IO.readTVar counterVar + pure $ + "\n Loading " + <> tShow processed + <> " entities...\n\n" + toIO $ action $ \i -> do + liftIO $ IO.atomically (IO.modifyTVar' counterVar (+ i)) diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index ab8d4ecc07..8a40ecd6e4 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -74,6 +74,7 @@ library Unison.Codebase.Editor.HandleInput.MoveBranch Unison.Codebase.Editor.HandleInput.MoveTerm Unison.Codebase.Editor.HandleInput.MoveType + Unison.Codebase.Editor.HandleInput.Names Unison.Codebase.Editor.HandleInput.NamespaceDependencies Unison.Codebase.Editor.HandleInput.NamespaceDiffUtils Unison.Codebase.Editor.HandleInput.ProjectClone @@ -88,6 +89,7 @@ library Unison.Codebase.Editor.HandleInput.Run Unison.Codebase.Editor.HandleInput.RuntimeUtils Unison.Codebase.Editor.HandleInput.ShowDefinition + Unison.Codebase.Editor.HandleInput.SyncV2 Unison.Codebase.Editor.HandleInput.TermResolution Unison.Codebase.Editor.HandleInput.Tests Unison.Codebase.Editor.HandleInput.Todo @@ -151,6 +153,7 @@ library Unison.Share.ExpectedHashMismatches Unison.Share.Sync Unison.Share.Sync.Types + Unison.Share.SyncV2 Unison.Util.HTTP Unison.Version hs-source-dirs: @@ -198,12 +201,15 @@ library , aeson-pretty , ansi-terminal , async + , attoparsec , base , bytestring , cmark , co-log-core , code-page , concurrent-output + , conduit + , conduit-extra , containers >=0.6.3 , cryptonite , directory @@ -239,8 +245,10 @@ library , recover-rtti , regex-tdfa , semialign + , serialise , servant , servant-client + , servant-conduit , stm , temporary , text diff --git a/unison-core/src/Unison/Util/Defn.hs b/unison-core/src/Unison/Util/Defn.hs index d897491de4..26a0fdd222 100644 --- a/unison-core/src/Unison/Util/Defn.hs +++ b/unison-core/src/Unison/Util/Defn.hs @@ -3,7 +3,28 @@ module Unison.Util.Defn ) where +import Data.Bifoldable (Bifoldable (..)) +import Data.Bifunctor (Bifunctor (..)) +import 
Data.Bitraversable (Bitraversable (..)) +import GHC.Generics (Generic) + -- | A "definition" is either a term or a type. data Defn term typ = TermDefn term | TypeDefn typ + deriving stock (Generic, Functor, Foldable, Traversable, Show, Eq, Ord) + +instance Bifunctor Defn where + bimap f g = \case + TermDefn x -> TermDefn (f x) + TypeDefn y -> TypeDefn (g y) + +instance Bifoldable Defn where + bifoldMap f g = \case + TermDefn x -> f x + TypeDefn y -> g y + +instance Bitraversable Defn where + bitraverse f g = \case + TermDefn x -> TermDefn <$> f x + TypeDefn y -> TypeDefn <$> g y diff --git a/unison-core/src/Unison/Util/Defns.hs b/unison-core/src/Unison/Util/Defns.hs index fed00742b4..cacaac8d1f 100644 --- a/unison-core/src/Unison/Util/Defns.hs +++ b/unison-core/src/Unison/Util/Defns.hs @@ -4,6 +4,8 @@ module Unison.Util.Defns DefnsF2, DefnsF3, DefnsF4, + terms_, + types_, alignDefnsWith, defnsAreEmpty, fromTerms, @@ -19,6 +21,7 @@ module Unison.Util.Defns ) where +import Control.Lens (Lens) import Data.Align (Semialign, alignWith) import Data.Bifoldable (Bifoldable, bifoldMap) import Data.Bitraversable (Bitraversable, bitraverse) @@ -31,7 +34,7 @@ data Defns terms types = Defns { terms :: terms, types :: types } - deriving stock (Generic, Functor, Show) + deriving stock (Generic, Functor, Show, Eq, Ord) deriving (Monoid, Semigroup) via GenericSemigroupMonoid (Defns terms types) instance Bifoldable Defns where @@ -46,6 +49,12 @@ instance Bitraversable Defns where bitraverse f g (Defns x y) = Defns <$> f x <*> g y +terms_ :: Lens (Defns terms types) (Defns terms' types) terms terms' +terms_ f (Defns x y) = (\x' -> Defns x' y) <$> f x + +types_ :: Lens (Defns terms types) (Defns terms types') types types' +types_ f (Defns x y) = (\y' -> Defns x y') <$> f y + -- | A common shape of definitions - terms and types are stored in the same structure. 
type DefnsF f terms types = Defns (f terms) (f types) diff --git a/unison-core/src/Unison/Util/Nametree.hs b/unison-core/src/Unison/Util/Nametree.hs index e87bdde344..50ae6d1510 100644 --- a/unison-core/src/Unison/Util/Nametree.hs +++ b/unison-core/src/Unison/Util/Nametree.hs @@ -3,6 +3,7 @@ module Unison.Util.Nametree Nametree (..), traverseNametreeWithName, unfoldNametree, + unionWith, -- ** Flattening and unflattening flattenNametree, @@ -33,6 +34,16 @@ data Nametree a = Nametree } deriving stock (Functor, Foldable, Traversable, Generic, Show) +unionWith :: (a -> a -> a) -> Nametree a -> Nametree a -> Nametree a +unionWith f (Nametree x xs) (Nametree y ys) = + Nametree (f x y) (Map.unionWith (unionWith f) xs ys) + +instance (Semigroup a) => Semigroup (Nametree a) where + (<>) = unionWith (<>) + +instance (Monoid a) => Monoid (Nametree a) where + mempty = Nametree mempty mempty + instance Semialign Nametree where alignWith :: (These a b -> c) -> Nametree a -> Nametree b -> Nametree c alignWith f (Nametree x xs) (Nametree y ys) = diff --git a/unison-merge/package.yaml b/unison-merge/package.yaml index 53b339cf9f..1ef15a7954 100644 --- a/unison-merge/package.yaml +++ b/unison-merge/package.yaml @@ -7,6 +7,7 @@ ghc-options: -Wall dependencies: - base - containers + - either - lens - mtl - nonempty-containers diff --git a/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs b/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs index 697e693d6b..97fe824740 100644 --- a/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs +++ b/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs @@ -132,7 +132,7 @@ data IncoherentDeclReason -- Foo.Bar#Foo IncoherentDeclReason'NestedDeclAlias !Name !Name -- shorter name, longer name | IncoherentDeclReason'StrayConstructor !TypeReferenceId !Name - deriving stock (Show) + deriving stock (Eq, Show) checkDeclCoherency :: (HasCallStack) => diff --git a/unison-merge/src/Unison/Merge/Diff.hs b/unison-merge/src/Unison/Merge/Diff.hs index 
39be392c28..37625c66c7 100644 --- a/unison-merge/src/Unison/Merge/Diff.hs +++ b/unison-merge/src/Unison/Merge/Diff.hs @@ -1,12 +1,18 @@ module Unison.Merge.Diff ( nameBasedNamespaceDiff, + humanizeDiffs, ) where +import Data.Either.Combinators (mapRight) +import Data.List.NonEmpty qualified as NEL +import Data.List.NonEmpty qualified as NEList import Data.Map.Strict qualified as Map -import Data.Semialign (alignWith) +import Data.Semialign (Unalign (..), alignWith) import Data.Set qualified as Set +import Data.Set.NonEmpty qualified as NESet import Data.These (These (..)) +import Data.Zip qualified as Zip import U.Codebase.Reference (TypeReference) import Unison.ConstructorReference (GConstructorReference (..)) import Unison.DataDeclaration (Decl) @@ -14,20 +20,24 @@ import Unison.DataDeclaration qualified as DataDeclaration import Unison.DeclNameLookup (DeclNameLookup) import Unison.DeclNameLookup qualified as DeclNameLookup import Unison.Hash (Hash (Hash)) -import Unison.HashQualifiedPrime qualified as HQ' import Unison.Merge.DiffOp (DiffOp (..)) +import Unison.Merge.HumanDiffOp (HumanDiffOp (..)) import Unison.Merge.PartialDeclNameLookup (PartialDeclNameLookup (..)) import Unison.Merge.Synhash qualified as Synhash import Unison.Merge.Synhashed (Synhashed (..)) +import Unison.Merge.Synhashed qualified as Synhashed import Unison.Merge.ThreeWay (ThreeWay (..)) import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Merge.TwoWay (TwoWay (..)) import Unison.Merge.Updated (Updated (..)) import Unison.Name (Name) +import Unison.Names (Names) +import Unison.Names qualified as Names import Unison.Parser.Ann (Ann) import Unison.Prelude hiding (catMaybes) import Unison.PrettyPrintEnv (PrettyPrintEnv (..)) -import Unison.PrettyPrintEnv qualified as Ppe +import Unison.PrettyPrintEnv qualified as PPE +import Unison.PrettyPrintEnvDecl qualified as PPED import Unison.Reference (Reference' (..), TermReference, TermReferenceId, TypeReferenceId) import Unison.Referent 
(Referent) import Unison.Referent qualified as Referent @@ -36,7 +46,10 @@ import Unison.Syntax.Name qualified as Name import Unison.Term (Term) import Unison.Util.BiMultimap (BiMultimap) import Unison.Util.BiMultimap qualified as BiMultimap -import Unison.Util.Defns (Defns (..), DefnsF2, DefnsF3, zipDefnsWith) +import Unison.Util.Defns (Defns (..), DefnsF, DefnsF2, DefnsF3, zipDefnsWith) +import Unison.Util.Defns qualified as Defns +import Unison.Util.Relation (Relation) +import Unison.Util.Relation qualified as Rel -- | @nameBasedNamespaceDiff db declNameLookups defns@ returns Alice's and Bob's name-based namespace diffs, each in the -- form: @@ -50,40 +63,131 @@ nameBasedNamespaceDiff :: (HasCallStack) => TwoWay DeclNameLookup -> PartialDeclNameLookup -> + ThreeWay PPED.PrettyPrintEnvDecl -> ThreeWay (Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name)) -> Defns (Map TermReferenceId (Term Symbol Ann)) (Map TypeReferenceId (Decl Symbol Ann)) -> - TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference) -nameBasedNamespaceDiff declNameLookups lcaDeclNameLookup defns hydratedDefns = - let lcaHashes = synhashLcaDefns ppe lcaDeclNameLookup defns.lca hydratedDefns - hashes = synhashDefns ppe hydratedDefns <$> declNameLookups <*> ThreeWay.forgetLca defns - in diffHashedNamespaceDefns lcaHashes <$> hashes + ( -- Core diffs, i.e. adds, deletes, and updates which have different synhashes. + TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference), + -- Propagated updates, i.e. updates which have the same synhash but different Unison hashes. 
+ TwoWay (DefnsF2 (Map Name) Updated Referent TypeReference) + ) +nameBasedNamespaceDiff declNameLookups lcaDeclNameLookup ppeds defns hydratedDefns = + let lcaHashes = synhashLcaDefns synhashPPE lcaDeclNameLookup defns.lca hydratedDefns + aliceHashes = synhashDefns synhashPPE hydratedDefns declNameLookups.alice defns.alice + bobHashes = synhashDefns synhashPPE hydratedDefns declNameLookups.bob defns.bob + in (diffHashedNamespaceDefns lcaHashes <$> TwoWay {alice = aliceHashes, bob = bobHashes}) + & Zip.unzip where - ppe :: PrettyPrintEnv - ppe = - -- The order between Alice and Bob isn't important here for syntactic hashing; not sure right now if it matters - -- that the LCA is added last - deepNamespaceDefinitionsToPpe defns.alice - `Ppe.addFallback` deepNamespaceDefinitionsToPpe defns.bob - `Ppe.addFallback` deepNamespaceDefinitionsToPpe defns.lca + synhashPPE :: PPE.PrettyPrintEnv + synhashPPE = + let ThreeWay {lca = lcaPPE, alice = alicePPE, bob = bobPPE} = PPED.unsuffixifiedPPE <$> ppeds + in alicePPE `PPE.addFallback` bobPPE `PPE.addFallback` lcaPPE diffHashedNamespaceDefns :: DefnsF2 (Map Name) Synhashed term typ -> DefnsF2 (Map Name) Synhashed term typ -> - DefnsF3 (Map Name) DiffOp Synhashed term typ -diffHashedNamespaceDefns = - zipDefnsWith f f + ( -- Core diffs, i.e. adds, deletes, and updates which have different synhashes. + DefnsF3 (Map Name) DiffOp Synhashed term typ, + -- Propagated updates, i.e. updates which have the same synhash but different Unison hashes. + DefnsF2 (Map Name) Updated term typ + ) +diffHashedNamespaceDefns d1 d2 = + zipDefnsWith f f d1 d2 + & splitPropagated where - f :: Map Name (Synhashed ref) -> Map Name (Synhashed ref) -> Map Name (DiffOp (Synhashed ref)) + f :: + Map Name (Synhashed ref) -> + Map Name (Synhashed ref) -> + (Map Name (DiffOp (Synhashed ref)), Map Name (Updated ref)) f old new = - Map.mapMaybe id (alignWith g old new) + unalign (eitherToThese . 
mapRight (fmap Synhashed.value) <$> alignWith g old new) - g :: (Eq x) => These x x -> Maybe (DiffOp x) + g :: (Eq x) => These x x -> Either (DiffOp x) (Updated x) g = \case - This old -> Just (DiffOp'Delete old) - That new -> Just (DiffOp'Add new) + This old -> Left (DiffOp'Delete old) + That new -> Left (DiffOp'Add new) These old new - | old == new -> Nothing - | otherwise -> Just (DiffOp'Update Updated {old, new}) + | old == new -> Right Updated {old, new} + | otherwise -> Left (DiffOp'Update Updated {old, new}) + + splitPropagated :: + Defns + ( Map Name (DiffOp (Synhashed term)), + Map Name (Updated term) + ) + (Map Name (DiffOp (Synhashed typ)), Map Name (Updated typ)) -> + (DefnsF3 (Map Name) DiffOp Synhashed term typ, DefnsF2 (Map Name) Updated term typ) + splitPropagated Defns {terms, types} = + (Defns {terms = fst terms, types = fst types}, Defns {terms = snd terms, types = snd types}) + +-- | Post-process a diff to identify relationships humans might care about, such as whether a given addition could be +-- interpreted as an alias of an existing definition, or whether an add and deletion could be a rename. 
+humanizeDiffs :: + ThreeWay Names -> + TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference) -> + TwoWay (DefnsF2 (Map Name) Updated Referent TypeReference) -> + TwoWay (DefnsF2 (Map Name) HumanDiffOp Referent TypeReference) +humanizeDiffs names3 diffs propagatedUpdates = + zipWithF3 nameRelations diffs propagatedUpdates \relation diffOps propagatedUpdates -> + Defns.zipDefnsWith4 computeHumanDiffOp computeHumanDiffOp lcaRelation relation diffOps propagatedUpdates + where + zipWithF3 :: (Zip.Zip f) => f a -> f b -> f c -> (a -> b -> c -> d) -> f d + zipWithF3 a b c f = Zip.zipWith (\(x, y) z -> f x y z) (Zip.zip a b) c + + namesToRelations :: Names -> (DefnsF (Relation Name) Referent TypeReference) + namesToRelations names = Defns {terms = Names.terms names, types = Names.types names} + + lcaRelation :: DefnsF (Relation Name) Referent TypeReference + lcaRelation = namesToRelations names3.lca + + nameRelations :: TwoWay (DefnsF (Relation Name) Referent TypeReference) + nameRelations = namesToRelations <$> ThreeWay.forgetLca names3 + + computeHumanDiffOp :: + forall ref. + (Show ref, Ord ref) => + Relation Name ref -> + Relation Name ref -> + Map Name (DiffOp (Synhashed ref)) -> + Map Name (Updated ref) -> + Map Name (HumanDiffOp ref) + computeHumanDiffOp oldRelation newRelation diffs propagatedUpdates = alignWith go diffs propagatedUpdates + where + go :: These (DiffOp (Synhashed ref)) (Updated ref) -> (HumanDiffOp ref) + go = \case + This diff -> humanizeDiffOp (Synhashed.value <$> diff) + That updated -> HumanDiffOp'PropagatedUpdate updated + These diff updated -> error (reportBug "E488729" ("The impossible happened, an update in merge was detected as both a propagated AND core update " ++ show diff ++ " and " ++ show updated)) + + humanizeDiffOp :: DiffOp ref -> HumanDiffOp ref + humanizeDiffOp = \case + DiffOp'Add ref -> + -- This name is newly added. We need to check if it's a new definition, an alias, or a rename. 
+ case Set.toList (Rel.lookupRan ref oldRelation) of + -- No old names for this ref, so it's a new addition not an alias + [] -> HumanDiffOp'Add ref + -- There are old names for this ref, but not old refs for this name, so it's + -- either a new alias or a rename. + -- + -- If at least one old name for this ref no longer exists, we treat it like a + -- rename. + (n : ns) -> do + let existingNames = NESet.fromList (n NEList.:| ns) + case NESet.nonEmptySet (Rel.lookupRan ref newRelation) of + Nothing -> error (reportBug "E458329" ("Expected to find at least one name for ref in new namespace, since we found the ref by the name.")) + Just allNewNames -> + case NESet.nonEmptySet (NESet.difference existingNames allNewNames) of + -- If all the old names still exist in the new namespace, it's a new alias. + Nothing -> HumanDiffOp'AliasOf ref existingNames + -- Otherwise, treat it as a rename. + Just namesWhichDisappeared -> + HumanDiffOp'RenamedFrom ref namesWhichDisappeared + DiffOp'Delete ref -> + case NEL.nonEmpty $ Set.toList (Rel.lookupRan ref newRelation) of + -- No names for this ref, it was removed. 
+ Nothing -> HumanDiffOp'Delete ref + Just newNames -> HumanDiffOp'RenamedTo ref (NESet.fromList newNames) + DiffOp'Update Updated {old, new} -> HumanDiffOp'Update Updated {old, new} ------------------------------------------------------------------------------------------------------------------------ -- Syntactic hashing @@ -183,19 +287,6 @@ synhashDefnsWith hashTerm hashType = do hashType1 name typ = Synhashed (hashType name typ) typ ------------------------------------------------------------------------------------------------------------------------- --- Pretty-print env helpers - -deepNamespaceDefinitionsToPpe :: Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name) -> PrettyPrintEnv -deepNamespaceDefinitionsToPpe Defns {terms, types} = - PrettyPrintEnv (arbitraryName terms) (arbitraryName types) - where - arbitraryName :: (Ord ref) => BiMultimap ref Name -> ref -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)] - arbitraryName names ref = - BiMultimap.lookupDom ref names - & Set.lookupMin - & maybe [] \name -> [(HQ'.NameOnly name, HQ'.NameOnly name)] - ------------------------------------------------------------------------------------------------------------------------ -- Looking up terms and decls that we expect to be there diff --git a/unison-merge/src/Unison/Merge/DiffOp.hs b/unison-merge/src/Unison/Merge/DiffOp.hs index db980f480b..9a17b3031f 100644 --- a/unison-merge/src/Unison/Merge/DiffOp.hs +++ b/unison-merge/src/Unison/Merge/DiffOp.hs @@ -14,4 +14,4 @@ data DiffOp a = DiffOp'Add !a | DiffOp'Delete !a | DiffOp'Update !(Updated a) - deriving stock (Functor, Show) + deriving stock (Foldable, Functor, Show, Traversable) diff --git a/unison-merge/src/Unison/Merge/HumanDiffOp.hs b/unison-merge/src/Unison/Merge/HumanDiffOp.hs new file mode 100644 index 0000000000..1a4c5e4299 --- /dev/null +++ b/unison-merge/src/Unison/Merge/HumanDiffOp.hs @@ -0,0 +1,28 @@ +module Unison.Merge.HumanDiffOp + ( HumanDiffOp (..), + ) +where + +import 
Data.Set.NonEmpty (NESet) +import Unison.Merge.Updated (Updated) +import Unison.Name (Name) + +-- | A diff operation is one of: +-- +-- * An add (where nothing was) +-- * A delete (of the thing that was) +-- * An update (from old to new) +-- * A propagated update (from old to new) +-- * An alias of some definition(s) on the other side +-- * A rename from some definition(s) on the other side +data HumanDiffOp ref + = HumanDiffOp'Add !ref + | HumanDiffOp'Delete !ref + | HumanDiffOp'Update !(Updated ref) + | HumanDiffOp'PropagatedUpdate !(Updated ref) + | HumanDiffOp'AliasOf !ref !(NESet Name {- existing names -}) + | -- The definition at this location was renamed from the given set of names to the current place + HumanDiffOp'RenamedFrom !ref !(NESet Name) + | -- The definition at this location was renamed to the given set of names from the current place + HumanDiffOp'RenamedTo !ref !(NESet Name) + deriving stock (Show) diff --git a/unison-merge/src/Unison/Merge/Internal/Types.hs b/unison-merge/src/Unison/Merge/Internal/Types.hs new file mode 100644 index 0000000000..d1c29ebb01 --- /dev/null +++ b/unison-merge/src/Unison/Merge/Internal/Types.hs @@ -0,0 +1,60 @@ +-- | Internal types module to house types that would require mutual recursion at the module level if defined separately +module Unison.Merge.Internal.Types + ( ThreeWay (..), + TwoOrThreeWay (..), + ) +where + +import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith)) +import Data.These (These (..)) +import Unison.Prelude + +data ThreeWay a = ThreeWay + { lca :: !a, + alice :: !a, + bob :: !a + } + deriving stock (Foldable, Functor, Generic, Traversable) + +instance Applicative ThreeWay where + pure :: a -> ThreeWay a + pure x = + ThreeWay x x x + + (<*>) :: ThreeWay (a -> b) -> ThreeWay a -> ThreeWay b + ThreeWay f g h <*> ThreeWay x y z = + ThreeWay (f x) (g y) (h z) + +instance Semialign ThreeWay where + alignWith :: (These a b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c + 
alignWith f (ThreeWay a b c) (ThreeWay x y z) = + ThreeWay (f (These a x)) (f (These b y)) (f (These c z)) + +instance Unzip ThreeWay where + unzipWith :: (c -> (a, b)) -> ThreeWay c -> (ThreeWay a, ThreeWay b) + unzipWith f (ThreeWay a b c) = + let (i, x) = f a + (j, y) = f b + (k, z) = f c + in (ThreeWay i j k, ThreeWay x y z) + +instance Zip ThreeWay where + zipWith :: (a -> b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c + zipWith f (ThreeWay a b c) (ThreeWay x y z) = + ThreeWay (f a x) (f b y) (f c z) + +data TwoOrThreeWay a = TwoOrThreeWay + { lca :: Maybe a, + alice :: a, + bob :: a + } + deriving stock (Foldable, Functor, Generic, Traversable) + +instance Applicative TwoOrThreeWay where + pure :: a -> TwoOrThreeWay a + pure x = + TwoOrThreeWay (Just x) x x + + (<*>) :: TwoOrThreeWay (a -> b) -> TwoOrThreeWay a -> TwoOrThreeWay b + TwoOrThreeWay f g h <*> TwoOrThreeWay x y z = + TwoOrThreeWay (f <*> x) (g y) (h z) diff --git a/unison-merge/src/Unison/Merge/Libdeps.hs b/unison-merge/src/Unison/Merge/Libdeps.hs index ec0b9899d4..ff580a7b9b 100644 --- a/unison-merge/src/Unison/Merge/Libdeps.hs +++ b/unison-merge/src/Unison/Merge/Libdeps.hs @@ -2,6 +2,7 @@ module Unison.Merge.Libdeps ( LibdepDiffOp (..), diffLibdeps, + mergeLibdepsDiffs, applyLibdepsDiff, getTwoFreshLibdepNames, ) @@ -15,6 +16,7 @@ import Data.These (These (..)) import Unison.Merge.DiffOp (DiffOp (..)) import Unison.Merge.EitherWay qualified as EitherWay import Unison.Merge.ThreeWay (ThreeWay (..)) +import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Merge.TwoDiffOps (TwoDiffOps (..)) import Unison.Merge.TwoDiffOps qualified as TwoDiffOps import Unison.Merge.TwoWay (TwoWay (..)) @@ -33,45 +35,43 @@ data LibdepDiffOp a | AddBothLibdeps !a !a | DeleteLibdep --- | Perform a three-way diff on two collections of library dependencies. +-- | Perform two two-way diffs on two collections of library dependencies. 
This is only half of a three-way diff: use +-- 'mergeLibdepsDiffs' to complete it. diffLibdeps :: + forall k v. (Ord k, Eq v) => -- | Library dependencies. ThreeWay (Map k v) -> - -- | Library dependencies diff. - Map k (LibdepDiffOp v) + -- | Library dependencies diffs. + TwoWay (Map k (DiffOp v)) diffLibdeps libdeps = - mergeDiffs (twoWayDiff libdeps.lca libdeps.alice) (twoWayDiff libdeps.lca libdeps.bob) - --- `twoWayDiff old new` computes a diff between old thing `old` and new thing `new`. --- --- Values present in `old` but not `new` are tagged as "deleted"; similar for "added" and "updated". -twoWayDiff :: (Ord k, Eq v) => Map k v -> Map k v -> Map k (DiffOp v) -twoWayDiff = - Map.merge - (Map.mapMissing \_ -> DiffOp'Delete) - (Map.mapMissing \_ -> DiffOp'Add) - ( Map.zipWithMaybeMatched \_ old new -> - if old == new - then Nothing - else Just (DiffOp'Update Updated {old, new}) - ) + f <$> ThreeWay.forgetLca libdeps + where + f :: Map k v -> Map k (DiffOp v) + f = + Map.merge + (Map.mapMissing \_ -> DiffOp'Delete) + (Map.mapMissing \_ -> DiffOp'Add) + ( Map.zipWithMaybeMatched \_ old new -> + if old == new + then Nothing + else Just (DiffOp'Update Updated {old, new}) + ) + libdeps.lca -- Merge two library dependency diffs together: -- -- * Keep all adds/updates (allowing conflicts as necessary, which will be resolved later) -- * Ignore deletes that only one party makes (because the other party may expect the dep to still be there) -mergeDiffs :: +mergeLibdepsDiffs :: forall k v. (Ord k, Eq v) => - -- The LCA->Alice library dependencies diff. - Map k (DiffOp v) -> - -- The LCA->Bob library dependencies diff. - Map k (DiffOp v) -> + -- The LCA->Alice and LCA->Bob library dependencies diffs. + TwoWay (Map k (DiffOp v)) -> -- The merged library dependencies diff. 
Map k (LibdepDiffOp v) -mergeDiffs alice bob = - catMaybes (alignWith combineDiffOps alice bob) +mergeLibdepsDiffs diffs = + catMaybes (alignWith combineDiffOps diffs.alice diffs.bob) combineDiffOps :: (Eq a) => These (DiffOp a) (DiffOp a) -> Maybe (LibdepDiffOp a) combineDiffOps = diff --git a/unison-merge/src/Unison/Merge/Mergeblob1.hs b/unison-merge/src/Unison/Merge/Mergeblob1.hs index aef0ec7973..df86ed9f7d 100644 --- a/unison-merge/src/Unison/Merge/Mergeblob1.hs +++ b/unison-merge/src/Unison/Merge/Mergeblob1.hs @@ -1,20 +1,26 @@ module Unison.Merge.Mergeblob1 ( Mergeblob1 (..), + hydratedDefnsLabeledDependencies, makeMergeblob1, ) where +import Control.Lens import Data.List qualified as List import Data.Map.Strict qualified as Map +import Data.Set qualified as Set import Unison.DataDeclaration (Decl) import Unison.DataDeclaration qualified as DataDeclaration +import Unison.DataDeclaration.Dependencies qualified as Decl import Unison.DeclNameLookup (DeclNameLookup) +import Unison.LabeledDependency qualified as LD import Unison.Merge.CombineDiffs (CombinedDiffOp, combineDiffs) import Unison.Merge.DeclCoherencyCheck (IncoherentDeclReason, checkDeclCoherency, lenientCheckDeclCoherency) -import Unison.Merge.Diff (nameBasedNamespaceDiff) +import Unison.Merge.Diff (humanizeDiffs, nameBasedNamespaceDiff) import Unison.Merge.DiffOp (DiffOp) import Unison.Merge.EitherWay (EitherWay (..)) -import Unison.Merge.Libdeps (LibdepDiffOp, applyLibdepsDiff, diffLibdeps, getTwoFreshLibdepNames) +import Unison.Merge.HumanDiffOp (HumanDiffOp) +import Unison.Merge.Libdeps (applyLibdepsDiff, diffLibdeps, getTwoFreshLibdepNames, mergeLibdepsDiffs) import Unison.Merge.Mergeblob0 (Mergeblob0 (..)) import Unison.Merge.PartialDeclNameLookup (PartialDeclNameLookup) import Unison.Merge.PartitionCombinedDiffs (partitionCombinedDiffs) @@ -25,13 +31,20 @@ import Unison.Merge.TwoWay (TwoWay (..)) import Unison.Merge.Unconflicts (Unconflicts) import Unison.Name (Name) import Unison.NameSegment 
(NameSegment) +import Unison.Names (Names) import Unison.Parser.Ann (Ann) import Unison.Prelude +import Unison.PrettyPrintEnv.Names qualified as PPE +import Unison.PrettyPrintEnvDecl qualified as PPED +import Unison.PrettyPrintEnvDecl.Names qualified as PPED import Unison.Reference (TermReference, TermReferenceId, TypeReference, TypeReferenceId) +import Unison.Reference qualified as Reference import Unison.Referent (Referent) import Unison.Symbol (Symbol) import Unison.Term (Term) +import Unison.Term qualified as Term import Unison.Type (Type) +import Unison.Type qualified as Type import Unison.Util.BiMultimap (BiMultimap) import Unison.Util.Defns (Defns (..), DefnsF, DefnsF2, DefnsF3) @@ -40,7 +53,8 @@ data Mergeblob1 libdep = Mergeblob1 declNameLookups :: TwoWay DeclNameLookup, defns :: ThreeWay (Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name)), diff :: DefnsF2 (Map Name) CombinedDiffOp Referent TypeReference, - diffs :: TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference), + diffsFromLCA :: TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference), + humanDiffsFromLCA :: TwoWay (DefnsF2 (Map Name) HumanDiffOp Referent TypeReference), hydratedDefns :: ThreeWay ( DefnsF @@ -49,16 +63,42 @@ data Mergeblob1 libdep = Mergeblob1 (TypeReferenceId, Decl Symbol Ann) ), lcaDeclNameLookup :: PartialDeclNameLookup, - libdeps :: Map NameSegment libdep, - libdepsDiff :: Map NameSegment (LibdepDiffOp libdep), lcaLibdeps :: Map NameSegment libdep, + libdeps :: Map NameSegment libdep, + libdepsDiffs :: TwoWay (Map NameSegment (DiffOp libdep)), unconflicts :: DefnsF Unconflicts Referent TypeReference } +-- | Get a names object for all the hydrated definitions AND their direct dependencies +hydratedDefnsLabeledDependencies :: + DefnsF + (Map Name) + (TermReferenceId, (Term Symbol Ann, Type Symbol Ann)) + (TypeReferenceId, Decl Symbol Ann) -> + Set LD.LabeledDependency +hydratedDefnsLabeledDependencies defns = + let termDeps :: Set 
LD.LabeledDependency + termDeps = + foldOf + ( folded + . beside + (to Reference.DerivedId . to LD.TermReference . to Set.singleton) + (beside (to Term.labeledDependencies) (to Type.labeledDependencies)) + ) + defns.terms + + typeDeps :: Set LD.LabeledDependency + typeDeps = + defns.types + & foldMap \(typeRefId, typeDecl) -> + Decl.labeledDeclDependenciesIncludingSelfAndFieldAccessors (Reference.DerivedId typeRefId) typeDecl + in Set.union termDeps typeDeps + makeMergeblob1 :: forall libdep. (Eq libdep) => Mergeblob0 libdep -> + ThreeWay Names {- Names for _at least_ every reference in 'hydratedDefnDependencies' -} -> ThreeWay ( DefnsF (Map Name) @@ -66,7 +106,9 @@ makeMergeblob1 :: (TypeReferenceId, Decl Symbol Ann) ) -> Either (EitherWay IncoherentDeclReason) (Mergeblob1 libdep) -makeMergeblob1 blob hydratedDefns = do +makeMergeblob1 blob names3 hydratedDefns = do + let ppeds3 :: ThreeWay PPED.PrettyPrintEnvDecl + ppeds3 = names3 <&> \names -> PPED.makePPED (PPE.namer names) (PPE.suffixifyByHash names) -- Make one big constructor count lookup for all type decls let numConstructors = Map.empty @@ -94,10 +136,11 @@ makeMergeblob1 blob hydratedDefns = do lenientCheckDeclCoherency blob.nametrees.lca numConstructors -- Diff LCA->Alice and LCA->Bob - let diffs = + let (diffsFromLCA, propagatedUpdates) = nameBasedNamespaceDiff declNameLookups lcaDeclNameLookup + ppeds3 blob.defns Defns { terms = @@ -111,21 +154,22 @@ makeMergeblob1 blob hydratedDefns = do } -- Combine the LCA->Alice and LCA->Bob diffs together - let diff = - combineDiffs diffs + let diff = combineDiffs diffsFromLCA + + let humanDiffsFromLCA = humanizeDiffs names3 diffsFromLCA propagatedUpdates -- Partition the combined diff into the conflicted things and the unconflicted things let (conflicts, unconflicts) = partitionCombinedDiffs (ThreeWay.forgetLca blob.defns) declNameLookups diff -- Diff and merge libdeps - let libdepsDiff :: Map NameSegment (LibdepDiffOp libdep) - libdepsDiff = + let libdepsDiffs 
:: TwoWay (Map NameSegment (DiffOp libdep)) + libdepsDiffs = diffLibdeps blob.libdeps let libdeps :: Map NameSegment libdep libdeps = - applyLibdepsDiff getTwoFreshLibdepNames blob.libdeps libdepsDiff + applyLibdepsDiff getTwoFreshLibdepNames blob.libdeps (mergeLibdepsDiffs libdepsDiffs) pure Mergeblob1 @@ -133,11 +177,12 @@ makeMergeblob1 blob hydratedDefns = do declNameLookups, defns = blob.defns, diff, - diffs, + diffsFromLCA, + humanDiffsFromLCA, hydratedDefns, lcaDeclNameLookup, - libdeps, - libdepsDiff, lcaLibdeps = blob.libdeps.lca, + libdeps, + libdepsDiffs, unconflicts } diff --git a/unison-merge/src/Unison/Merge/Mergeblob2.hs b/unison-merge/src/Unison/Merge/Mergeblob2.hs index 629d8d2146..b42594e438 100644 --- a/unison-merge/src/Unison/Merge/Mergeblob2.hs +++ b/unison-merge/src/Unison/Merge/Mergeblob2.hs @@ -65,7 +65,7 @@ data Mergeblob2Error makeMergeblob2 :: Mergeblob1 libdep -> Either Mergeblob2Error (Mergeblob2 libdep) makeMergeblob2 blob = do -- Bail early if it looks like we can't proceed with the merge, because Alice or Bob has one or more conflicted alias - for_ ((,) <$> TwoWay Alice Bob <*> blob.diffs) \(who, diff) -> + for_ ((,) <$> TwoWay Alice Bob <*> blob.diffsFromLCA) \(who, diff) -> whenJust (findConflictedAlias blob.defns.lca diff) $ Left . Mergeblob2Error'ConflictedAlias . 
who diff --git a/unison-merge/src/Unison/Merge/ThreeWay.hs b/unison-merge/src/Unison/Merge/ThreeWay.hs index cc9d24c47d..aa49f7b9d3 100644 --- a/unison-merge/src/Unison/Merge/ThreeWay.hs +++ b/unison-merge/src/Unison/Merge/ThreeWay.hs @@ -1,48 +1,18 @@ module Unison.Merge.ThreeWay ( ThreeWay (..), forgetLca, + toTwoOrThreeWay, ) where -import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith)) -import Data.These (These (..)) +import Unison.Merge.Internal.Types (ThreeWay (..)) +import Unison.Merge.TwoOrThreeWay (TwoOrThreeWay (..)) import Unison.Merge.TwoWay (TwoWay (..)) -import Unison.Prelude - -data ThreeWay a = ThreeWay - { lca :: !a, - alice :: !a, - bob :: !a - } - deriving stock (Foldable, Functor, Generic, Traversable) - -instance Applicative ThreeWay where - pure :: a -> ThreeWay a - pure x = - ThreeWay x x x - - (<*>) :: ThreeWay (a -> b) -> ThreeWay a -> ThreeWay b - ThreeWay f g h <*> ThreeWay x y z = - ThreeWay (f x) (g y) (h z) - -instance Semialign ThreeWay where - alignWith :: (These a b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c - alignWith f (ThreeWay a b c) (ThreeWay x y z) = - ThreeWay (f (These a x)) (f (These b y)) (f (These c z)) - -instance Unzip ThreeWay where - unzipWith :: (c -> (a, b)) -> ThreeWay c -> (ThreeWay a, ThreeWay b) - unzipWith f (ThreeWay a b c) = - let (i, x) = f a - (j, y) = f b - (k, z) = f c - in (ThreeWay i j k, ThreeWay x y z) - -instance Zip ThreeWay where - zipWith :: (a -> b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c - zipWith f (ThreeWay a b c) (ThreeWay x y z) = - ThreeWay (f a x) (f b y) (f c z) forgetLca :: ThreeWay a -> TwoWay a forgetLca ThreeWay {alice, bob} = TwoWay {alice, bob} + +toTwoOrThreeWay :: ThreeWay a -> TwoOrThreeWay a +toTwoOrThreeWay ThreeWay {alice, bob, lca} = + TwoOrThreeWay {alice, bob, lca = Just lca} diff --git a/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs b/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs index 556ff0fd2d..cec3725c11 100644 --- 
a/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs +++ b/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs @@ -1,13 +1,12 @@ module Unison.Merge.TwoOrThreeWay ( TwoOrThreeWay (..), + toThreeWay, ) where +import Unison.Merge.Internal.Types (ThreeWay (..), TwoOrThreeWay (..)) import Unison.Prelude -data TwoOrThreeWay a = TwoOrThreeWay - { lca :: Maybe a, - alice :: a, - bob :: a - } - deriving stock (Foldable, Functor, Generic, Traversable) +toThreeWay :: a -> TwoOrThreeWay a -> ThreeWay a +toThreeWay x TwoOrThreeWay {alice, bob, lca} = + ThreeWay {alice, bob, lca = fromMaybe x lca} diff --git a/unison-merge/src/Unison/Merge/Updated.hs b/unison-merge/src/Unison/Merge/Updated.hs index 00b64ed98b..6dd5fc41b8 100644 --- a/unison-merge/src/Unison/Merge/Updated.hs +++ b/unison-merge/src/Unison/Merge/Updated.hs @@ -10,4 +10,4 @@ data Updated a = Updated { old :: a, new :: a } - deriving stock (Functor, Generic, Show) + deriving stock (Foldable, Functor, Generic, Show, Traversable) diff --git a/unison-merge/unison-merge.cabal b/unison-merge/unison-merge.cabal index e53e024a67..91d98ef5f7 100644 --- a/unison-merge/unison-merge.cabal +++ b/unison-merge/unison-merge.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.36.0. +-- This file has been generated from package.yaml by hpack version 0.37.0. 
-- -- see: https://github.com/sol/hpack @@ -25,6 +25,8 @@ library Unison.Merge.EitherWay Unison.Merge.EitherWayI Unison.Merge.FindConflictedAlias + Unison.Merge.HumanDiffOp + Unison.Merge.Internal.Types Unison.Merge.Libdeps Unison.Merge.Mergeblob0 Unison.Merge.Mergeblob1 @@ -83,6 +85,7 @@ library build-depends: base , containers + , either , lens , mtl , nonempty-containers diff --git a/unison-runtime/src/Unison/Runtime/Machine.hs b/unison-runtime/src/Unison/Runtime/Machine.hs index 75a13ad384..8976269e17 100644 --- a/unison-runtime/src/Unison/Runtime/Machine.hs +++ b/unison-runtime/src/Unison/Runtime/Machine.hs @@ -683,14 +683,13 @@ eval env !denv !activeThreads !stk !k r (Match i br) = do n <- peekOffN stk i eval env denv activeThreads stk k r $ selectBranch n br eval env !denv !activeThreads !stk !k r (DMatch mr i br) = do - (t, stk) <- dumpDataNoTag mr stk =<< peekOff stk i - eval env denv activeThreads stk k r $ - selectBranch (maskTags t) br + (nx, stk) <- dataBranch mr stk br =<< bpeekOff stk i + eval env denv activeThreads stk k r nx eval env !denv !activeThreads !stk !k r (NMatch _mr i br) = do n <- peekOffN stk i eval env denv activeThreads stk k r $ selectBranch n br eval env !denv !activeThreads !stk !k r (RMatch i pu br) = do - (t, stk) <- dumpDataNoTag Nothing stk =<< peekOff stk i + (t, stk) <- dumpDataValNoTag stk =<< peekOff stk i if t == TT.pureEffectTag then eval env denv activeThreads stk k r pu else case ANF.unpackTags t of @@ -1000,46 +999,41 @@ buildData !stk !r !t (VArgV i) = do l = fsize stk - i {-# INLINE buildData #-} +dumpDataValNoTag :: + Stack -> + Val -> + IO (PackedTag, Stack) +dumpDataValNoTag stk (BoxedVal c) = + (closureTag c,) <$> dumpDataNoTag Nothing stk c +dumpDataValNoTag _ v = + die $ "dumpDataValNoTag: unboxed val: " ++ show v +{-# inline dumpDataValNoTag #-} + -- Dumps a data type closure to the stack without writing its tag. -- Instead, the tag is returned for direct case analysis. 
dumpDataNoTag :: Maybe Reference -> Stack -> - Val -> - IO (PackedTag, Stack) + Closure -> + IO Stack dumpDataNoTag !mr !stk = \case -- Normally we want to avoid dumping unboxed values since it's unnecessary, but sometimes we don't know the type of -- the incoming value and end up dumping unboxed values, so we just push them back to the stack as-is. e.g. in type-casts/coercions - val@(UnboxedVal _ t) -> do + Enum _ _ -> pure stk + Data1 _ _ x -> do stk <- bump stk - poke stk val - pure (unboxedPackedTag t, stk) - BoxedVal clos -> case clos of - (Enum _ t) -> pure (t, stk) - (Data1 _ t x) -> do - stk <- bump stk - poke stk x - pure (t, stk) - (Data2 _ t x y) -> do - stk <- bumpn stk 2 - pokeOff stk 1 y - poke stk x - pure (t, stk) - (DataG _ t seg) -> do - stk <- dumpSeg stk seg S - pure (t, stk) - clo -> - die $ - "dumpDataNoTag: bad closure: " - ++ show clo - ++ maybe "" (\r -> "\nexpected type: " ++ show r) mr - where - unboxedPackedTag :: UnboxedTypeTag -> PackedTag - unboxedPackedTag = \case - CharTag -> TT.charTag - FloatTag -> TT.floatTag - IntTag -> TT.intTag - NatTag -> TT.natTag + poke stk x + pure stk + Data2 _ _ x y -> do + stk <- bumpn stk 2 + pokeOff stk 1 y + stk <$ poke stk x + DataG _ _ seg -> dumpSeg stk seg S + clo -> + die $ + "dumpDataNoTag: bad closure: " + ++ show clo + ++ maybe "" (\r -> "\nexpected type: " ++ show r) mr {-# INLINE dumpDataNoTag #-} -- Note: although the representation allows it, it is impossible @@ -1995,6 +1989,94 @@ selectBranch t (TestW df cs) = lookupWithDefault df t cs selectBranch _ (TestT {}) = error "impossible" {-# INLINE selectBranch #-} +-- Combined branch selection and field dumping function for data types. +-- Fields should only be dumped on _matches_, not default cases, because +-- default cases potentially cover many constructors which could result +-- in a variable number of values being put on the stack. Default cases +-- uniformly expect _no_ values to be added to the stack. 
+dataBranch + :: Maybe Reference -> Stack -> MBranch -> Closure -> IO (MSection, Stack) +dataBranch mrf stk (Test1 u cu df) = \case + Enum _ t + | maskTags t == u -> pure (cu, stk) + | otherwise -> pure (df, stk) + Data1 _ t x + | maskTags t == u -> do + stk <- bump stk + (cu, stk) <$ poke stk x + | otherwise -> pure (df, stk) + Data2 _ t x y + | maskTags t == u -> do + stk <- bumpn stk 2 + pokeOff stk 1 y + (cu, stk) <$ poke stk x + | otherwise -> pure (df, stk) + DataG _ t seg + | maskTags t == u -> (cu,) <$> dumpSeg stk seg S + | otherwise -> pure (df, stk) + clo -> dataBranchClosureError mrf clo +dataBranch mrf stk (Test2 u cu v cv df) = \case + Enum _ t + | maskTags t == u -> pure (cu, stk) + | maskTags t == v -> pure (cv, stk) + | otherwise -> pure (df, stk) + Data1 _ t x + | maskTags t == u -> do + stk <- bump stk + (cu, stk) <$ poke stk x + | maskTags t == v -> do + stk <- bump stk + (cv, stk) <$ poke stk x + | otherwise -> pure (df, stk) + Data2 _ t x y + | maskTags t == u -> do + stk <- bumpn stk 2 + pokeOff stk 1 y + (cu, stk) <$ poke stk x + | maskTags t == v -> do + stk <- bumpn stk 2 + pokeOff stk 1 y + (cv, stk) <$ poke stk x + | otherwise -> pure (df, stk) + DataG _ t seg + | maskTags t == u -> (cu,) <$> dumpSeg stk seg S + | maskTags t == v -> (cv,) <$> dumpSeg stk seg S + | otherwise -> pure (df, stk) + clo -> dataBranchClosureError mrf clo +dataBranch mrf stk (TestW df bs) = \case + Enum _ t + | Just ca <- EC.lookup (maskTags t) bs -> pure (ca, stk) + | otherwise -> pure (df, stk) + Data1 _ t x + | Just ca <- EC.lookup (maskTags t) bs -> do + stk <- bump stk + (ca, stk) <$ poke stk x + | otherwise -> pure (df, stk) + Data2 _ t x y + | Just ca <- EC.lookup (maskTags t) bs -> do + stk <- bumpn stk 2 + pokeOff stk 1 y + (ca, stk) <$ poke stk x + | otherwise -> pure (df, stk) + DataG _ t seg + | Just ca <- EC.lookup (maskTags t) bs -> + (ca,) <$> dumpSeg stk seg S + | otherwise -> pure (df, stk) + clo -> dataBranchClosureError mrf clo +dataBranch _ _ 
br = \_ -> + dataBranchBranchError br +{-# inline dataBranch #-} + +dataBranchClosureError :: Maybe Reference -> Closure -> IO a +dataBranchClosureError mrf clo = + die $ "dataBranch: bad closure: " + ++ show clo + ++ maybe "" (\ r -> "\nexpected type: " ++ show r) mrf + +dataBranchBranchError :: MBranch -> IO a +dataBranchBranchError br = + die $ "dataBranch: unexpected branch: " ++ show br + -- Splits off a portion of the continuation up to a given prompt. -- -- The main procedure walks along the 'code' stack `k`, keeping track of how diff --git a/unison-runtime/src/Unison/Runtime/Pattern.hs b/unison-runtime/src/Unison/Runtime/Pattern.hs index 610a456d3a..21999727a3 100644 --- a/unison-runtime/src/Unison/Runtime/Pattern.hs +++ b/unison-runtime/src/Unison/Runtime/Pattern.hs @@ -13,6 +13,7 @@ module Unison.Runtime.Pattern where import Control.Monad.State (State, evalState, modify, runState, state) +import Data.Containers.ListUtils (nubOrd) import Data.List (transpose) import Data.Map.Strict ( fromListWith, @@ -24,7 +25,7 @@ import Data.Set qualified as Set import Unison.ABT ( absChain', renames, - visitPure, + visit, pattern AbsN', ) import Unison.Builtin.Decls (builtinDataDecls, builtinEffectDecls) @@ -92,6 +93,11 @@ builtinDataSpec = Map.fromList decls | (_, x, y) <- builtinEffectDecls ] +findPattern :: Eq v => v -> PatternRow v -> Maybe (Pattern v) +findPattern v (PR ms _ _) + | (_, p : _) <- break ((== v) . loc) ms = Just p + | otherwise = Nothing + -- A pattern compilation matrix is just a list of rows. 
There is -- no need for the rows to have uniform length; the variable -- annotations on the patterns in the rows keep track of what @@ -125,8 +131,11 @@ refutable (P.Unbound _) = False refutable (P.Var _) = False refutable _ = True -rowIrrefutable :: PatternRow v -> Bool -rowIrrefutable (PR ps _ _) = null ps +noMatches :: PatternRow v -> Bool +noMatches (PR ps _ _) = null ps + +rowRefutable :: PatternRow v -> Bool +rowRefutable (PR ps g _) = isJust g || not (null ps) firstRow :: ([P.Pattern v] -> Maybe v) -> Heuristic v firstRow f (PM (r : _)) = f $ matches r @@ -481,6 +490,19 @@ splitMatrix v rf cons (PM rs) = where mmap = fmap (\(t, fs) -> (t, splitRow v rf t fs =<< rs)) cons +-- Eliminates a variable from a matrix, keeping the rows that are +-- _not_ specific matches on that variable (so, would potentially +-- occur in a default case). +antiSplitMatrix :: + (Var v) => + v -> + PatternMatrix v -> + PatternMatrix v +antiSplitMatrix v (PM rs) = PM (f =<< rs) + where + -- keep rows that do not have a refutable pattern for v + f r = [ r | isNothing $ findPattern v r ] + -- Monad for pattern preparation. It is a state monad carrying a fresh -- variable source, the list of variables bound the pattern being -- prepared, and a variable renaming mapping. 
@@ -596,7 +618,7 @@ compile _ _ (PM []) = apps' bu [text () "pattern match failure"] where bu = ref () (Builtin "bug") compile spec ctx m@(PM (r : rs)) - | rowIrrefutable r = + | noMatches r = case guard r of Nothing -> body r Just g -> iff mempty g (body r) $ compile spec ctx (PM rs) @@ -614,8 +636,11 @@ compile spec ctx m@(PM (r : rs)) case lookupData rf spec of Right cons -> match () (var () v) $ - buildCase spec rf False cons ctx - <$> splitMatrix v (Just rf) (numberCons cons) m + (buildCase spec rf False cons ctx + <$> splitMatrix v (Just rf) ncons m) + ++ buildDefaultCase spec False needDefault ctx dm + where + needDefault = length ncons < length cons Left err -> internalBug err | PReq rfs <- ty = match () (var () v) $ @@ -631,7 +656,29 @@ compile spec ctx m@(PM (r : rs)) internalBug "unknown pattern compilation type" where v = choose heuristics m + ncons = relevantConstructors m v ty = Map.findWithDefault Unknown v ctx + dm = antiSplitMatrix v m + +-- Calculates the data constructors—with their arities—that should be +-- matched on when splitting a matrix on a given variable. This +-- includes +relevantConstructors :: Ord v => PatternMatrix v -> v -> [(Int, Int)] +relevantConstructors (PM rows) v = search [] rows + where + search acc (row : rows) + | rowRefutable row = case findPattern v row of + Just (P.Constructor _ (ConstructorReference _ t) sps) -> + search ((fromIntegral t, length sps) : acc) rows + Just (P.Boolean _ b) -> + search ((if b then 1 else 0, 0) : acc) rows + Just p -> + internalBug $ "unexpected data pattern: " ++ show p + -- if the pattern is not found, it must have been irrefutable, + -- so contributes no relevant constructor. + _ -> search acc rows + -- irrefutable row, or no rows left + search acc _ = nubOrd $ reverse acc buildCaseBuiltin :: (Var v) => @@ -677,6 +724,18 @@ buildCase spec r eff cons ctx0 (t, vts, m) = vs = ((),) . 
fst <$> vts ctx = Map.fromList vts <> ctx0 +buildDefaultCase :: + (Var v) => + DataSpec -> + Bool -> + Bool -> + Ctx v -> + PatternMatrix v -> + [MatchCase () (Term v)] +buildDefaultCase spec _eff needed ctx pm + | needed = [MatchCase (Unbound ()) Nothing $ compile spec ctx pm] + | otherwise = [] + mkRow :: (Var v) => v -> @@ -706,34 +765,41 @@ initialize :: PType -> Term v -> [MatchCase () (Term v)] -> - (Maybe v, (v, PType), PatternMatrix v) -initialize r sc cs = - ( lv, - (sv, r), - PM $ evalState (traverse (mkRow sv) cs) 1 - ) + State Word64 (Maybe v, (v, PType), PatternMatrix v) +initialize r sc cs = do + (lv, sv) <- vars + rs <- traverse (mkRow sv) cs + pure (lv, (sv, r), PM rs) where - (lv, sv) - | Var' v <- sc = (Nothing, v) - | pv <- freshenId 0 $ typed Pattern = - (Just pv, pv) + vars + | Var' v <- sc = pure (Nothing, v) + | otherwise = mkVars <$> grabId + mkVars n = (Just pv, pv) + where + pv = freshenId n $ typed Pattern + +grabId :: State Word64 Word64 +grabId = state $ \n -> (n, n+1) splitPatterns :: (Var v) => DataSpec -> Term v -> Term v -splitPatterns spec0 = visitPure $ \case +splitPatterns spec0 tm = evalState (splitPatterns0 spec tm) 0 + where + spec = Map.insert Rf.booleanRef (Right [0, 0]) spec0 + +splitPatterns0 :: (Var v) => DataSpec -> Term v -> State Word64 (Term v) +splitPatterns0 spec = visit $ \case Match' sc0 cs0 - | ty <- determineType $ p <$> cs0, - (lv, scrut, pm) <- initialize ty sc cs, - body <- compile spec (uncurry Map.singleton scrut) pm -> - Just $ case lv of + | ty <- determineType $ p <$> cs0 -> Just $ do + sc <- splitPatterns0 spec sc0 + cs <- (traverse . 
traverse) (splitPatterns0 spec) cs0 + (lv, scrut, pm) <- initialize ty sc cs + let body = compile spec (uncurry Map.singleton scrut) pm + pure $ case lv of Just v -> let1 False [(((), v), sc)] body _ -> body - where - sc = splitPatterns spec sc0 - cs = fmap (splitPatterns spec) <$> cs0 _ -> Nothing where p (MatchCase pp _ _) = pp - spec = Map.insert Rf.booleanRef (Right [0, 0]) spec0 builtinCase :: Set Reference builtinCase = diff --git a/unison-runtime/src/Unison/Runtime/Stack.hs b/unison-runtime/src/Unison/Runtime/Stack.hs index 164a4591f3..730f5351f0 100644 --- a/unison-runtime/src/Unison/Runtime/Stack.hs +++ b/unison-runtime/src/Unison/Runtime/Stack.hs @@ -22,6 +22,7 @@ module Unison.Runtime.Stack BlackHole, UnboxedTypeTag ), + closureTag, UnboxedTypeTag (..), unboxedTypeTagToInt, unboxedTypeTagFromInt, @@ -153,7 +154,7 @@ module Unison.Runtime.Stack ) where -import Control.Exception (throwIO) +import Control.Exception (throw, throwIO) import Control.Monad.Primitive import Data.Char qualified as Char import Data.IORef (IORef) @@ -371,6 +372,15 @@ splitData = \case (DataG r t seg) -> Just (r, t, segToList seg) _ -> Nothing +closureTag :: Closure -> PackedTag +closureTag (Enum _ t) = t +closureTag (Data1 _ t _) = t +closureTag (Data2 _ t _ _) = t +closureTag (DataG _ t _) = t +closureTag c = + throw $ Panic "closureTag: unexpected closure" (Just $ BoxedVal c) +{-# inline closureTag #-} + -- | Converts a list of integers representing an unboxed segment back into the -- appropriate segment. Segments are stored backwards in the runtime, so this -- reverses the list. 
diff --git a/unison-share-api/package.yaml b/unison-share-api/package.yaml index 8ed217cf4d..65cf9f8dfd 100644 --- a/unison-share-api/package.yaml +++ b/unison-share-api/package.yaml @@ -2,65 +2,91 @@ name: unison-share-api github: unisonweb/unison copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors + library: source-dirs: src when: - condition: false other-modules: Paths_unison_share_api -dependencies: - - aeson >= 2.0.0.0 - - async - - base - - binary - - bytes - - bytestring - - containers - - Diff - - directory - - errors - - extra - - filepath - - fuzzyfind - - http-media - - http-types - - lens - - lucid - - memory - - mtl - - nonempty-containers - - openapi3 - - regex-tdfa - - servant - - servant-docs - - servant-openapi3 - - servant-server - - text - - transformers - - unison-codebase - - unison-codebase-sqlite - - unison-codebase-sqlite-hashing-v2 - - unison-core - - unison-core1 - - unison-hash - - unison-hash-orphans-aeson - - unison-hashing-v2 - - unison-parser-typechecker - - unison-prelude - - unison-pretty-printer - - unison-runtime - - unison-util-relation - - unison-share-projects-api - - unison-sqlite - - unison-syntax - - unliftio - - uri-encode - - utf8-string - - vector - - wai - - wai-cors - - warp - - yaml + dependencies: + - aeson >= 2.0.0.0 + - async + - base + - binary + - bytes + - bytestring + - cborg + - containers + - Diff + - directory + - errors + - extra + - filepath + - fuzzyfind + - http-media + - http-types + - lens + - lucid + - memory + - mtl + - nonempty-containers + - openapi3 + - regex-tdfa + - serialise + - servant + - servant-docs + - servant-openapi3 + - servant-server + - text + - transformers + - unison-codebase + - unison-codebase-sqlite + - unison-codebase-sqlite-hashing-v2 + - unison-core + - unison-core1 + - unison-hash + - unison-hash-orphans-aeson + - unison-hashing-v2 + - unison-parser-typechecker + - unison-prelude + - unison-pretty-printer + - unison-runtime + - unison-util-relation + - 
unison-util-base32hex + - unison-share-projects-api + - unison-sqlite + - unison-syntax + - unliftio + - uri-encode + - utf8-string + - vector + - wai + - wai-cors + - warp + - yaml + +tests: + unison-share-api-tests: + when: + - condition: false + other-modules: Paths_unison_share_api + dependencies: + - code-page + - easytest + - hedgehog + - unison-share-api + - base + - bytestring + - serialise + - text + - unison-hash + - unison-prelude + - unison-codebase-sqlite + - vector + + main: Main.hs + source-dirs: tests ghc-options: -Wall @@ -95,3 +121,4 @@ default-extensions: - TypeOperators - ViewPatterns - ImportQualifiedPost + - QuasiQuotes diff --git a/unison-share-api/src/Unison/Server/Local/Endpoints/Projects.hs b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects.hs index b908b7499b..b48940dde3 100644 --- a/unison-share-api/src/Unison/Server/Local/Endpoints/Projects.hs +++ b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects.hs @@ -11,9 +11,7 @@ module Unison.Server.Local.Endpoints.Projects ) where -import Data.Aeson (ToJSON (..)) -import Data.Aeson qualified as Aeson -import Data.OpenApi (ToParamSchema, ToSchema) +import Data.OpenApi (ToParamSchema) import GHC.Generics () import Servant import Servant.Docs @@ -22,48 +20,21 @@ import U.Codebase.Sqlite.Project qualified as SqliteProject import U.Codebase.Sqlite.Queries qualified as Q import Unison.Codebase (Codebase) import Unison.Codebase qualified as Codebase -import Unison.Core.Project (ProjectBranchName (UnsafeProjectBranchName), ProjectName (UnsafeProjectName)) import Unison.Parser.Ann (Ann) import Unison.Prelude +import Unison.Project (ProjectName) import Unison.Server.Backend (Backend) +import Unison.Server.Local.Endpoints.Projects.Queries qualified as PG +import Unison.Server.Local.Endpoints.Projects.Queries qualified as PQ +import Unison.Server.Local.Endpoints.Projects.Types import Unison.Symbol (Symbol) -data ProjectListing = ProjectListing - { projectName :: ProjectName - } - 
deriving stock (Show, Generic) - -instance ToSchema ProjectListing - -instance ToJSON ProjectListing where - toJSON ProjectListing {projectName} = - Aeson.object ["projectName" Aeson..= projectName] - -instance ToSample ProjectListing where - toSamples _ = - singleSample $ ProjectListing (UnsafeProjectName "my-project") - -data ProjectBranchListing = ProjectBranchListing - { branchName :: ProjectBranchName - } - deriving stock (Show, Generic) - -instance ToSchema ProjectBranchListing - -instance ToJSON ProjectBranchListing where - toJSON ProjectBranchListing {branchName} = - Aeson.object ["branchName" Aeson..= branchName] - -instance ToSample ProjectBranchListing where - toSamples _ = - singleSample $ ProjectBranchListing (UnsafeProjectBranchName "my-branch") - type ListProjectsEndpoint = - QueryParam "prefix" PrefixFilter + QueryParam "query" Query :> Get '[JSON] [ProjectListing] type ListProjectBranchesEndpoint = - QueryParam "prefix" PrefixFilter + QueryParam "query" Query :> Get '[JSON] [ProjectBranchListing] newtype PrefixFilter = PrefixFilter @@ -86,20 +57,39 @@ instance Docs.ToSample PrefixFilter where toSamples _ = singleSample $ PrefixFilter "my-proj" +newtype Query = Query + { getQuery :: Text + } + deriving stock (Show, Generic) + deriving newtype (FromHttpApiData) + +instance ToParamSchema Query + +instance ToParam (QueryParam "query" Query) where + toParam _ = + DocQueryParam + "query" + ["my-proj"] + "Filter for results containing the given text." + Normal + +instance Docs.ToSample Query where + toSamples _ = + singleSample $ Query "my-proj" + projectListingEndpoint :: Codebase IO Symbol Ann -> - Maybe PrefixFilter -> + -- Infix Query + Maybe Query -> Backend IO [ProjectListing] -projectListingEndpoint codebase mayPrefix = liftIO . Codebase.runTransaction codebase $ do - projects <- Q.loadAllProjectsBeginningWith (prefix <$> mayPrefix) - pure $ ProjectListing . SqliteProject.name <$> projects +projectListingEndpoint codebase mayQuery = liftIO . 
Codebase.runTransaction codebase $ do + PQ.listProjects (getQuery <$> mayQuery) projectBranchListingEndpoint :: Codebase IO Symbol Ann -> ProjectName -> - Maybe PrefixFilter -> + Maybe Query -> Backend IO [ProjectBranchListing] -projectBranchListingEndpoint codebase projectName mayPrefix = liftIO . Codebase.runTransaction codebase . fmap fold . runMaybeT $ do +projectBranchListingEndpoint codebase projectName mayQuery = liftIO . Codebase.runTransaction codebase . fmap fold . runMaybeT $ do SqliteProject.Project {projectId} <- MaybeT $ Q.loadProjectByName projectName - lift (Q.loadAllProjectBranchesBeginningWith projectId (prefix <$> mayPrefix)) - <&> fmap (ProjectBranchListing . snd) + lift (PG.listProjectBranches projectId (getQuery <$> mayQuery)) diff --git a/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Queries.hs b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Queries.hs new file mode 100644 index 0000000000..24360e4d0c --- /dev/null +++ b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Queries.hs @@ -0,0 +1,39 @@ +module Unison.Server.Local.Endpoints.Projects.Queries + ( listProjects, + listProjectBranches, + ) +where + +import Data.Text (Text) +import U.Codebase.Sqlite.DbId (ProjectId) +import Unison.Server.Local.Endpoints.Projects.Types +import Unison.Sqlite + +-- | Load all project listings, optionally requiring an infix match with a query. 
+listProjects :: Maybe Text -> Transaction [ProjectListing] +listProjects mayUnsafeQuery = do + let mayQuery = fmap (likeEscape '\\') mayUnsafeQuery + queryListRow + [sql| + SELECT project.name, branch.name + FROM project + LEFT JOIN most_recent_branch mrb + ON project.id = mrb.project_id + LEFT JOIN project_branch branch + ON mrb.branch_id = branch.branch_id + WHERE (:mayQuery IS NULL OR project.name LIKE '%' || :mayQuery || '%' ESCAPE '\') + ORDER BY branch.last_accessed DESC NULLS LAST, project.name ASC + |] + +-- | Load all project listings, optionally requiring an infix match with a query. +listProjectBranches :: ProjectId -> Maybe Text -> Transaction [ProjectBranchListing] +listProjectBranches projectId mayUnsafeQuery = do + let mayQuery = fmap (likeEscape '\\') mayUnsafeQuery + queryListRow + [sql| + SELECT project_branch.name + FROM project_branch + WHERE project_branch.project_id = :projectId + AND (:mayQuery IS NULL OR project_branch.name LIKE '%' || :mayQuery || '%' ESCAPE '\') + ORDER BY project_branch.last_accessed DESC NULLS LAST, project_branch.name ASC + |] diff --git a/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Types.hs b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Types.hs new file mode 100644 index 0000000000..25eb921941 --- /dev/null +++ b/unison-share-api/src/Unison/Server/Local/Endpoints/Projects/Types.hs @@ -0,0 +1,55 @@ +module Unison.Server.Local.Endpoints.Projects.Types + ( ProjectListing (..), + ProjectBranchListing (..), + ) +where + +import Data.Aeson (ToJSON (..)) +import Data.Aeson qualified as Aeson +import Data.OpenApi +import GHC.Generics () +import Servant.Docs +import Unison.Core.Project (ProjectBranchName (UnsafeProjectBranchName), ProjectName (UnsafeProjectName)) +import Unison.Prelude +import Unison.Server.Orphans () +import Unison.Sqlite (FromRow (..), field) + +data ProjectListing = ProjectListing + { projectName :: ProjectName, + mostRecentActiveBranch :: Maybe ProjectBranchName + } + 
deriving stock (Show, Generic) + +instance FromRow ProjectListing where + fromRow = ProjectListing <$> field <*> field + +instance ToSchema ProjectListing + +instance ToJSON ProjectListing where + toJSON (ProjectListing projectName mostRecentActiveBranch) = + Aeson.object + [ "projectName" Aeson..= projectName, + "activeBranchRef" Aeson..= mostRecentActiveBranch + ] + +instance ToSample ProjectListing where + toSamples _ = + singleSample $ ProjectListing (UnsafeProjectName "my-project") Nothing + +data ProjectBranchListing = ProjectBranchListing + { branchName :: ProjectBranchName + } + deriving stock (Show, Generic) + +instance FromRow ProjectBranchListing where + fromRow = ProjectBranchListing <$> field + +instance ToSchema ProjectBranchListing + +instance ToJSON ProjectBranchListing where + toJSON ProjectBranchListing {branchName} = + Aeson.object ["branchName" Aeson..= branchName] + +instance ToSample ProjectBranchListing where + toSamples _ = + singleSample $ ProjectBranchListing (UnsafeProjectBranchName "my-branch") diff --git a/unison-share-api/src/Unison/Server/Orphans.hs b/unison-share-api/src/Unison/Server/Orphans.hs index bab2d26fef..4b76c3e450 100644 --- a/unison-share-api/src/Unison/Server/Orphans.hs +++ b/unison-share-api/src/Unison/Server/Orphans.hs @@ -1,8 +1,14 @@ {-# LANGUAGE DataKinds #-} +{-# LANGUAGE MultiWayIf #-} {-# OPTIONS_GHC -Wno-orphans #-} module Unison.Server.Orphans where +import Codec.CBOR.Decoding qualified as CBOR +import Codec.CBOR.Encoding qualified as CBOR +import Codec.Serialise (Serialise (..)) +import Codec.Serialise qualified as CBOR +import Codec.Serialise.Class qualified as CBOR import Control.Lens import Data.Aeson import Data.Aeson qualified as Aeson @@ -12,9 +18,20 @@ import Data.List.NonEmpty (NonEmpty (..)) import Data.OpenApi import Data.Proxy import Data.Text qualified as Text +import Data.Vector (Vector) +import Data.Vector qualified as Vector import Servant import Servant.Docs (DocCapture (DocCapture), 
DocQueryParam (..), ParamKind (..), ToCapture (..), ToParam (..)) import U.Codebase.HashTags +import U.Codebase.Sqlite.Branch.Format qualified as BranchFormat +import U.Codebase.Sqlite.Causal qualified as SqliteCausal +import U.Codebase.Sqlite.Decl.Format qualified as DeclFormat +import U.Codebase.Sqlite.Entity qualified as Entity +import U.Codebase.Sqlite.LocalIds qualified as LocalIds +import U.Codebase.Sqlite.Patch.Format qualified as PatchFormat +import U.Codebase.Sqlite.TempEntity (TempEntity) +import U.Codebase.Sqlite.Term.Format qualified as TermFormat +import U.Util.Base32Hex (Base32Hex (..)) import Unison.Codebase.Editor.DisplayObject import Unison.Codebase.Path qualified as Path import Unison.Codebase.Path.Parse qualified as Path @@ -25,6 +42,7 @@ import Unison.ConstructorType qualified as CT import Unison.Core.Project (ProjectBranchName (..), ProjectName (..)) import Unison.Hash (Hash (..)) import Unison.Hash qualified as Hash +import Unison.Hash32 (Hash32 (..)) import Unison.HashQualified qualified as HQ import Unison.HashQualifiedPrime qualified as HQ' import Unison.Name (Name) @@ -34,8 +52,10 @@ import Unison.Prelude import Unison.Project import Unison.Reference qualified as Reference import Unison.Referent qualified as Referent +import Unison.Share.API.Hash (HashJWT (..)) import Unison.ShortHash (ShortHash) import Unison.ShortHash qualified as SH +import Unison.Sqlite qualified as Sqlite import Unison.Syntax.HashQualified qualified as HQ (parseText) import Unison.Syntax.HashQualifiedPrime qualified as HQ' (parseText) import Unison.Syntax.Name qualified as Name (parseTextEither, toText) @@ -387,6 +407,8 @@ deriving anyclass instance (ToSchema n) => ToSchema (HQ.HashQualified n) deriving anyclass instance (ToSchema n) => ToSchema (HQ'.HashQualified n) +deriving via Text instance Sqlite.FromField ProjectName + instance FromHttpApiData ProjectName where parseQueryParam = mapLeft tShow . 
tryInto @ProjectName @@ -406,6 +428,8 @@ instance ToSchema ProjectName deriving via Text instance ToJSON ProjectName +deriving via Text instance Sqlite.FromField ProjectBranchName + instance FromHttpApiData ProjectBranchName where parseQueryParam = mapLeft tShow . tryInto @ProjectBranchName @@ -424,3 +448,111 @@ instance ToCapture (Capture "branch-name" ProjectBranchName) where "The name of a branch in a project. E.g. @handle/name" deriving via Text instance ToJSON ProjectBranchName + +-- CBOR encodings + +deriving via Text instance Serialise Hash32 + +deriving via Text instance Serialise HashJWT + +data SyncTag + = TermComponentTag + | DeclComponentTag + | PatchTag + | NamespaceTag + | CausalTag + deriving (Eq, Show) + +instance Serialise SyncTag where + encode = \case + TermComponentTag -> CBOR.encodeWord 0 + DeclComponentTag -> CBOR.encodeWord 1 + PatchTag -> CBOR.encodeWord 2 + NamespaceTag -> CBOR.encodeWord 3 + CausalTag -> CBOR.encodeWord 4 + + decode = do + tag <- CBOR.decodeWord + case tag of + 0 -> pure TermComponentTag + 1 -> pure DeclComponentTag + 2 -> pure PatchTag + 3 -> pure NamespaceTag + 4 -> pure CausalTag + _ -> fail $ "Unknown tag: " <> show tag + +newtype ComponentBody t d = ComponentBody {unComponentBody :: (LocalIds.LocalIds' t d, ByteString)} + +instance (Serialise t, Serialise d) => Serialise (ComponentBody t d) where + encode (ComponentBody (LocalIds.LocalIds {textLookup, defnLookup}, bytes)) = + CBOR.encodeVector textLookup + <> CBOR.encodeVector defnLookup + <> CBOR.encodeBytes bytes + + decode = do + textLookup <- CBOR.decodeVector + defnLookup <- CBOR.decodeVector + bytes <- CBOR.decodeBytes + pure $ ComponentBody (LocalIds.LocalIds {textLookup, defnLookup}, bytes) + +instance Serialise TempEntity where + encode = \case + Entity.TC (TermFormat.SyncTerm (TermFormat.SyncLocallyIndexedComponent elements)) -> + CBOR.encode TermComponentTag + <> CBOR.encodeVector (coerce @(Vector (LocalIds.LocalIds' Text Hash32, ByteString)) @(Vector 
(ComponentBody Text Hash32)) elements) + Entity.DC (DeclFormat.SyncDecl (DeclFormat.SyncLocallyIndexedComponent elements)) -> + CBOR.encode DeclComponentTag + <> CBOR.encodeVector (coerce @(Vector (LocalIds.LocalIds' Text Hash32, ByteString)) @(Vector (ComponentBody Text Hash32)) elements) + Entity.P (PatchFormat.SyncDiff {}) -> error "Serializing Diffs are not supported" + Entity.P (PatchFormat.SyncFull (PatchFormat.LocalIds {patchTextLookup, patchHashLookup, patchDefnLookup}) bytes) -> + CBOR.encode PatchTag + <> CBOR.encodeVector patchTextLookup + <> CBOR.encodeVector patchHashLookup + <> CBOR.encodeVector patchDefnLookup + <> CBOR.encodeBytes bytes + Entity.N (BranchFormat.SyncDiff {}) -> error "Serializing Diffs are not supported" + Entity.N (BranchFormat.SyncFull (BranchFormat.LocalIds {branchTextLookup, branchDefnLookup, branchPatchLookup, branchChildLookup}) (BranchFormat.LocalBranchBytes bytes)) -> + CBOR.encode NamespaceTag + <> CBOR.encodeVector branchTextLookup + <> CBOR.encodeVector branchDefnLookup + <> CBOR.encodeVector branchPatchLookup + <> CBOR.encodeVector branchChildLookup + <> CBOR.encodeBytes bytes + Entity.C (SqliteCausal.SyncCausalFormat {valueHash, parents}) -> + CBOR.encode CausalTag + <> CBOR.encode valueHash + <> CBOR.encodeVector parents + + decode = do + CBOR.decode >>= \case + TermComponentTag -> do + elements <- coerce @(Vector (ComponentBody Text Hash32)) @(Vector (LocalIds.LocalIds' Text Hash32, ByteString)) <$> CBOR.decodeVector + pure $ Entity.TC (TermFormat.SyncTerm (TermFormat.SyncLocallyIndexedComponent elements)) + DeclComponentTag -> do + elements <- coerce @(Vector (ComponentBody Text Hash32)) @(Vector (LocalIds.LocalIds' Text Hash32, ByteString)) <$> CBOR.decodeVector + pure $ Entity.DC (DeclFormat.SyncDecl (DeclFormat.SyncLocallyIndexedComponent elements)) + PatchTag -> do + patchTextLookup <- CBOR.decodeVector + patchHashLookup <- CBOR.decodeVector + patchDefnLookup <- CBOR.decodeVector + bytes <- CBOR.decodeBytes + pure 
$ Entity.P (PatchFormat.SyncFull (PatchFormat.LocalIds {patchTextLookup, patchHashLookup, patchDefnLookup}) bytes) + NamespaceTag -> do + branchTextLookup <- CBOR.decodeVector + branchDefnLookup <- CBOR.decodeVector + branchPatchLookup <- CBOR.decodeVector + branchChildLookup <- CBOR.decodeVector + bytes <- CBOR.decodeBytes + pure $ Entity.N (BranchFormat.SyncFull (BranchFormat.LocalIds {branchTextLookup, branchDefnLookup, branchPatchLookup, branchChildLookup}) (BranchFormat.LocalBranchBytes bytes)) + CausalTag -> do + valueHash <- CBOR.decode + parents <- CBOR.decodeVector + pure $ Entity.C (SqliteCausal.SyncCausalFormat {valueHash, parents}) + +encodeVectorWith :: (a -> CBOR.Encoding) -> Vector.Vector a -> CBOR.Encoding +encodeVectorWith f xs = + CBOR.encodeListLen (fromIntegral $ Vector.length xs) + <> (foldr (\a b -> f a <> b) mempty xs) + +instance Ord CBOR.DeserialiseFailure where + compare (CBOR.DeserialiseFailure o s) (CBOR.DeserialiseFailure o' s') = compare (o, s) (o', s') diff --git a/unison-share-api/src/Unison/Sync/EntityValidation.hs b/unison-share-api/src/Unison/Sync/EntityValidation.hs index 02ad6d8330..4e8c854407 100644 --- a/unison-share-api/src/Unison/Sync/EntityValidation.hs +++ b/unison-share-api/src/Unison/Sync/EntityValidation.hs @@ -4,6 +4,7 @@ -- | Module for validating hashes of entities received/sent via sync. module Unison.Sync.EntityValidation ( validateEntity, + validateTempEntity, ) where @@ -21,6 +22,7 @@ import U.Codebase.Sqlite.HashHandle qualified as HH import U.Codebase.Sqlite.Orphans () import U.Codebase.Sqlite.Patch.Format qualified as PatchFormat import U.Codebase.Sqlite.Serialization qualified as Serialization +import U.Codebase.Sqlite.TempEntity (TempEntity) import U.Codebase.Sqlite.Term.Format qualified as TermFormat import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle) import Unison.Hash (Hash) @@ -35,7 +37,13 @@ import Unison.Sync.Types qualified as Share -- We should add more validation as more entities are shared. 
validateEntity :: Hash32 -> Share.Entity Text Hash32 Hash32 -> Maybe Share.EntityValidationError validateEntity expectedHash32 entity = do - case Share.entityToTempEntity id entity of + validateTempEntity expectedHash32 $ Share.entityToTempEntity id entity + +-- | Note: We currently only validate Namespace hashes. +-- We should add more validation as more entities are shared. +validateTempEntity :: Hash32 -> TempEntity -> Maybe Share.EntityValidationError +validateTempEntity expectedHash32 tempEntity = do + case tempEntity of Entity.TC (TermFormat.SyncTerm localComp) -> do validateTerm expectedHash localComp Entity.DC (DeclFormat.SyncDecl localComp) -> do diff --git a/unison-share-api/src/Unison/Sync/Types.hs b/unison-share-api/src/Unison/Sync/Types.hs index 35d7030cc8..51a1dd4538 100644 --- a/unison-share-api/src/Unison/Sync/Types.hs +++ b/unison-share-api/src/Unison/Sync/Types.hs @@ -56,7 +56,11 @@ module Unison.Sync.Types ) where -import Control.Lens (both, traverseOf) +import Codec.CBOR.Decoding qualified as CBOR +import Codec.CBOR.Encoding qualified as CBOR +import Codec.Serialise +import Codec.Serialise qualified as CBOR +import Control.Lens (both, foldMapOf, traverseOf) import Data.Aeson import Data.Aeson qualified as Aeson import Data.Aeson.Types qualified as Aeson @@ -73,6 +77,7 @@ import U.Codebase.Sqlite.Branch.Format (LocalBranchBytes (..)) import Unison.Hash32 (Hash32) import Unison.Hash32.Orphans.Aeson () import Unison.Prelude +import Unison.Server.Orphans () import Unison.Share.API.Hash (HashJWT) import Unison.Util.Set qualified as Set @@ -91,6 +96,7 @@ instance FromJSON Base64Bytes where newtype RepoInfo = RepoInfo {unRepoInfo :: Text} deriving newtype (Show, Eq, Ord, ToJSON, FromJSON) + deriving (Serialise) via Text data Path = Path { -- This is a nonempty list, where we require the first segment to be the repo name / user name / whatever, @@ -168,28 +174,8 @@ entityHashes_ f = \case C causal -> C <$> causalHashes_ f causal -- | Get the direct 
dependencies of an entity (which are actually sync'd). --- --- FIXME use generic-lens here? (typed @hash) entityDependencies :: (Ord hash) => Entity text noSyncHash hash -> Set hash -entityDependencies = \case - TC (TermComponent terms) -> flip foldMap terms \(LocalIds {hashes}, _term) -> Set.fromList hashes - DC (DeclComponent decls) -> flip foldMap decls \(LocalIds {hashes}, _decl) -> Set.fromList hashes - P Patch {newHashLookup} -> Set.fromList newHashLookup - PD PatchDiff {parent, newHashLookup} -> Set.insert parent (Set.fromList newHashLookup) - N Namespace {defnLookup, patchLookup, childLookup} -> - Set.unions - [ Set.fromList defnLookup, - Set.fromList patchLookup, - foldMap (\(namespaceHash, causalHash) -> Set.fromList [namespaceHash, causalHash]) childLookup - ] - ND NamespaceDiff {parent, defnLookup, patchLookup, childLookup} -> - Set.unions - [ Set.singleton parent, - Set.fromList defnLookup, - Set.fromList patchLookup, - foldMap (\(namespaceHash, causalHash) -> Set.fromList [namespaceHash, causalHash]) childLookup - ] - C Causal {namespaceHash, parents} -> Set.insert namespaceHash parents +entityDependencies = foldMapOf entityHashes_ Set.singleton data TermComponent text hash = TermComponent [(LocalIds text hash, ByteString)] deriving stock (Show, Eq, Functor, Ord) @@ -482,6 +468,27 @@ data EntityType | CausalType deriving stock (Eq, Ord, Show) +instance Serialise EntityType where + encode = \case + TermComponentType -> CBOR.encodeWord8 0 + DeclComponentType -> CBOR.encodeWord8 1 + PatchType -> CBOR.encodeWord8 2 + PatchDiffType -> CBOR.encodeWord8 3 + NamespaceType -> CBOR.encodeWord8 4 + NamespaceDiffType -> CBOR.encodeWord8 5 + CausalType -> CBOR.encodeWord8 6 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure TermComponentType + 1 -> pure DeclComponentType + 2 -> pure PatchType + 3 -> pure PatchDiffType + 4 -> pure NamespaceType + 5 -> pure NamespaceDiffType + 6 -> pure CausalType + _ -> fail "invalid tag" + instance ToJSON EntityType 
where toJSON = String . \case @@ -618,6 +625,43 @@ data EntityValidationError deriving stock (Show, Eq, Ord) deriving anyclass (Exception) +data EntityValidationErrorTag + = HashMismatchTag + | UnsupportedTypeTag + | InvalidByteEncodingTag + | HashResolutionFailureTag + deriving stock (Eq, Show) + +instance Serialise EntityValidationErrorTag where + encode = \case + HashMismatchTag -> CBOR.encodeWord8 0 + UnsupportedTypeTag -> CBOR.encodeWord8 1 + InvalidByteEncodingTag -> CBOR.encodeWord8 2 + HashResolutionFailureTag -> CBOR.encodeWord8 3 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure HashMismatchTag + 1 -> pure UnsupportedTypeTag + 2 -> pure InvalidByteEncodingTag + 3 -> pure HashResolutionFailureTag + _ -> fail "invalid tag" + +instance Serialise EntityValidationError where + encode = \case + EntityHashMismatch typ mismatch -> CBOR.encode HashMismatchTag <> CBOR.encode typ <> CBOR.encode mismatch + UnsupportedEntityType hash typ -> CBOR.encode UnsupportedTypeTag <> CBOR.encode hash <> CBOR.encode typ + InvalidByteEncoding hash typ errMsg -> CBOR.encode InvalidByteEncodingTag <> CBOR.encode hash <> CBOR.encode typ <> CBOR.encode errMsg + HashResolutionFailure hash -> CBOR.encode HashResolutionFailureTag <> CBOR.encode hash + + decode = do + tag <- CBOR.decode + case tag of + HashMismatchTag -> EntityHashMismatch <$> CBOR.decode <*> CBOR.decode + UnsupportedTypeTag -> UnsupportedEntityType <$> CBOR.decode <*> CBOR.decode + InvalidByteEncodingTag -> InvalidByteEncoding <$> CBOR.decode <*> CBOR.decode <*> CBOR.decode + HashResolutionFailureTag -> HashResolutionFailure <$> CBOR.decode + instance ToJSON EntityValidationError where toJSON = \case EntityHashMismatch typ mismatch -> jsonUnion "mismatched_hash" (object ["type" .= typ, "mismatch" .= mismatch]) @@ -693,6 +737,10 @@ data HashMismatchForEntity = HashMismatchForEntity } deriving stock (Show, Eq, Ord) +instance Serialise HashMismatchForEntity where + encode (HashMismatchForEntity supplied 
computed) = CBOR.encode supplied <> CBOR.encode computed + decode = HashMismatchForEntity <$> CBOR.decode <*> CBOR.decode + instance ToJSON UploadEntitiesResponse where toJSON = \case UploadEntitiesSuccess -> jsonUnion "success" (Object mempty) diff --git a/unison-share-api/src/Unison/SyncV2/Types.hs b/unison-share-api/src/Unison/SyncV2/Types.hs new file mode 100644 index 0000000000..2f4432ee74 --- /dev/null +++ b/unison-share-api/src/Unison/SyncV2/Types.hs @@ -0,0 +1,308 @@ +module Unison.SyncV2.Types + ( DownloadEntitiesRequest (..), + DownloadEntitiesChunk (..), + EntityChunk (..), + ErrorChunk (..), + StreamInitInfo (..), + SyncError (..), + DownloadEntitiesError (..), + CBORBytes (..), + EntityKind (..), + serialiseCBORBytes, + deserialiseOrFailCBORBytes, + BranchRef (..), + PullError (..), + EntitySorting (..), + Version (..), + ) +where + +import Codec.CBOR.Encoding qualified as CBOR +import Codec.Serialise (Serialise (..)) +import Codec.Serialise qualified as CBOR +import Codec.Serialise.Decoding qualified as CBOR +import Control.Exception (Exception) +import Data.Aeson (FromJSON (..), ToJSON (..), object, withObject, (.:), (.=)) +import Data.Map (Map) +import Data.Map qualified as Map +import Data.Set (Set) +import Data.Text (Text) +import Data.Text qualified as Text +import Data.Word (Word16, Word64) +import U.Codebase.HashTags (CausalHash) +import U.Codebase.Sqlite.TempEntity (TempEntity) +import Unison.Core.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName) +import Unison.Debug qualified as Debug +import Unison.Hash32 (Hash32) +import Unison.Prelude (From (..)) +import Unison.Server.Orphans () +import Unison.Share.API.Hash (HashJWT) +import Unison.Sync.Types qualified as SyncV1 +import Unison.Util.Servant.CBOR + +newtype BranchRef = BranchRef {unBranchRef :: Text} + deriving (Serialise, Eq, Show, Ord, ToJSON, FromJSON) via Text + +instance From (ProjectAndBranch ProjectName ProjectBranchName) BranchRef where + from pab = BranchRef $ from 
pab + +data GetCausalHashErrorTag + = GetCausalHashNoReadPermissionTag + | GetCausalHashUserNotFoundTag + | GetCausalHashInvalidBranchRefTag + deriving stock (Show, Eq, Ord) + +instance Serialise GetCausalHashErrorTag where + encode GetCausalHashNoReadPermissionTag = CBOR.encodeWord8 0 + encode GetCausalHashUserNotFoundTag = CBOR.encodeWord8 1 + encode GetCausalHashInvalidBranchRefTag = CBOR.encodeWord8 2 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure GetCausalHashNoReadPermissionTag + 1 -> pure GetCausalHashUserNotFoundTag + 2 -> pure GetCausalHashInvalidBranchRefTag + _ -> fail "invalid tag" + +data DownloadEntitiesRequest = DownloadEntitiesRequest + { causalHash :: HashJWT, + branchRef :: BranchRef, + knownHashes :: Set Hash32 + } + +instance Serialise DownloadEntitiesRequest where + encode (DownloadEntitiesRequest {causalHash, branchRef, knownHashes}) = + encode causalHash <> encode branchRef <> encode knownHashes + decode = DownloadEntitiesRequest <$> decode <*> decode <*> decode + +instance FromJSON DownloadEntitiesRequest where + parseJSON = withObject "DownloadEntitiesRequest" $ \o -> do + causalHash <- o .: "causalHash" + branchRef <- o .: "branchRef" + knownHashes <- o .: "knownHashes" + pure DownloadEntitiesRequest {causalHash, branchRef, knownHashes} + +instance ToJSON DownloadEntitiesRequest where + toJSON (DownloadEntitiesRequest {causalHash, branchRef, knownHashes}) = + object + [ "causalHash" .= causalHash, + "branchRef" .= branchRef, + "knownHashes" .= knownHashes + ] + +data DownloadEntitiesError + = DownloadEntitiesNoReadPermission BranchRef + | -- | msg, branchRef + DownloadEntitiesInvalidBranchRef Text BranchRef + | -- | userHandle + DownloadEntitiesUserNotFound Text + | -- | project shorthand + DownloadEntitiesProjectNotFound Text + | DownloadEntitiesEntityValidationFailure SyncV1.EntityValidationError + deriving stock (Eq, Show, Ord) + +data DownloadEntitiesErrorTag + = NoReadPermissionTag + | InvalidBranchRefTag + | 
UserNotFoundTag + | ProjectNotFoundTag + | EntityValidationFailureTag + deriving stock (Eq, Show, Ord) + +instance Serialise DownloadEntitiesErrorTag where + encode = \case + NoReadPermissionTag -> CBOR.encodeWord8 0 + InvalidBranchRefTag -> CBOR.encodeWord8 1 + UserNotFoundTag -> CBOR.encodeWord8 2 + ProjectNotFoundTag -> CBOR.encodeWord8 3 + EntityValidationFailureTag -> CBOR.encodeWord8 4 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure NoReadPermissionTag + 1 -> pure InvalidBranchRefTag + 2 -> pure UserNotFoundTag + 3 -> pure ProjectNotFoundTag + 4 -> pure EntityValidationFailureTag + _ -> fail "invalid tag" + +instance Serialise DownloadEntitiesError where + encode = \case + DownloadEntitiesNoReadPermission branchRef -> CBOR.encode NoReadPermissionTag <> CBOR.encode branchRef + DownloadEntitiesInvalidBranchRef msg branchRef -> CBOR.encode InvalidBranchRefTag <> CBOR.encode (msg, branchRef) + DownloadEntitiesUserNotFound userHandle -> CBOR.encode UserNotFoundTag <> CBOR.encode userHandle + DownloadEntitiesProjectNotFound projectShorthand -> CBOR.encode ProjectNotFoundTag <> CBOR.encode projectShorthand + DownloadEntitiesEntityValidationFailure err -> CBOR.encode EntityValidationFailureTag <> CBOR.encode err + + decode = do + tag <- CBOR.decode + case tag of + NoReadPermissionTag -> DownloadEntitiesNoReadPermission <$> CBOR.decode + InvalidBranchRefTag -> uncurry DownloadEntitiesInvalidBranchRef <$> CBOR.decode + UserNotFoundTag -> DownloadEntitiesUserNotFound <$> CBOR.decode + ProjectNotFoundTag -> DownloadEntitiesProjectNotFound <$> CBOR.decode + EntityValidationFailureTag -> DownloadEntitiesEntityValidationFailure <$> CBOR.decode + +data EntitySorting + = -- all dependencies of an entity are guaranteed to be sent before the entity itself + DependenciesFirst + | -- no guarantees. 
+ Unsorted + deriving (Show, Eq, Ord) + +instance Serialise EntitySorting where + encode = \case + DependenciesFirst -> CBOR.encodeWord8 0 + Unsorted -> CBOR.encodeWord8 1 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure DependenciesFirst + 1 -> pure Unsorted + _ -> fail "invalid tag" + +newtype Version = Version Word16 + deriving stock (Show) + deriving newtype (Eq, Ord, Serialise) + +data StreamInitInfo = StreamInitInfo + { version :: Version, + entitySorting :: EntitySorting, + numEntities :: Maybe Word64, + rootCausalHash :: Hash32, + rootBranchRef :: Maybe BranchRef + } + deriving (Show, Eq, Ord) + +decodeMapKey :: (Serialise r) => Text -> Map Text UnknownCBORBytes -> CBOR.Decoder s r +decodeMapKey k m = + optionalDecodeMapKey k m >>= \case + Nothing -> fail $ "Expected key: " <> Text.unpack k + Just x -> pure x + +optionalDecodeMapKey :: (Serialise r) => Text -> Map Text UnknownCBORBytes -> CBOR.Decoder s (Maybe r) +optionalDecodeMapKey k m = + case Map.lookup k m of + Nothing -> pure Nothing + Just bs -> Just <$> decodeUnknownCBORBytes bs + +-- | Serialised as a map to allow for future expansion +instance Serialise StreamInitInfo where + encode (StreamInitInfo {version, entitySorting, numEntities, rootCausalHash, rootBranchRef}) = + CBOR.encode + ( Map.fromList $ + [ ("v" :: Text, serialiseUnknownCBORBytes version), + ("es", serialiseUnknownCBORBytes entitySorting), + ("rc", serialiseUnknownCBORBytes rootCausalHash) + ] + <> maybe [] (\ne -> [("ne", serialiseUnknownCBORBytes ne)]) numEntities + <> maybe [] (\br -> [("br", serialiseUnknownCBORBytes br)]) rootBranchRef + ) + decode = do + Debug.debugLogM Debug.Temp "Decoding StreamInitInfo" + Debug.debugLogM Debug.Temp "Decoding Map" + m <- CBOR.decode + Debug.debugLogM Debug.Temp "Decoding Version" + version <- decodeMapKey "v" m + Debug.debugLogM Debug.Temp "Decoding Entity Sorting" + entitySorting <- decodeMapKey "es" m + Debug.debugLogM Debug.Temp "Decoding Number of Entities" + 
numEntities <- (optionalDecodeMapKey "ne" m) + Debug.debugLogM Debug.Temp "Decoding Root Causal Hash" + rootCausalHash <- decodeMapKey "rc" m + Debug.debugLogM Debug.Temp "Decoding Branch Ref" + rootBranchRef <- optionalDecodeMapKey "br" m + pure StreamInitInfo {version, entitySorting, numEntities, rootCausalHash, rootBranchRef} + +data EntityChunk = EntityChunk + { hash :: Hash32, + entityCBOR :: CBORBytes TempEntity + } + deriving (Show, Eq, Ord) + +instance Serialise EntityChunk where + encode (EntityChunk {hash, entityCBOR}) = CBOR.encode hash <> CBOR.encode entityCBOR + decode = EntityChunk <$> CBOR.decode <*> CBOR.decode + +data ErrorChunk = ErrorChunk + { err :: DownloadEntitiesError + } + deriving (Show, Eq, Ord) + +instance Serialise ErrorChunk where + encode (ErrorChunk {err}) = CBOR.encode err + decode = ErrorChunk <$> CBOR.decode + +-- | A chunk of the download entities response stream. +data DownloadEntitiesChunk + = InitialC StreamInitInfo + | EntityC EntityChunk + | ErrorC ErrorChunk + deriving (Show, Eq, Ord) + +data DownloadEntitiesChunkTag = InitialChunkTag | EntityChunkTag | ErrorChunkTag + deriving (Show, Eq, Ord) + +instance Serialise DownloadEntitiesChunkTag where + encode InitialChunkTag = CBOR.encodeWord8 0 + encode EntityChunkTag = CBOR.encodeWord8 1 + encode ErrorChunkTag = CBOR.encodeWord8 2 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure InitialChunkTag + 1 -> pure EntityChunkTag + 2 -> pure ErrorChunkTag + _ -> fail "invalid tag" + +instance Serialise DownloadEntitiesChunk where + encode (EntityC ec) = encode EntityChunkTag <> CBOR.encode ec + encode (ErrorC ec) = encode ErrorChunkTag <> CBOR.encode ec + encode (InitialC ic) = encode InitialChunkTag <> encode ic + decode = do + tag <- decode + case tag of + InitialChunkTag -> InitialC <$> decode + EntityChunkTag -> EntityC <$> decode + ErrorChunkTag -> ErrorC <$> decode + +-- | An error occurred while pulling code from Unison Share. 
+data PullError + = PullError'DownloadEntities DownloadEntitiesError + | PullError'Sync SyncError + deriving stock (Show, Eq, Ord) + deriving anyclass (Exception) + +data SyncError + = SyncErrorExpectedResultNotInMain CausalHash + | SyncErrorDeserializationFailure CBOR.DeserialiseFailure + | SyncErrorMissingInitialChunk + | SyncErrorMisplacedInitialChunk + | SyncErrorStreamFailure Text + | SyncErrorUnsupportedVersion Version + deriving stock (Show, Eq, Ord) + +data EntityKind + = CausalEntity + | NamespaceEntity + | TermEntity + | TypeEntity + | PatchEntity + deriving (Show, Eq, Ord) + +instance Serialise EntityKind where + encode = \case + CausalEntity -> CBOR.encodeWord8 0 + NamespaceEntity -> CBOR.encodeWord8 1 + TermEntity -> CBOR.encodeWord8 2 + TypeEntity -> CBOR.encodeWord8 3 + PatchEntity -> CBOR.encodeWord8 4 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure CausalEntity + 1 -> pure NamespaceEntity + 2 -> pure TermEntity + 3 -> pure TypeEntity + 4 -> pure PatchEntity + _ -> fail "invalid tag" diff --git a/unison-share-api/src/Unison/Util/Servant/CBOR.hs b/unison-share-api/src/Unison/Util/Servant/CBOR.hs new file mode 100644 index 0000000000..18fd94904c --- /dev/null +++ b/unison-share-api/src/Unison/Util/Servant/CBOR.hs @@ -0,0 +1,88 @@ +-- | Servant configuration for the CBOR media type +-- +-- Adapted from https://hackage.haskell.org/package/servant-serialization-0.3/docs/Servant-API-ContentTypes-SerialiseCBOR.html via MIT license +module Unison.Util.Servant.CBOR + ( CBOR, + UnknownCBORBytes, + CBORBytes (..), + deserialiseOrFailCBORBytes, + serialiseCBORBytes, + decodeCBORBytes, + decodeUnknownCBORBytes, + serialiseUnknownCBORBytes, + ) +where + +import Codec.CBOR.Read (DeserialiseFailure (..)) +import Codec.Serialise (Serialise, deserialiseOrFail, serialise) +import Codec.Serialise qualified as CBOR +import Codec.Serialise.Decoding qualified as CBOR +import Data.ByteString.Lazy qualified as BL +import Data.List.NonEmpty qualified as 
NonEmpty +import Network.HTTP.Media.MediaType qualified as MediaType +import Servant + +-- | Content-type for encoding and decoding objects as their CBOR representations +data CBOR + +-- | Mime-type for CBOR and additional ones using the word "hackage" and the +-- name of the package "serialise". +instance Accept CBOR where + contentTypes Proxy = + NonEmpty.singleton ("application" MediaType.// "cbor") + +-- | +-- +-- >>> mimeRender (Proxy :: Proxy CBOR) ("Hello" :: String) +-- "eHello" +instance (Serialise a) => MimeRender CBOR a where + mimeRender Proxy = serialise + +-- | +-- +-- >>> let bsl = mimeRender (Proxy :: Proxy CBOR) (3.14 :: Float) +-- >>> mimeUnrender (Proxy :: Proxy CBOR) bsl :: Either String Float +-- Right 3.14 +-- +-- >>> mimeUnrender (Proxy :: Proxy CBOR) (bsl <> "trailing garbage") :: Either String Float +-- Right 3.14 +-- +-- >>> mimeUnrender (Proxy :: Proxy CBOR) ("preceding garbage" <> bsl) :: Either String Float +-- Left "Codec.Serialise.deserialiseOrFail: expected float at byte-offset 0" +instance (Serialise a) => MimeUnrender CBOR a where + mimeUnrender Proxy = mapLeft prettyErr . deserialiseOrFail + where + mapLeft f = either (Left . f) Right + prettyErr (DeserialiseFailure offset err) = + "Codec.Serialise.deserialiseOrFail: " ++ err ++ " at byte-offset " ++ show offset + +-- | Wrapper for CBOR data that has already been serialized. +-- In our case, we use this because we may load pre-serialized CBOR directly from the database, +-- but it's also useful in allowing us to more quickly seek through a CBOR stream, since we only need to decode the CBOR when/if we actually need to use it, and can skip past it using a byte offset otherwise. +-- +-- The 't' phantom type is the type of the data encoded in the bytestring. +newtype CBORBytes t = CBORBytes BL.ByteString + deriving (Serialise) via (BL.ByteString) + deriving (Eq, Show, Ord) + +-- | Deserialize a 'CBORBytes' value into its tagged type, throwing an error if the deserialization fails. 
+deserialiseOrFailCBORBytes :: (Serialise t) => CBORBytes t -> Either CBOR.DeserialiseFailure t +deserialiseOrFailCBORBytes (CBORBytes bs) = CBOR.deserialiseOrFail bs + +decodeCBORBytes :: (Serialise t) => CBORBytes t -> CBOR.Decoder s t +decodeCBORBytes (CBORBytes bs) = decodeUnknownCBORBytes (CBORBytes bs) + +decodeUnknownCBORBytes :: (Serialise t) => UnknownCBORBytes -> CBOR.Decoder s t +decodeUnknownCBORBytes (CBORBytes bs) = case deserialiseOrFailCBORBytes (CBORBytes bs) of + Left err -> fail (show err) + Right t -> pure t + +serialiseCBORBytes :: (Serialise t) => t -> CBORBytes t +serialiseCBORBytes = CBORBytes . CBOR.serialise + +serialiseUnknownCBORBytes :: (Serialise t) => t -> UnknownCBORBytes +serialiseUnknownCBORBytes = CBORBytes . CBOR.serialise + +data Unknown + +type UnknownCBORBytes = CBORBytes Unknown diff --git a/unison-share-api/tests/Main.hs b/unison-share-api/tests/Main.hs new file mode 100644 index 0000000000..232452d79b --- /dev/null +++ b/unison-share-api/tests/Main.hs @@ -0,0 +1,23 @@ +module Main where + +import EasyTest +import System.Environment (getArgs) +import System.IO +import System.IO.CodePage (withCP65001) +import Unison.Test.Sync.Roundtrip qualified as SyncRoundtrip + +test :: Test () +test = + tests + [ SyncRoundtrip.test + ] + +main :: IO () +main = withCP65001 do + args <- getArgs + mapM_ (`hSetEncoding` utf8) [stdout, stdin, stderr] + case args of + [] -> runOnly "" test + [prefix] -> runOnly prefix test + [seed, prefix] -> rerunOnly (read seed) prefix test + _ -> error "expected no args, a prefix, or a seed and a prefix" diff --git a/unison-share-api/tests/Unison/Test/Sync/Gen.hs b/unison-share-api/tests/Unison/Test/Sync/Gen.hs new file mode 100644 index 0000000000..8e45bc1445 --- /dev/null +++ b/unison-share-api/tests/Unison/Test/Sync/Gen.hs @@ -0,0 +1,93 @@ +-- | Hedghog generators for Sync types. 
+module Unison.Test.Sync.Gen + ( genTempEntity, + ) +where + +import Data.ByteString (ByteString) +import Data.ByteString.Short qualified as BShort +import Data.Text (Text) +import Data.Vector qualified as Vector +import Hedgehog +import Hedgehog.Gen qualified as Gen +import Hedgehog.Range qualified as Range +import U.Codebase.Sqlite.Branch.Format qualified as BranchFormat +import U.Codebase.Sqlite.Causal qualified as CausalFormat +import U.Codebase.Sqlite.Decl.Format qualified as DeclFormat +import U.Codebase.Sqlite.Entity qualified as Entity +import U.Codebase.Sqlite.LocalIds qualified as LocalIds +import U.Codebase.Sqlite.Patch.Format qualified as PatchFormat +import U.Codebase.Sqlite.TempEntity (TempEntity) +import U.Codebase.Sqlite.Term.Format qualified as TermFormat +import Unison.Hash (Hash (..)) +import Unison.Hash32 (Hash32) +import Unison.Hash32 qualified as Hash32 + +genTempEntity :: Gen TempEntity +genTempEntity = do + Gen.choice + [ Entity.TC <$> genSyncTermFormat, + Entity.DC <$> genSyncDeclFormat, + Entity.P <$> genPatchFormat, + Entity.N <$> genNamespaceFormat, + Entity.C <$> genCausalFormat + ] + +genSyncTermFormat :: Gen (TermFormat.SyncTermFormat' Text Hash32) +genSyncTermFormat = do + elems <- Gen.list (Range.linear 1 4) do + localIds <- genLocalIds + term <- genBodyBytes + pure (localIds, term) + pure $ TermFormat.SyncTerm $ TermFormat.SyncLocallyIndexedComponent $ Vector.fromList elems + +genSyncDeclFormat :: Gen (DeclFormat.SyncDeclFormat' Text Hash32) +genSyncDeclFormat = do + elems <- Gen.list (Range.linear 1 4) do + localIds <- genLocalIds + decl <- genBodyBytes + pure (localIds, decl) + pure $ DeclFormat.SyncDecl $ DeclFormat.SyncLocallyIndexedComponent $ Vector.fromList elems + +genPatchFormat :: Gen (PatchFormat.SyncPatchFormat' Hash32 Text Hash32 Hash32) +genPatchFormat = do + patchTextLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genTextLiteral + patchHashLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genHash32 
+ patchDefnLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genHash32 + let localIds = PatchFormat.LocalIds {patchTextLookup, patchHashLookup, patchDefnLookup} + body <- genBodyBytes + pure $ PatchFormat.SyncFull localIds body + +genNamespaceFormat :: Gen (BranchFormat.SyncBranchFormat' Hash32 Text Hash32 Hash32 (Hash32, Hash32)) +genNamespaceFormat = do + branchTextLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genTextLiteral + branchDefnLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genHash32 + branchPatchLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) genHash32 + branchChildLookup <- Vector.fromList <$> Gen.list (Range.linear 0 5) ((,) <$> genHash32 <*> genHash32) + let branchLocalIds = BranchFormat.LocalIds {branchTextLookup, branchDefnLookup, branchPatchLookup, branchChildLookup} + body <- BranchFormat.LocalBranchBytes <$> genBodyBytes + pure $ BranchFormat.SyncFull branchLocalIds body + +genCausalFormat :: Gen (CausalFormat.SyncCausalFormat' Hash32 Hash32) +genCausalFormat = do + valueHash <- genHash32 + parents <- Vector.fromList <$> Gen.list (Range.linear 0 5) genHash32 + pure $ CausalFormat.SyncCausalFormat {valueHash, parents} + +genBodyBytes :: Gen ByteString +genBodyBytes = Gen.bytes (Range.linear 0 100) + +genLocalIds :: Gen (LocalIds.LocalIds' Text Hash32) +genLocalIds = do + textLookup <- Vector.fromList <$> Gen.list (Range.linear 0 10) genTextLiteral + defnLookup <- Vector.fromList <$> Gen.list (Range.linear 0 10) genHash32 + pure $ LocalIds.LocalIds {textLookup, defnLookup} + +genHash32 :: Gen Hash32 +genHash32 = Hash32.fromHash <$> genHash + +genHash :: Gen Hash +genHash = Hash . 
BShort.toShort <$> Gen.bytes (Range.singleton 64) + +genTextLiteral :: Gen Text +genTextLiteral = Gen.text (Range.linear 0 100) Gen.unicodeAll diff --git a/unison-share-api/tests/Unison/Test/Sync/Roundtrip.hs b/unison-share-api/tests/Unison/Test/Sync/Roundtrip.hs new file mode 100644 index 0000000000..fb83748817 --- /dev/null +++ b/unison-share-api/tests/Unison/Test/Sync/Roundtrip.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE OverloadedStrings #-} + +-- | Roundtrip tests for types used in sync. +module Unison.Test.Sync.Roundtrip (Unison.Test.Sync.Roundtrip.test) where + +import Codec.Serialise qualified as Serialise +import EasyTest qualified as EasyTest +import Hedgehog hiding (Test, test) +import Unison.Prelude +import Unison.Server.Orphans () +import Unison.Test.Sync.Gen qualified as Gen + +test :: EasyTest.Test () +test = + void . EasyTest.scope "syncv2.roundtrip" $ do + success <- + EasyTest.io $ + checkParallel $ + Group + "syncv2.roundtrip" + [ ("termComponentRoundtrip", termComponentRoundtrip) + ] + EasyTest.expect success + +termComponentRoundtrip :: Property +termComponentRoundtrip = + property $ do + te <- forAll $ Gen.genTempEntity + (Serialise.deserialise . Serialise.serialise $ te) === te diff --git a/unison-share-api/unison-share-api.cabal b/unison-share-api/unison-share-api.cabal index 52cb824d14..d56eb5fb7a 100644 --- a/unison-share-api/unison-share-api.cabal +++ b/unison-share-api/unison-share-api.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.36.0. +-- This file has been generated from package.yaml by hpack version 0.37.0. 
-- -- see: https://github.com/sol/hpack @@ -34,6 +34,8 @@ library Unison.Server.Local.Endpoints.NamespaceDetails Unison.Server.Local.Endpoints.NamespaceListing Unison.Server.Local.Endpoints.Projects + Unison.Server.Local.Endpoints.Projects.Queries + Unison.Server.Local.Endpoints.Projects.Types Unison.Server.Local.Endpoints.UCM Unison.Server.NameSearch Unison.Server.NameSearch.FromNames @@ -48,7 +50,9 @@ library Unison.Sync.Common Unison.Sync.EntityValidation Unison.Sync.Types + Unison.SyncV2.Types Unison.Util.Find + Unison.Util.Servant.CBOR hs-source-dirs: src default-extensions: @@ -81,6 +85,7 @@ library TypeOperators ViewPatterns ImportQualifiedPost + QuasiQuotes ghc-options: -Wall build-depends: Diff @@ -90,6 +95,7 @@ library , binary , bytes , bytestring + , cborg , containers , directory , errors @@ -105,6 +111,7 @@ library , nonempty-containers , openapi3 , regex-tdfa + , serialise , servant , servant-docs , servant-openapi3 @@ -126,6 +133,7 @@ library , unison-share-projects-api , unison-sqlite , unison-syntax + , unison-util-base32hex , unison-util-relation , unliftio , uri-encode @@ -136,3 +144,58 @@ library , warp , yaml default-language: Haskell2010 + +test-suite unison-share-api-tests + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: + Unison.Test.Sync.Gen + Unison.Test.Sync.Roundtrip + hs-source-dirs: + tests + default-extensions: + BlockArguments + ConstraintKinds + DeriveAnyClass + DeriveFunctor + DeriveGeneric + DerivingStrategies + DerivingVia + DoAndIfThenElse + DuplicateRecordFields + FlexibleContexts + FlexibleInstances + GADTs + GeneralizedNewtypeDeriving + KindSignatures + ImportQualifiedPost + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + NumericUnderscores + OverloadedStrings + PatternSynonyms + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeOperators + ViewPatterns + ImportQualifiedPost + QuasiQuotes + ghc-options: -Wall + build-depends: + base + , bytestring + , code-page 
+ , easytest + , hedgehog + , serialise + , text + , unison-codebase-sqlite + , unison-hash + , unison-prelude + , unison-share-api + , vector + default-language: Haskell2010 diff --git a/unison-src/transcripts/idempotent/ability-order-doesnt-affect-hash.md b/unison-src/transcripts/idempotent/ability-order-doesnt-affect-hash.md index da9c866125..09af223a06 100644 --- a/unison-src/transcripts/idempotent/ability-order-doesnt-affect-hash.md +++ b/unison-src/transcripts/idempotent/ability-order-doesnt-affect-hash.md @@ -26,7 +26,7 @@ scratch/main> add scratch/main> names term1 - Term - Hash: #42m1ui9g56 - Names: term1 term2 + 'term1': + Hash Kind Names + #42m1ui9g56 Term term1, term2 ``` diff --git a/unison-src/transcripts/idempotent/api-list-projects-branches.md b/unison-src/transcripts/idempotent/api-list-projects-branches.md index 02d2d2541f..2f3bc28b22 100644 --- a/unison-src/transcripts/idempotent/api-list-projects-branches.md +++ b/unison-src/transcripts/idempotent/api-list-projects-branches.md @@ -1,17 +1,24 @@ # List Projects And Branches Test +I create projects and branches in reverse alphabetical order, and starting with `z` +to place them after `main` alphabetically. +This is because the results from the listing endpoints is sorted by (timestamp, name); but +the default sqlite timestamp only has second-level precision and the transcript will sometimes +lump many of those together. Doing it this way ensures both the creation timestamp and name sort +the same direction so we don't end up with flaky non-deterministic tests. 
+ ``` ucm :hide -scratch/main> project.create-empty project-one +scratch/main> project.create-empty project-cherry -scratch/main> project.create-empty project-two +scratch/main> project.create-empty project-banana -scratch/main> project.create-empty project-three +scratch/main> project.create-empty project-apple -project-one/main> branch branch-one +project-apple/main> branch a-branch-cherry -project-one/main> branch branch-two +project-apple/main> branch a-branch-banana -project-one/main> branch branch-three +project-apple/main> branch a-branch-apple ``` ``` api @@ -19,52 +26,51 @@ project-one/main> branch branch-three GET /api/projects [ { - "projectName": "project-one" + "activeBranchRef": "a-branch-apple", + "projectName": "project-apple" }, { - "projectName": "project-three" + "activeBranchRef": "main", + "projectName": "project-banana" }, { - "projectName": "project-two" + "activeBranchRef": "main", + "projectName": "project-cherry" }, { + "activeBranchRef": "main", "projectName": "scratch" } ] --- Should list projects starting with project-t -GET /api/projects?prefix=project-t +-- Can query for some infix of the project name +GET /api/projects?query=bana [ { - "projectName": "project-three" - }, - { - "projectName": "project-two" + "activeBranchRef": "main", + "projectName": "project-banana" } ] -- Should list all branches -GET /api/projects/project-one/branches +GET /api/projects/project-apple/branches [ { - "branchName": "branch-one" + "branchName": "a-branch-apple" }, { - "branchName": "branch-three" + "branchName": "a-branch-banana" }, { - "branchName": "branch-two" + "branchName": "a-branch-cherry" }, { "branchName": "main" } ] --- Should list all branches beginning with branch-t -GET /api/projects/project-one/branches?prefix=branch-t +-- Can query for some infix of the project name +GET /api/projects/project-apple/branches?query=bana [ { - "branchName": "branch-three" - }, - { - "branchName": "branch-two" + "branchName": "a-branch-banana" } ] ``` diff 
--git a/unison-src/transcripts/idempotent/deep-names.md b/unison-src/transcripts/idempotent/deep-names.md index 34d842859d..d82b97409c 100644 --- a/unison-src/transcripts/idempotent/deep-names.md +++ b/unison-src/transcripts/idempotent/deep-names.md @@ -53,15 +53,15 @@ As such, we see two copies of `a` and two copies of `x` via these direct depende ``` ucm scratch/app1> names a - Term - Hash: #gjmq673r1v - Names: lib.text_v1.a lib.text_v2.a + 'a': + Hash Kind Names + #gjmq673r1v Term lib.text_v1.a, lib.text_v2.a scratch/app1> names x - Term - Hash: #nsmc4p1ra4 - Names: lib.http_v3.x lib.http_v4.x + 'x': + Hash Kind Names + #nsmc4p1ra4 Term lib.http_v3.x, lib.http_v4.x ``` Our `app2` project includes the `http` library twice as direct dependencies, and once as an indirect dependency via `webutil`. @@ -103,13 +103,13 @@ We see neither the second indirect copy of `a` nor the indirect copy of `x` via ``` ucm scratch/app2> names a - Term - Hash: #gjmq673r1v - Names: lib.webutil.lib.text_v1.a + 'a': + Hash Kind Names + #gjmq673r1v Term lib.webutil.lib.text_v1.a scratch/app2> names x - Term - Hash: #nsmc4p1ra4 - Names: lib.http_v1.x lib.http_v2.x + 'x': + Hash Kind Names + #nsmc4p1ra4 Term lib.http_v1.x, lib.http_v2.x ``` diff --git a/unison-src/transcripts/idempotent/fix-5326.md b/unison-src/transcripts/idempotent/fix-5326.md index 267648cb4c..5d06bcbcf9 100644 --- a/unison-src/transcripts/idempotent/fix-5326.md +++ b/unison-src/transcripts/idempotent/fix-5326.md @@ -193,7 +193,9 @@ scratch/main> merge /foo Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... 
diff --git a/unison-src/transcripts/idempotent/fix-pattern-capture.md b/unison-src/transcripts/idempotent/fix-pattern-capture.md new file mode 100644 index 0000000000..9d181d77fb --- /dev/null +++ b/unison-src/transcripts/idempotent/fix-pattern-capture.md @@ -0,0 +1,63 @@ +``` ucm :hide +scratch/main> builtins.merge +``` + +Checks a case that was resulting in variable capture when compiling +pattern matching. `y` was evidently getting captured by the variable +introduced for `confuser decoy` + +``` unison +type NatBox = NatBox Nat +type Decoy a = { confuser : Tres } + +type Tres = One | Two | Three + +xyzzy : NatBox -> Decoy a -> Nat +xyzzy box decoy = + (NatBox y) = box + (natty) = -- Note that these parentheses are required + match confuser decoy with + Tres.One -> y + Two -> y + 1 + Three -> 11 + natty + +> xyzzy (NatBox 1) (Decoy One) +> xyzzy (NatBox 1) (Decoy Two) +> xyzzy (NatBox 1) (Decoy Three) +``` + +``` ucm :added-by-ucm + Loading changes detected in scratch.u. + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + type Decoy a + type NatBox + type Tres + Decoy.confuser : Decoy a -> Tres + Decoy.confuser.modify : (Tres ->{g} Tres) + -> Decoy a1 + ->{g} Decoy a + Decoy.confuser.set : Tres -> Decoy a1 -> Decoy a + xyzzy : NatBox -> Decoy a -> Nat + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. 
+ + 16 | > xyzzy (NatBox 1) (Decoy One) + ⧩ + 1 + + 17 | > xyzzy (NatBox 1) (Decoy Two) + ⧩ + 2 + + 18 | > xyzzy (NatBox 1) (Decoy Three) + ⧩ + 11 +``` diff --git a/unison-src/transcripts/idempotent/fuzzy-options.md b/unison-src/transcripts/idempotent/fuzzy-options.md index 0e6ae51d30..2bfadbb3b7 100644 --- a/unison-src/transcripts/idempotent/fuzzy-options.md +++ b/unison-src/transcripts/idempotent/fuzzy-options.md @@ -57,26 +57,3 @@ scratch/main> debug.fuzzy-options find-in _ Select a namespace: * nested ``` - -Project Branch args - -``` ucm -myproject/main> branch mybranch - - Done. I've created the mybranch branch based off of main. - - Tip: To merge your work back into the main branch, first - `switch /main` then `merge /mybranch`. - -scratch/main> debug.fuzzy-options switch _ - - Select a project or branch to switch to: - * /empty - * /main - * myproject/main - * myproject/mybranch - * scratch/empty - * scratch/main - * myproject - * scratch -``` diff --git a/unison-src/transcripts/idempotent/help.md b/unison-src/transcripts/idempotent/help.md index 7dc5975ed0..6525ba7459 100644 --- a/unison-src/transcripts/idempotent/help.md +++ b/unison-src/transcripts/idempotent/help.md @@ -152,10 +152,13 @@ scratch/main> help operation. debug.names.global - `debug.names.global foo` Iteratively search across all - projects and branches for names matching `foo`. Note that this - is expected to be quite slow and is primarily for debugging - issues with your codebase. + Iteratively search names or hashes across all projects and branches. + `debug.names.global foo` List all known names for `foo`. + `debug.names.global foo #bar` List all known names for the + name `foo` and for the hash `#bar`. + `debug.names.global` without arguments invokes a search to + select names/hashes to list, which requires that `fzf` can be + found within your PATH. debug.numberedArgs Dump the contents of the numbered args state. 
@@ -553,8 +556,13 @@ scratch/main> help `move.type foo bar` renames `foo` to `bar`. names - `names foo` List all known names for `foo` in the current - branch. + Search names or hashes in the current branch. + `names foo` List all known names for `foo`. + `names foo #bar` List all known names for the name `foo` and + for the hash `#bar`. + `names` without arguments invokes a search to select + names/hashes to list, which requires that `fzf` can be found + within your PATH. namespace.dependencies List the external dependencies of the specified namespace. diff --git a/unison-src/transcripts/idempotent/names.md b/unison-src/transcripts/idempotent/names.md index ca74561ba8..f88bc4ac98 100644 --- a/unison-src/transcripts/idempotent/names.md +++ b/unison-src/transcripts/idempotent/names.md @@ -16,6 +16,24 @@ some.otherplace.x = 10 somewhere.z = 1 -- Some similar name with a different value somewhere.y = 2 + +another.Boolean = true + +dd.baz = true +aa.baz = true +bb.baz = true +cc.baz = true + +d.baz = 100 +a.baz = 100 +b.baz = 100 +c.baz = 100 + +type a.baz = Boolean +type z.baz = Boolean + + +xyz.baz = 100.1 ``` ``` ucm :added-by-ucm @@ -27,11 +45,23 @@ somewhere.y = 2 ⍟ These new definitions are ok to `add`: + type a.baz + type z.baz + a.baz : Nat + aa.baz : Boolean + another.Boolean : Boolean + b.baz : Nat + bb.baz : Boolean + c.baz : Nat + cc.baz : Boolean + d.baz : Nat + dd.baz : Boolean some.otherplace.x : Nat some.otherplace.y : Nat some.place.x : Nat somewhere.y : Nat somewhere.z : Nat + xyz.baz : Float ``` ``` ucm @@ -39,11 +69,23 @@ scratch/main> add ⍟ I've added these definitions: + type a.baz + type z.baz + a.baz : Nat + aa.baz : Boolean + another.Boolean : Boolean + b.baz : Nat + bb.baz : Boolean + c.baz : Nat + cc.baz : Boolean + d.baz : Nat + dd.baz : Boolean some.otherplace.x : Nat some.otherplace.y : Nat some.place.x : Nat somewhere.y : Nat somewhere.z : Nat + xyz.baz : Float ``` `names` searches relative to the current path. 
@@ -53,45 +95,76 @@ scratch/main> add scratch/main> names x - Terms - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + 'x': + Hash Kind Names + #pi25gcdv0o Term some.otherplace.x + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z + +-- We can search for multiple names in one command - Hash: #pi25gcdv0o - Names: some.otherplace.x +scratch/main> names x y + + 'x': + Hash Kind Names + #pi25gcdv0o Term some.otherplace.x + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z + + 'y': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z + #dcgdua2lj6 Term somewhere.y -- We can search by hash, and see all aliases of that hash scratch/main> names #gjmq673r1v - Term - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + '#gjmq673r1v': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z -- Works with absolute names too scratch/main> names .some.place.x - Term - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + '.some.place.x': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z ``` `debug.names.global` searches from the root, and absolutely qualifies results ``` ucm --- We can search from a different branch and find all names in the codebase named 'x', and each of their aliases respectively. +-- We can search from a different branch and find all names in the codebase named 'x' and those named 'y', and each of their aliases respectively. 
-scratch/other> debug.names.global x +scratch/other> debug.names.global x y Found results in scratch/main - Terms - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + 'x': + Hash Kind Names + #pi25gcdv0o Term some.otherplace.x + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z + + Found results in scratch/main - Hash: #pi25gcdv0o - Names: some.otherplace.x + 'y': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z + #dcgdua2lj6 Term somewhere.y -- We can search by hash, and see all aliases of that hash in the codebase @@ -99,9 +172,11 @@ scratch/other> debug.names.global #gjmq673r1v Found results in scratch/main - Term - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + '#gjmq673r1v': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z -- We can search using an absolute name @@ -109,7 +184,70 @@ scratch/other> debug.names.global .some.place.x Found results in scratch/main - Term - Hash: #gjmq673r1v - Names: some.otherplace.y some.place.x somewhere.z + '.some.place.x': + Hash Kind Names + #gjmq673r1v Term some.otherplace.y, + some.place.x, + somewhere.z +``` + +``` ucm :error +-- We can handle many name queries, some of which fail and some of which succeed + +-- The names command is considered to have failed because there are 1 or more query failures + +-- We can display hashes that are references to types and to terms + +-- Each list of names in the Names column is sorted alphabetically + +-- Each row is sorted by the Names column, alphabetically by name and then by the length of the list + +scratch/main> names max /invalid1 /invalid2 + Boolean foo baz + + 'max': + Hash Kind Names + ##Float.max Term lib.builtins.Float.max + + /invalid1: + /invalid1 is not a well-formed name, hash, or hash-qualified + name. I expected something like `foo`, `#abc123`, or + `foo#abc123`. 
+ + /invalid2: + /invalid2 is not a well-formed name, hash, or hash-qualified + name. I expected something like `foo`, `#abc123`, or + `foo#abc123`. + + '+': + Hash Kind Names + ##Float.+ Term lib.builtins.Float.+ + ##Int.+ Term lib.builtins.Int.+ + ##Nat.+ Term lib.builtins.Nat.+ + + 'Boolean': + Hash Kind Names + #idl63c82kf#0 Term a.baz.Boolean + #56fi1cmq3u Term aa.baz, + another.Boolean, + bb.baz, + cc.baz, + dd.baz + ##Boolean Type lib.builtins.Boolean + #cmihlkoddu#0 Term z.baz.Boolean + + 'foo': + 😶 + I couldn't find anything by that name. + + 'baz': + Hash Kind Names + #idl63c82kf Type a.baz + #u1qsl3nk5t Term a.baz, b.baz, c.baz, d.baz + #56fi1cmq3u Term aa.baz, + another.Boolean, + bb.baz, + cc.baz, + dd.baz + #00kr10tpqr Term xyz.baz + #cmihlkoddu Type z.baz ``` diff --git a/unison-src/transcripts/idempotent/suffixes.md b/unison-src/transcripts/idempotent/suffixes.md index 762ffe5448..29a46024e9 100644 --- a/unison-src/transcripts/idempotent/suffixes.md +++ b/unison-src/transcripts/idempotent/suffixes.md @@ -161,7 +161,7 @@ scratch/main> view distributed.abra.cadabra scratch/main> names distributed.lib.baz.qux - Term - Hash: #nhup096n2s - Names: lib.distributed.lib.baz.qux + 'distributed.lib.baz.qux': + Hash Kind Names + #nhup096n2s Term lib.distributed.lib.baz.qux ``` diff --git a/unison-src/transcripts/idempotent/unique-type-churn.md b/unison-src/transcripts/idempotent/unique-type-churn.md index 79b8a9684c..8adce63e89 100644 --- a/unison-src/transcripts/idempotent/unique-type-churn.md +++ b/unison-src/transcripts/idempotent/unique-type-churn.md @@ -51,13 +51,10 @@ If the name stays the same, the churn is even prevented if the type is updated a ``` ucm scratch/main> names A - Type - Hash: #j743idicb1 - Names: A - - Term - Hash: #j743idicb1#0 - Names: A.A + 'A': + Hash Kind Names + #j743idicb1 Type A + #j743idicb1#0 Term A.A ``` ``` unison @@ -87,13 +84,10 @@ scratch/main> update scratch/main> names A - Type - Hash: #186m0i6upt - Names: A - - Term - 
Hash: #186m0i6upt#0 - Names: A.A + 'A': + Hash Kind Names + #186m0i6upt Type A + #186m0i6upt#0 Term A.A ``` ``` unison @@ -125,11 +119,8 @@ scratch/main> update scratch/main> names A - Type - Hash: #j743idicb1 - Names: A - - Term - Hash: #j743idicb1#0 - Names: A.A + 'A': + Hash Kind Names + #j743idicb1 Type A + #j743idicb1#0 Term A.A ``` diff --git a/unison-src/transcripts/idempotent/update-ignores-lib-namespace.md b/unison-src/transcripts/idempotent/update-ignores-lib-namespace.md index 946fe14ceb..f2e029420e 100644 --- a/unison-src/transcripts/idempotent/update-ignores-lib-namespace.md +++ b/unison-src/transcripts/idempotent/update-ignores-lib-namespace.md @@ -61,7 +61,7 @@ scratch/main> update scratch/main> names foo - Term - Hash: #9ntnotdp87 - Names: foo + 'foo': + Hash Kind Names + #9ntnotdp87 Term foo ``` diff --git a/unison-src/transcripts/merge.output.md b/unison-src/transcripts/merge.output.md index e12726898d..f1ea01fb1d 100644 --- a/unison-src/transcripts/merge.output.md +++ b/unison-src/transcripts/merge.output.md @@ -68,7 +68,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -137,7 +139,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -220,7 +224,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -325,7 +331,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -439,7 +447,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... 
@@ -524,7 +534,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -603,7 +615,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -843,7 +857,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -934,7 +950,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1036,7 +1054,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1141,7 +1161,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1224,7 +1246,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1307,7 +1331,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1386,7 +1412,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1481,7 +1509,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1600,7 +1630,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... 
Loading dependents of changes... @@ -1692,7 +1724,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1794,7 +1828,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1967,7 +2003,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2033,7 +2071,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2098,7 +2138,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2164,7 +2206,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2206,9 +2250,9 @@ scratch/alice> add ``` ucm scratch/alice> names A - Type - Hash: #65mdg7015r - Names: A A.inner.X + 'A': + Hash Kind Names + #65mdg7015r Type A, A.inner.X ``` Bob's branch: @@ -2231,7 +2275,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... On scratch/alice, the type A.inner.X is an alias of A. I'm not able to perform a merge when a type exists nested under an @@ -2288,7 +2334,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge, because I need all constructor names to be nested somewhere beneath the @@ -2494,7 +2542,9 @@ scratch/alice> merge /bob Loading branches... 
- Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2619,7 +2669,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2728,7 +2780,9 @@ scratch/main> merge topic Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2875,7 +2929,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3047,7 +3103,9 @@ scratch/bob> merge /alice Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3063,7 +3121,9 @@ scratch/carol> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3217,7 +3277,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3297,7 +3359,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3364,15 +3428,15 @@ scratch/merge-bob-into-alice> update scratch/merge-bob-into-alice> names Bar - Type - Hash: #h3af39sae7 - Names: Bar + 'Bar': + Hash Kind Names + #h3af39sae7 Type Bar scratch/alice> names Bar - Type - Hash: #h3af39sae7 - Names: Bar + 'Bar': + Hash Kind Names + #h3af39sae7 Type Bar ``` ``` ucm :hide @@ -3496,7 +3560,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... 
diff --git a/unison-src/transcripts/project-outputs/docs/release-steps.output.md b/unison-src/transcripts/project-outputs/docs/release-steps.output.md index 52eb16ab5d..810e7bba84 100644 --- a/unison-src/transcripts/project-outputs/docs/release-steps.output.md +++ b/unison-src/transcripts/project-outputs/docs/release-steps.output.md @@ -18,7 +18,11 @@ Edit `releases._.README` to include `Release: `. .basedev.release> push git(git@github.com:unisonweb/base) ``` -## 2\. Run Release script +## 2\. Check or run cloud client tests + +https://github.com/unisoncomputing/cloud-client-tests/actions/workflows/cloud-client-tests.yml + +## 3\. Run Release script - **Milestone Release**: Look up the most recent release; bump the number and remove any trailing letters, e.g. `./scripts/make-release release/M5 trunk` - **Minor Release**: Increment the trailing letter of the previous release, or add an `a` to the previous milestone release, e.g. `./scripts/make-release release/M5a trunk` @@ -39,11 +43,11 @@ Including: After successfully executing the script you just have to sit tight and wait for all the jobs to complete. -## 3 +## 4 Smoke test of the new release. Try `brew upgrade unison-language`, launch it, launch `ui`. -## 4 +## 5 Write up release notes, template below.