diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 043fd697c7..3d94e12f2f 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -3839,8 +3839,8 @@ insertProjectBranch description causalHashId (ProjectBranch projectId branchId b execute [sql| - INSERT INTO project_branch (project_id, branch_id, name, causal_hash_id) - VALUES (:projectId, :branchId, :branchName, :causalHashId) + INSERT INTO project_branch (project_id, branch_id, name, causal_hash_id, last_accessed) + VALUES (:projectId, :branchId, :branchName, :causalHashId, strftime('%s', 'now', 'subsec')) |] whenJust maybeParentBranchId \parentBranchId -> execute @@ -4478,7 +4478,7 @@ setCurrentProjectPath projId branchId path = do execute [sql| UPDATE project_branch - SET last_accessed = strftime('%s', 'now') + SET last_accessed = strftime('%s', 'now', 'subsec') WHERE project_id = :projId AND branch_id = :branchId |] diff --git a/docs/configuration.md b/docs/configuration.md index 549f274a2a..87d38dca3a 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -10,6 +10,7 @@ * [`UNISON_SHARE_ACCESS_TOKEN`](#unison_share_access_token) * [`UNISON_READONLY`](#unison_readonly) * [`UNISON_ENTITY_VALIDATION`](#unison_entity_validation) + * [`UNISON_SYNC_VERSION`](#unison_sync_version) * [Local Codebase Server](#local-codebase-server) * [Codebase Configuration](#codebase-configuration) @@ -17,7 +18,7 @@ ### `UNISON_DEBUG` -Enable debugging output for various portions of the application. +Enable debugging output for various portions of the application. See `lib/unison-prelude/src/Unison/Debug.hs` for the full list of supported flags. E.g. @@ -62,7 +63,7 @@ Note for Windows users: Due to an outstanding issue with GHC's IO manager on Win Enabling the LSP on windows can cause UCM to hang on exit and may require the process to be killed by the operating system or via Ctrl-C. Note that this doesn't pose any risk of codebase corruption or cause any known issues, it's simply an annoyance. -If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable. +If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable. You can set this persistently in powershell using: @@ -117,6 +118,14 @@ Defaults to enabled. $ UNISON_ENTITY_VALIDATION="false" ucm ``` +### `UNISON_SYNC_VERSION` + +Allows enabling the experimental Sync Version 2 protocol when downloading code from Share. + +```sh +$ UNISON_SYNC_VERSION="2" ucm +``` + ### `UNISON_PULL_WORKERS` Allows setting the number of workers to use when pulling from a codebase server.
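The `last_accessed` hunks above switch the stored timestamp from whole seconds to sub-second resolution (SQLite's `subsec` modifier, available since SQLite 3.42), so branches touched within the same second still order correctly by recency. A minimal sketch of the difference, assuming the `sqlite-simple` package rather than this repo's `unison-sqlite` wrappers:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import Database.SQLite.Simple (Only (..), open, query_)

-- Compare the two strftime forms used in the hunks above: without 'subsec'
-- SQLite returns whole seconds, so two updates within the same second compare
-- equal; with 'subsec' the value carries fractional seconds and stays ordered.
main :: IO ()
main = do
  conn <- open ":memory:"
  [Only coarse] <- query_ conn "SELECT strftime('%s', 'now')" :: IO [Only Text]
  [Only fine] <- query_ conn "SELECT strftime('%s', 'now', 'subsec')" :: IO [Only Text]
  print (coarse, fine) -- e.g. ("1712345678","1712345678.123")
```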
diff --git a/lib/unison-prelude/src/Unison/Prelude.hs b/lib/unison-prelude/src/Unison/Prelude.hs index 374f4a1812..ef48bc2556 100644 --- a/lib/unison-prelude/src/Unison/Prelude.hs +++ b/lib/unison-prelude/src/Unison/Prelude.hs @@ -25,6 +25,7 @@ module Unison.Prelude whenJustM, eitherToMaybe, maybeToEither, + eitherToThese, altSum, altMap, hoistMaybe, @@ -82,6 +83,7 @@ import Data.Text as X (Text) import Data.Text qualified as Text import Data.Text.Encoding as X (decodeUtf8, encodeUtf8) import Data.Text.IO qualified as Text +import Data.These (These (..)) import Data.Traversable as X (for) import Data.Typeable as X (Typeable) import Data.Void as X (Void) @@ -205,6 +207,9 @@ throwEitherM = throwEitherMWith id throwEitherMWith :: forall e e' m a. (MonadIO m, Exception e') => (e -> e') -> m (Either e a) -> m a throwEitherMWith f action = throwExceptT . withExceptT f $ (ExceptT action) +eitherToThese :: Either a b -> These a b +eitherToThese = either This That + tShow :: (Show a) => a -> Text tShow = Text.pack . show diff --git a/lib/unison-prelude/src/Unison/Util/Set.hs b/lib/unison-prelude/src/Unison/Util/Set.hs index 4e3c6ef9b9..ac5bb12454 100644 --- a/lib/unison-prelude/src/Unison/Util/Set.hs +++ b/lib/unison-prelude/src/Unison/Util/Set.hs @@ -5,6 +5,7 @@ module Unison.Util.Set mapMaybe, symmetricDifference, Unison.Util.Set.traverse, + Unison.Util.Set.for, flatMap, filterM, forMaybe, @@ -51,6 +52,9 @@ forMaybe xs f = traverse :: (Applicative f, Ord b) => (a -> f b) -> Set a -> f (Set b) traverse f = fmap Set.fromList . Prelude.traverse f . Set.toList +for :: (Ord b, Applicative f) => Set a -> (a -> f b) -> f (Set b) +for = flip Unison.Util.Set.traverse + flatMap :: (Ord b) => (a -> Set b) -> Set a -> Set b flatMap f = Set.unions . fmap f . Set.toList diff --git a/lib/unison-sqlite/package.yaml b/lib/unison-sqlite/package.yaml index 84d0201eab..b90bd2aa57 100644 --- a/lib/unison-sqlite/package.yaml +++ b/lib/unison-sqlite/package.yaml @@ -9,6 +9,7 @@ library: dependencies: - base + - containers - direct-sqlite - megaparsec - pretty-simple diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs index 48167980db..726cac860e 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs @@ -58,6 +58,7 @@ module Unison.Sqlite.Connection ) where +import Data.Map qualified as Map import Database.SQLite.Simple qualified as Sqlite import Database.SQLite.Simple.FromField qualified as Sqlite import Database.SQLite3 qualified as Direct.Sqlite @@ -71,7 +72,10 @@ import Unison.Sqlite.Connection.Internal (Connection (..)) import Unison.Sqlite.Exception import Unison.Sqlite.Sql (Sql (..)) import Unison.Sqlite.Sql qualified as Sql +import UnliftIO (atomically) import UnliftIO.Exception +import UnliftIO.STM (readTVar) +import UnliftIO.STM qualified as STM -- | Perform an action with a connection to a SQLite database. 
-- @@ -103,19 +107,47 @@ openConnection name file = do Just "" -> file _ -> "file:" <> file <> "?mode=ro" conn0 <- Sqlite.open sqliteURI `catch` rethrowAsSqliteConnectException name file - let conn = Connection {conn = conn0, file, name} + statementCache <- STM.newTVarIO Map.empty + let conn = Connection {conn = conn0, file, name, statementCache} execute conn [Sql.sql| PRAGMA foreign_keys = ON |] execute conn [Sql.sql| PRAGMA busy_timeout = 60000 |] + execute conn [Sql.sql| PRAGMA synchronous = normal |] + execute conn [Sql.sql| PRAGMA journal_size_limit = 6144000 |] + execute conn [Sql.sql| PRAGMA cache_size = -64000 |] + execute conn [Sql.sql| PRAGMA temp_store = 2 |] + pure conn -- Close a connection opened with 'openConnection'. closeConnection :: Connection -> IO () -closeConnection (Connection _ _ conn) = +closeConnection conn@(Connection {conn = conn0}) = do -- FIXME if this throws an exception, it won't be under `SomeSqliteException` -- Possible fixes: -- 1. Add close exception to the hierarchy, e.g. `SqliteCloseException` -- 2. Always ignore exceptions thrown by `close` (Mitchell prefers this one) - Sqlite.close conn + closeAllStatements conn + Sqlite.close conn0 + +withStatement :: Connection -> Text -> (Sqlite.Statement -> IO a) -> IO a +withStatement conn sql action = do + bracket (prepareStatement conn sql) Sqlite.reset action + where + prepareStatement :: Connection -> Text -> IO Sqlite.Statement + prepareStatement Connection {conn, statementCache} sql = do + cached <- atomically $ do + cache <- STM.readTVar statementCache + pure $ Map.lookup sql cache + case cached of + Just stmt -> pure stmt + Nothing -> do + stmt <- Sqlite.openStatement conn (coerce @Text @Sqlite.Query sql) + atomically $ STM.modifyTVar statementCache (Map.insert sql stmt) + pure stmt + +closeAllStatements :: Connection -> IO () +closeAllStatements Connection {statementCache} = do + cache <- atomically $ readTVar statementCache + for_ cache Sqlite.closeStatement -- An internal type, for making prettier debug logs @@ -152,7 +184,7 @@ logQuery (Sql sql params) result = -- Without results execute :: (HasCallStack) => Connection -> Sql -> IO () -execute conn@(Connection _ _ conn0) sql@(Sql s params) = do +execute conn sql@(Sql s params) = do logQuery sql Nothing doExecute `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException @@ -163,16 +195,16 @@ execute conn@(Connection _ _ conn0) sql@(Sql s params) = do } where doExecute :: IO () - doExecute = - Sqlite.withStatement conn0 (coerce s) \(Sqlite.Statement statement) -> do - bindParameters statement params - void (Direct.Sqlite.step statement) + doExecute = do + withStatement conn s \statement -> do + bindParameters (coerce statement) params + void (Direct.Sqlite.step $ coerce statement) -- | Execute one or more semicolon-delimited statements. -- -- This function does not support parameters, and is mostly useful for executing DDL and migrations. 
executeStatements :: (HasCallStack) => Connection -> Text -> IO () -executeStatements conn@(Connection _ _ (Sqlite.Connection database _tempNameCounter)) sql = do +executeStatements conn@(Connection {conn = Sqlite.Connection database _tempNameCounter}) sql = do logQuery (Sql sql []) Nothing Direct.Sqlite.exec database sql `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException @@ -185,7 +217,7 @@ executeStatements conn@(Connection _ _ (Sqlite.Connection database _tempNameCoun -- With results, without checks queryStreamRow :: (HasCallStack, Sqlite.FromRow a) => Connection -> Sql -> (IO (Maybe a) -> IO r) -> IO r -queryStreamRow conn@(Connection _ _ conn0) sql@(Sql s params) callback = +queryStreamRow conn sql@(Sql s params) callback = run `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException SqliteQueryExceptionInfo @@ -194,8 +226,8 @@ queryStreamRow conn@(Connection _ _ conn0) sql@(Sql s params) callback = sql } where - run = - bracket (Sqlite.openStatement conn0 (coerce s)) Sqlite.closeStatement \statement -> do + run = do + withStatement conn s \statement -> do Sqlite.bind statement params callback (Sqlite.nextRow statement) @@ -213,7 +245,7 @@ queryStreamCol = queryStreamRow queryListRow :: forall a. (Sqlite.FromRow a, HasCallStack) => Connection -> Sql -> IO [a] -queryListRow conn@(Connection _ _ conn0) sql@(Sql s params) = do +queryListRow conn sql@(Sql s params) = do result <- doQuery `catch` \(exception :: Sqlite.SQLError) -> @@ -228,7 +260,7 @@ queryListRow conn@(Connection _ _ conn0) sql@(Sql s params) = do where doQuery :: IO [a] doQuery = - Sqlite.withStatement conn0 (coerce s) \statement -> do + withStatement conn (coerce s) \statement -> do bindParameters (coerce statement) params let loop :: [a] -> IO [a] loop rows = @@ -347,7 +379,7 @@ queryOneColCheck conn s check = -- Rows modified rowsModified :: Connection -> IO Int -rowsModified (Connection _ _ conn) = +rowsModified (Connection {conn}) = Sqlite.changes conn -- Vacuum diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Connection/Internal.hs b/lib/unison-sqlite/src/Unison/Sqlite/Connection/Internal.hs index 5f80151f94..579c37cfb9 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Connection/Internal.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Connection/Internal.hs @@ -3,15 +3,19 @@ module Unison.Sqlite.Connection.Internal ) where +import Data.Map (Map) +import Data.Text (Text) import Database.SQLite.Simple qualified as Sqlite +import UnliftIO.STM (TVar) -- | A /non-thread safe/ connection to a SQLite database. 
data Connection = Connection { name :: String, file :: FilePath, - conn :: Sqlite.Connection + conn :: Sqlite.Connection, + statementCache :: TVar (Map Text Sqlite.Statement) } instance Show Connection where - show (Connection name file _conn) = + show (Connection name file _conn _statementCache) = "Connection { name = " ++ show name ++ ", file = " ++ show file ++ " }" diff --git a/lib/unison-sqlite/unison-sqlite.cabal b/lib/unison-sqlite/unison-sqlite.cabal index 3db0980a7c..13a9eb27cd 100644 --- a/lib/unison-sqlite/unison-sqlite.cabal +++ b/lib/unison-sqlite/unison-sqlite.cabal @@ -65,6 +65,7 @@ library ghc-options: -Wall build-depends: base + , containers , direct-sqlite , megaparsec , pretty-simple diff --git a/unison-cli/src/Unison/Cli/DownloadUtils.hs b/unison-cli/src/Unison/Cli/DownloadUtils.hs index 343ebfeeb5..936b2b3fba 100644 --- a/unison-cli/src/Unison/Cli/DownloadUtils.hs +++ b/unison-cli/src/Unison/Cli/DownloadUtils.hs @@ -4,6 +4,7 @@ module Unison.Cli.DownloadUtils ( downloadProjectBranchFromShare, downloadLooseCodeFromShare, + SyncVersion (..), ) where @@ -11,6 +12,7 @@ import Control.Concurrent.STM (atomically) import Control.Concurrent.STM.TVar (modifyTVar', newTVarIO, readTVar, readTVarIO) import Data.List.NonEmpty (pattern (:|)) import System.Console.Regions qualified as Console.Regions +import System.IO.Unsafe (unsafePerformIO) import U.Codebase.HashTags (CausalHash) import U.Codebase.Sqlite.Queries qualified as Queries import Unison.Cli.Monad (Cli) @@ -28,9 +30,23 @@ import Unison.Share.API.Hash qualified as Share import Unison.Share.Codeserver qualified as Codeserver import Unison.Share.Sync qualified as Share import Unison.Share.Sync.Types qualified as Share +import Unison.Share.SyncV2 qualified as SyncV2 import Unison.Share.Types (codeserverBaseURL) import Unison.Sync.Common qualified as Sync.Common import Unison.Sync.Types qualified as Share +import Unison.SyncV2.Types qualified as SyncV2 +import UnliftIO.Environment qualified as UnliftIO + +data SyncVersion = SyncV1 | SyncV2 + deriving (Eq, Show) + +-- | The version of the sync protocol to use. +syncVersion :: SyncVersion +syncVersion = unsafePerformIO do + UnliftIO.lookupEnv "UNISON_SYNC_VERSION" + <&> \case + Just "2" -> SyncV2 + _ -> SyncV1 -- | Download a project/branch from Share. 
downloadProjectBranchFromShare :: @@ -41,7 +57,6 @@ downloadProjectBranchFromShare :: downloadProjectBranchFromShare useSquashed branch = Cli.labelE \done -> do let remoteProjectBranchName = branch.branchName - let repoInfo = Share.RepoInfo (into @Text (ProjectAndBranch branch.projectName remoteProjectBranchName)) causalHashJwt <- case (useSquashed, branch.squashedBranchHead) of (Share.IncludeSquashedHead, Nothing) -> done Output.ShareExpectedSquashedHead @@ -49,16 +64,27 @@ downloadProjectBranchFromShare useSquashed branch = (Share.NoSquashedHead, _) -> pure branch.branchHead exists <- Cli.runTransaction (Queries.causalExistsByHash32 (Share.hashJWTHash causalHashJwt)) when (not exists) do - (result, numDownloaded) <- - Cli.with withEntitiesDownloadedProgressCallback \(downloadedCallback, getNumDownloaded) -> do - result <- Share.downloadEntities Share.hardCodedBaseUrl repoInfo causalHashJwt downloadedCallback - numDownloaded <- liftIO getNumDownloaded - pure (result, numDownloaded) - result & onLeft \err0 -> do - done case err0 of - Share.SyncError err -> Output.ShareErrorDownloadEntities err - Share.TransportError err -> Output.ShareErrorTransport err - Cli.respond (Output.DownloadedEntities numDownloaded) + case syncVersion of + SyncV1 -> do + let repoInfo = Share.RepoInfo (into @Text (ProjectAndBranch branch.projectName remoteProjectBranchName)) + Cli.with withEntitiesDownloadedProgressCallback \(downloadedCallback, getNumDownloaded) -> do + result <- Share.downloadEntities Share.hardCodedBaseUrl repoInfo causalHashJwt downloadedCallback + numDownloaded <- liftIO getNumDownloaded + result & onLeft \err0 -> do + done case err0 of + Share.SyncError err -> Output.ShareErrorDownloadEntities err + Share.TransportError err -> Output.ShareErrorTransport err + Cli.respond (Output.DownloadedEntities numDownloaded) + SyncV2 -> do + let branchRef = SyncV2.BranchRef (into @Text (ProjectAndBranch branch.projectName remoteProjectBranchName)) + let downloadedCallback = \_ -> pure () + let shouldValidate = not $ Codeserver.isCustomCodeserver Codeserver.defaultCodeserver + result <- SyncV2.syncFromCodeserver shouldValidate Share.hardCodedBaseUrl branchRef causalHashJwt downloadedCallback + result & onLeft \err0 -> do + done case err0 of + Share.SyncError pullErr -> + Output.ShareErrorPullV2 pullErr + Share.TransportError err -> Output.ShareErrorTransport err pure (Sync.Common.hash32ToCausalHash (Share.hashJWTHash causalHashJwt)) -- | Download loose code from Share. 
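The dispatch above reduces to a one-time read of `UNISON_SYNC_VERSION`: anything other than exactly `"2"` keeps the stable V1 path, so SyncV2 is strictly opt-in. A standalone sketch of the same gate, using plain `System.Environment` instead of the `UnliftIO.Environment` wrapper used in the diff:

```haskell
{-# LANGUAGE LambdaCase #-}

import Data.Functor ((<&>))
import System.Environment (lookupEnv)

data SyncVersion = SyncV1 | SyncV2
  deriving (Eq, Show)

-- Mirrors the gate above: the experimental V2 protocol is selected only when
-- UNISON_SYNC_VERSION is exactly "2"; any other value falls back to V1.
readSyncVersion :: IO SyncVersion
readSyncVersion =
  lookupEnv "UNISON_SYNC_VERSION" <&> \case
    Just "2" -> SyncV2
    _ -> SyncV1
```

The PR itself memoizes this choice in a top-level `unsafePerformIO` constant, so the environment is consulted once per process rather than once per pull.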
diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs index 72920b190d..8dc383882e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Merge2.hs @@ -78,6 +78,7 @@ import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Name (Name) import Unison.NameSegment (NameSegment) import Unison.NameSegment qualified as NameSegment +import Unison.Names (Names) import Unison.Parser.Ann (Ann) import Unison.Prelude import Unison.Project @@ -241,6 +242,22 @@ doMerge info = do let blob0 = Merge.makeMergeblob0 nametrees3 libdeps3 + names3 :: Merge.ThreeWay Names <- do + let causalHashes = + Merge.TwoOrThreeWay + { alice = info.alice.causalHash, + bob = info.bob.causalHash, + lca = info.lca.causalHash + } + branches <- for causalHashes \ch -> do + liftIO (Codebase.getBranchForHash env.codebase ch) >>= \case + Nothing -> done (Output.CouldntLoadBranch ch) + Just b -> pure b + let names = fmap (Branch.toNames . Branch.head) branches + pure Merge.ThreeWay {alice = names.alice, bob = names.bob, lca = fromMaybe mempty names.lca} + + respondRegion (Output.Literal "Loading definitions...") + -- Hydrate hydratedDefns :: Merge.ThreeWay @@ -260,14 +277,14 @@ doMerge info = do in bimap f g <$> blob0.defns ) - respondRegion (Output.Literal "Computing diff between branches...") + respondRegion (Output.Literal "Computing diffs...") blob1 <- - Merge.makeMergeblob1 blob0 hydratedDefns & onLeft \case + Merge.makeMergeblob1 blob0 names3 hydratedDefns & onLeft \case Merge.Alice reason -> done (Output.IncoherentDeclDuringMerge mergeTarget reason) Merge.Bob reason -> done (Output.IncoherentDeclDuringMerge mergeSource reason) - liftIO (debugFunctions.debugDiffs blob1.diffs) + liftIO (debugFunctions.debugDiffs blob1.diffsFromLCA) liftIO (debugFunctions.debugCombinedDiff blob1.diff) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs index f34a64302a..39af010bfe 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/SyncV2.hs @@ -2,16 +2,20 @@ module Unison.Codebase.Editor.HandleInput.SyncV2 ( handleSyncToFile, handleSyncFromFile, handleSyncFromCodebase, + handleSyncFromCodeserver, ) where import Control.Lens import Control.Monad.Reader (MonadReader (..)) +import U.Codebase.HashTags (CausalHash) import U.Codebase.Sqlite.Queries qualified as Q +import Unison.Cli.DownloadUtils (downloadProjectBranchFromShare) import Unison.Cli.Monad (Cli) import Unison.Cli.Monad qualified as Cli import Unison.Cli.MonadUtils qualified as Cli import Unison.Cli.ProjectUtils qualified as Project +import Unison.Cli.Share.Projects qualified as Projects import Unison.Codebase (CodebasePath) import Unison.Codebase qualified as Codebase import Unison.Codebase.Editor.Output qualified as Output @@ -69,3 +73,6 @@ handleSyncFromCodebase description srcCodebasePath srcBranch destBranch = do Cli.setProjectBranchRootToCausalHash (projectBranch ^. 
#branch) description causalHash Right (Right (Left syncErr)) -> do Cli.respond (Output.SyncPullError syncErr) + +handleSyncFromCodeserver :: Projects.IncludeSquashedHead -> Projects.RemoteProjectBranch -> Cli (Either Output.ShareError CausalHash) +handleSyncFromCodeserver = downloadProjectBranchFromShare diff --git a/unison-cli/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs index acaacd23c9..75de97cd1a 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -138,7 +138,8 @@ data Input | PushRemoteBranchI PushRemoteBranchInput | SyncToFileI FilePath (ProjectAndBranch (Maybe ProjectName) (Maybe ProjectBranchName)) | SyncFromFileI FilePath UnresolvedProjectBranch - | SyncFromCodebaseI FilePath (ProjectAndBranch ProjectName ProjectBranchName) UnresolvedProjectBranch + | -- | Sync from a codebase project branch to this codebase's project branch + SyncFromCodebaseI FilePath (ProjectAndBranch ProjectName ProjectBranchName) UnresolvedProjectBranch | ResetI (BranchId2 {- namespace to reset it to -}) (Maybe UnresolvedProjectBranch {- ProjectBranch to reset -}) | -- | used in Welcome module to give directions to user -- diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index 2519967aed..b3a7f1cc60 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -475,6 +475,7 @@ data ShareError = ShareErrorDownloadEntities Share.DownloadEntitiesError | ShareErrorGetCausalHashByPath Sync.GetCausalHashByPathError | ShareErrorPull Sync.PullError + | ShareErrorPullV2 SyncV2.PullError | ShareErrorTransport Sync.CodeserverTransportError | ShareErrorUploadEntities Share.UploadEntitiesError | ShareExpectedSquashedHead diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 24065284fd..b003e374a2 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -2209,11 +2209,13 @@ syncFromCodebase = args = [("codebase-location", Required, filePathArg), ("branch-to-sync", Required, projectAndBranchNamesArg suggestionsConfig), ("destination-branch", Optional, projectAndBranchNamesArg suggestionsConfig)], help = ( P.wrapColumn2 - [ (makeExample syncFromCodebase ["./codebase", "/feature", "/main"], "Sets the /feature branch to the contents of the codebase at ./codebase.") + [ ( makeExample syncFromCodebase ["./codebase", "srcProject/main", "destProject/main"], + "Imports srcProject/main from the specified codebase, then sets destProject/main to the imported branch." 
+ ) ] ), parse = \case - [codebaseLocation, branchToSync, destinationBranch] -> Input.SyncFromCodebaseI <$> unsupportedStructuredArgument makeStandalone "a file name" codebaseLocation <*> handleBranchWithProject branchToSync <*> handleBranchWithOptionalProject destinationBranch + [codebaseLocation, srcBranch, destinationBranch] -> Input.SyncFromCodebaseI <$> unsupportedStructuredArgument makeStandalone "a file name" codebaseLocation <*> handleBranchWithProject srcBranch <*> handleBranchWithOptionalProject destinationBranch args -> wrongArgsLength "three arguments" args } where diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 95dda95cf9..2d61f2b9ab 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -2291,6 +2291,7 @@ prettyShareError = ShareErrorDownloadEntities err -> prettyDownloadEntitiesError err ShareErrorGetCausalHashByPath err -> prettyGetCausalHashByPathError err ShareErrorPull err -> prettyPullError err + ShareErrorPullV2 err -> prettyPullV2Error err ShareErrorTransport err -> prettyTransportError err ShareErrorUploadEntities err -> prettyUploadEntitiesError err ShareExpectedSquashedHead -> "The server failed to provide a squashed branch head when requested. Please report this as a bug to the Unison team." diff --git a/unison-cli/src/Unison/Share/SyncV2.hs b/unison-cli/src/Unison/Share/SyncV2.hs index bbce0d95e6..14870f208d 100644 --- a/unison-cli/src/Unison/Share/SyncV2.hs +++ b/unison-cli/src/Unison/Share/SyncV2.hs @@ -5,6 +5,7 @@ module Unison.Share.SyncV2 ( syncFromFile, syncToFile, syncFromCodebase, + syncFromCodeserver, ) where @@ -21,24 +22,37 @@ import Data.Attoparsec.ByteString.Char8 qualified as A8 import Data.ByteString qualified as BS import Data.ByteString.Lazy qualified as BL import Data.Conduit.Attoparsec qualified as C -import Data.Conduit.List qualified as C +import Data.Conduit.Combinators qualified as C +import Data.Conduit.List qualified as CL import Data.Conduit.Zlib qualified as C +import Data.Foldable qualified as Foldable import Data.Graph qualified as Graph import Data.Map qualified as Map +import Data.Proxy import Data.Set qualified as Set import Data.Text.IO qualified as Text +import Data.Text.Lazy qualified as Text.Lazy +import Data.Text.Lazy.Encoding qualified as Text.Lazy +import Data.Vector (Vector) +import Data.Vector qualified as Vector +import Network.HTTP.Client qualified as Http.Client +import Network.HTTP.Types qualified as HTTP +import Servant.API qualified as Servant +import Servant.Client.Streaming qualified as Servant import Servant.Conduit () +import Servant.Types.SourceT qualified as Servant import System.Console.Regions qualified as Console.Regions import U.Codebase.HashTags (CausalHash) import U.Codebase.Sqlite.Queries qualified as Q import U.Codebase.Sqlite.TempEntity (TempEntity) import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle) +import Unison.Auth.HTTPClient qualified as Auth import Unison.Cli.Monad (Cli) import Unison.Cli.Monad qualified as Cli import Unison.Codebase qualified as Codebase -import Unison.Debug qualified as Debug import Unison.Hash32 (Hash32) import Unison.Prelude +import Unison.Share.API.Hash qualified as Share import Unison.Share.ExpectedHashMismatches (expectedCausalHashMismatches, expectedComponentHashMismatches) import Unison.Share.Sync.Types import Unison.Sqlite qualified as Sqlite @@ -47,7 +61,9 @@ import Unison.Sync.Common qualified as Sync import 
Unison.Sync.EntityValidation qualified as EV import Unison.Sync.Types qualified as Share import Unison.Sync.Types qualified as Sync -import Unison.SyncV2.Types (CBORBytes) +import Unison.SyncV2.API (Routes (downloadEntitiesStream)) +import Unison.SyncV2.API qualified as SyncV2 +import Unison.SyncV2.Types (CBORBytes, CBORStream, DependencyType (..)) import Unison.SyncV2.Types qualified as SyncV2 import Unison.Util.Servant.CBOR qualified as CBOR import Unison.Util.Timing qualified as Timing @@ -57,20 +73,123 @@ type Stream i o = ConduitT i o StreamM () type SyncErr = SyncError SyncV2.PullError +-- The base monad we use within the conduit pipeline. type StreamM = (ExceptT SyncErr (C.ResourceT IO)) +-- | The number of entities to process in a single transaction. +-- +-- SQLite transactions have some fixed overhead, so setting this too low can really slow things down, +-- but going too high here means we may be waiting on the network to get a full batch when we could be starting work. batchSize :: Int batchSize = 5000 ------------------------------------------------------------------------------------------------------------------------ --- Download entities +-- Main methods +------------------------------------------------------------------------------------------------------------------------ + +-- | Sync a given causal hash and its dependencies to a sync-file. +syncToFile :: + Codebase.Codebase IO v a -> + -- | Root hash to sync + CausalHash -> + -- | Optional name of the branch being synced + Maybe SyncV2.BranchRef -> + -- | Location of the sync-file + FilePath -> + IO (Either SyncErr ()) +syncToFile codebase rootHash mayBranchRef destFilePath = do + liftIO $ Codebase.withConnection codebase \conn -> do + C.runResourceT $ + withCodebaseEntityStream conn rootHash mayBranchRef \mayTotal stream -> do + withStreamProgressCallback (Just mayTotal) \countC -> runExceptT do + C.runConduit $ + stream + C..| countC + C..| C.map (BL.toStrict . CBOR.serialise) + C..| C.transPipe liftIO C.gzip + C..| C.sinkFile destFilePath + +syncFromFile :: + -- | Whether to validate entities as they're imported. + Bool -> + -- | Location of the sync-file + FilePath -> + Cli (Either (SyncError SyncV2.PullError) CausalHash) +syncFromFile shouldValidate syncFilePath = do + Cli.Env {codebase} <- ask + -- Every insert into SQLite checks the temp entity tables, but syncv2 doesn't actually use them, so it's faster + -- if we clear them out before starting a sync. + Cli.runTransaction Q.clearTempEntityTables + runExceptT do + mapExceptT liftIO $ Timing.time "File Sync" $ do + header <- mapExceptT C.runResourceT $ do + let stream = C.sourceFile syncFilePath C..| C.ungzip C..| decodeUnframedEntities + (header, rest) <- initializeStream stream + streamIntoCodebase shouldValidate codebase header rest + pure header + afterSyncChecks codebase (SyncV2.rootCausalHash header) + pure . hash32ToCausalHash $ SyncV2.rootCausalHash header + +syncFromCodebase :: + Bool -> + -- | The codebase to sync from. + Sqlite.Connection -> + (Codebase.Codebase IO v a) -> + -- | The hash to sync. + CausalHash -> + IO (Either (SyncError SyncV2.PullError) ()) +syncFromCodebase shouldValidate srcConn destCodebase causalHash = do + -- Every insert into SQLite checks the temp entity tables, but syncv2 doesn't actually use them, so it's faster + -- if we clear them out before starting a sync. + Sqlite.runTransaction srcConn Q.clearTempEntityTables + liftIO . C.runResourceT . 
runExceptT $ withCodebaseEntityStream srcConn causalHash Nothing \_total entityStream -> do + (header, rest) <- initializeStream entityStream + streamIntoCodebase shouldValidate destCodebase header rest + mapExceptT liftIO (afterSyncChecks destCodebase (causalHashToHash32 causalHash)) -validateAndSave :: Bool -> (Codebase.Codebase IO v a) -> [(Hash32, TempEntity)] -> StreamM () +syncFromCodeserver :: + Bool -> + -- | The Unison Share URL. + Servant.BaseUrl -> + -- | The branch to download from. + SyncV2.BranchRef -> + -- | The hash to download. + Share.HashJWT -> + -- | Callback that's given a number of entities we just downloaded. + (Int -> IO ()) -> + Cli (Either (SyncError SyncV2.PullError) ()) +syncFromCodeserver shouldValidate unisonShareUrl branchRef hashJwt _downloadedCallback = do + Cli.Env {authHTTPClient, codebase} <- ask + -- Every insert into SQLite checks the temp entity tables, but syncv2 doesn't actually use them, so it's faster + -- if we clear them out before starting a sync. + Cli.runTransaction Q.clearTempEntityTables + runExceptT do + knownHashes <- ExceptT $ negotiateKnownCausals unisonShareUrl branchRef hashJwt + let hash = Share.hashJWTHash hashJwt + ExceptT $ do + (Cli.runTransaction (Q.entityLocation hash)) >>= \case + Just Q.EntityInMainStorage -> pure $ Right () + _ -> do + Timing.time "Entity Download" $ do + liftIO . C.runResourceT . runExceptT $ httpStreamEntities + authHTTPClient + unisonShareUrl + SyncV2.DownloadEntitiesRequest {branchRef, causalHash = hashJwt, knownHashes} + \header stream -> do + streamIntoCodebase shouldValidate codebase header stream + mapExceptT liftIO (afterSyncChecks codebase hash) + +------------------------------------------------------------------------------------------------------------------------ +-- Helpers +------------------------------------------------------------------------------------------------------------------------ + +-- | Validate that the provided entities match their expected hashes, and if so, save them to the codebase. +validateAndSave :: Bool -> (Codebase.Codebase IO v a) -> Vector (Hash32, TempEntity) -> StreamM () validateAndSave shouldValidate codebase entities = do let validateEntities = runExceptT $ when shouldValidate (batchValidateEntities entities) - -- Validation is slow, run it in parallel with insertion, but don't commit the transaction until we're done - -- validation. + -- Validation is slow, so we run it in parallel with insertion (which can also be slow), + -- but we don't commit the transaction until we're done validating, to avoid inserting invalid entities. ExceptT . liftIO $ IO.withAsync validateEntities \validationTask -> do Timing.time "Inserting entities" $ Codebase.runTransactionExceptT codebase do for_ entities \(hash, entity) -> do @@ -79,6 +198,26 @@ validateAndSave shouldValidate codebase entities = do Left err -> throwError err Right _ -> pure () +-- | Validate that a batch of entities matches the hashes they're keyed by, throwing an error if any of them fail validation. 
+batchValidateEntities :: Vector (Hash32, TempEntity) -> ExceptT SyncErr IO () +batchValidateEntities entities = do + mismatches <- fmap Vector.catMaybes $ liftIO $ IO.pooledForConcurrently entities \(hash, entity) -> do + IO.evaluate $ EV.validateTempEntity hash entity + for_ mismatches \case + err@(Share.EntityHashMismatch et (Share.HashMismatchForEntity {supplied, computed})) -> + let expectedMismatches = case et of + Share.TermComponentType -> expectedComponentHashMismatches + Share.DeclComponentType -> expectedComponentHashMismatches + Share.CausalType -> expectedCausalHashMismatches + _ -> mempty + in case Map.lookup supplied expectedMismatches of + Just expected + | expected == computed -> pure () + _ -> do + throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err + err -> do + throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err + -- | Syncs a stream which could send entities in any order. syncUnsortedStream :: Bool -> @@ -86,26 +225,50 @@ syncUnsortedStream :: Stream () SyncV2.EntityChunk -> StreamM () syncUnsortedStream shouldValidate codebase stream = do - Debug.debugLogM Debug.Temp $ "Syncing unsorted stream" - allResults <- C.runConduit $ stream C..| C.sinkList - allEntities <- ExceptT $ Timing.time "Unpacking chunks" $ liftIO $ Codebase.runTransactionExceptT codebase $ do unpackChunks allResults + allEntities <- + C.runConduit $ + stream + C..| CL.chunksOf batchSize + C..| unpackChunks codebase + C..| validateBatch + C..| C.concat + C..| C.sinkVector @Vector let sortedEntities = sortDependencyFirst allEntities - validateAndSave shouldValidate codebase sortedEntities + liftIO $ withEntitySavingCallback (Just $ Vector.length allEntities) \countC -> do + Codebase.runTransaction codebase $ for_ sortedEntities \(hash, entity) -> do + r <- Q.saveTempEntityInMain v2HashHandle hash entity + Sqlite.unsafeIO $ countC 1 + pure r + where + validateBatch :: Stream (Vector (Hash32, TempEntity)) (Vector (Hash32, TempEntity)) + validateBatch = C.iterM \entities -> do + when shouldValidate (mapExceptT lift $ batchValidateEntities entities) -- | Syncs a stream which sends entities which are already sorted in dependency order. +-- This allows us to stream them directly into the codebase as they're received. syncSortedStream :: Bool -> (Codebase.Codebase IO v a) -> Stream () SyncV2.EntityChunk -> StreamM () syncSortedStream shouldValidate codebase stream = do - Debug.debugLogM Debug.Temp $ "Syncing sorted stream" - let handler :: Stream [SyncV2.EntityChunk] o - handler = C.mapM_C \chunkBatch -> do - entityBatch <- mapExceptT lift . ExceptT $ Codebase.runTransactionExceptT codebase do for chunkBatch unpackChunk - validateAndSave shouldValidate codebase (catMaybes entityBatch) - C.runConduit $ stream C..| C.chunksOf batchSize C..| handler + let handler :: Stream (Vector (Hash32, TempEntity)) o + handler = C.mapM_C \entityBatch -> do + validateAndSave shouldValidate codebase entityBatch + C.runConduit $ + stream + C..| CL.chunksOf batchSize + C..| unpackChunks codebase + C..| handler + +-- | Topologically sort entities based on their dependencies, returning a list in dependency-first order. 
+sortDependencyFirst :: (Foldable f, Functor f) => f (Hash32, TempEntity) -> [(Hash32, TempEntity)] +sortDependencyFirst entities = do + let adjList = entities <&> \(hash32, entity) -> ((hash32, entity), hash32, Set.toList $ Share.entityDependencies (tempEntityToEntity entity)) + (graph, vertexInfo, _vertexForKey) = Graph.graphFromEdges (Foldable.toList adjList) + in Graph.reverseTopSort graph <&> \v -> (view _1 $ vertexInfo v) +-- | Unpack a single entity chunk, returning the entity if it's not already in the codebase, Nothing otherwise. unpackChunk :: SyncV2.EntityChunk -> ExceptT SyncErr Sqlite.Transaction (Maybe (Hash32, TempEntity)) unpackChunk = \case SyncV2.EntityChunk {hash, entityCBOR = entityBytes} -> do @@ -121,33 +284,23 @@ unpackChunk = \case Left err -> do throwError $ (SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err) Right entity -> pure entity -unpackChunks :: [SyncV2.EntityChunk] -> ExceptT SyncErr Sqlite.Transaction [(Hash32, TempEntity)] -unpackChunks xs = do +unpackChunks :: Codebase.Codebase IO v a -> Stream [SyncV2.EntityChunk] (Vector (Hash32, TempEntity)) +unpackChunks codebase = C.mapM \xs -> ExceptT . lift . Codebase.runTransactionExceptT codebase $ do for xs unpackChunk <&> catMaybes + <&> Vector.fromList -batchValidateEntities :: [(Hash32, TempEntity)] -> ExceptT SyncErr IO () -batchValidateEntities entities = do - mismatches <- fmap catMaybes $ liftIO $ IO.pooledForConcurrently entities \(hash, entity) -> do - IO.evaluate $ EV.validateTempEntity hash entity - for_ mismatches \case - err@(Share.EntityHashMismatch et (Share.HashMismatchForEntity {supplied, computed})) -> - let expectedMismatches = case et of - Share.TermComponentType -> expectedComponentHashMismatches - Share.DeclComponentType -> expectedComponentHashMismatches - Share.CausalType -> expectedCausalHashMismatches - _ -> mempty - in case Map.lookup supplied expectedMismatches of - Just expected - | expected == computed -> pure () - _ -> do - throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err - err -> do - throwError . SyncError . SyncV2.PullError'DownloadEntities . SyncV2.DownloadEntitiesEntityValidationFailure $ err - -streamIntoCodebase :: Bool -> Codebase.Codebase IO v a -> SyncV2.StreamInitInfo -> Stream () SyncV2.EntityChunk -> StreamM () +-- | Stream entities from one codebase into another. +streamIntoCodebase :: + -- | Whether to validate entities as they're imported. + Bool -> + Codebase.Codebase IO v a -> + SyncV2.StreamInitInfo -> + Stream () SyncV2.EntityChunk -> + StreamM () streamIntoCodebase shouldValidate codebase SyncV2.StreamInitInfo {version, entitySorting, numEntities = numEntities} stream = ExceptT do withStreamProgressCallback (fromIntegral <$> numEntities) \countC -> runExceptT do + -- Add a counter to the stream to track how many entities we've processed. let stream' = stream C..| countC case version of (SyncV2.Version 1) -> pure () @@ -157,6 +310,7 @@ streamIntoCodebase shouldValidate codebase SyncV2.StreamInitInfo {version, entit SyncV2.DependenciesFirst -> syncSortedStream shouldValidate codebase stream' SyncV2.Unsorted -> syncUnsortedStream shouldValidate codebase stream' +-- | A sanity-check to verify that the hash we expected to import from the stream was successfully loaded into the codebase. 
afterSyncChecks :: Codebase.Codebase IO v a -> Hash32 -> ExceptT (SyncError SyncV2.PullError) IO () afterSyncChecks codebase hash = do lift (didCausalSuccessfullyImport codebase hash) >>= \case @@ -171,53 +325,16 @@ afterSyncChecks codebase hash = do let expectedHash = hash32ToCausalHash hash isJust <$> (Codebase.runTransaction codebase $ Q.loadCausalByCausalHash expectedHash) --- | Topologically sort entities based on their dependencies. -sortDependencyFirst :: [(Hash32, TempEntity)] -> [(Hash32, TempEntity)] -sortDependencyFirst entities = do - let adjList = entities <&> \(hash32, entity) -> ((hash32, entity), hash32, Set.toList $ Share.entityDependencies (tempEntityToEntity entity)) - (graph, vertexInfo, _vertexForKey) = Graph.graphFromEdges adjList - in Graph.reverseTopSort graph <&> \v -> (view _1 $ vertexInfo v) - -syncFromFile :: - Bool -> - -- | Location of the sync-file - FilePath -> - Cli (Either (SyncError SyncV2.PullError) CausalHash) -syncFromFile shouldValidate syncFilePath = do - Cli.Env {codebase} <- ask - runExceptT do - Debug.debugLogM Debug.Temp $ "Kicking off sync" - mapExceptT liftIO $ Timing.time "File Sync" $ do - header <- mapExceptT C.runResourceT $ do - let stream = C.sourceFile syncFilePath C..| C.ungzip C..| decodeUnframedEntities - (header, rest) <- initializeStream stream - streamIntoCodebase shouldValidate codebase header rest - pure header - afterSyncChecks codebase (SyncV2.rootCausalHash header) - pure . hash32ToCausalHash $ SyncV2.rootCausalHash header - -syncFromCodebase :: - Bool -> - -- | The codebase to sync from. - Sqlite.Connection -> - (Codebase.Codebase IO v a) -> - -- | The hash to sync. - CausalHash -> - IO (Either (SyncError SyncV2.PullError) ()) -syncFromCodebase shouldValidate srcConn destCodebase causalHash = do - liftIO . C.runResourceT . runExceptT $ withEntityStream srcConn causalHash Nothing \_total entityStream -> do - (header, rest) <- initializeStream entityStream - streamIntoCodebase shouldValidate destCodebase header rest - mapExceptT liftIO (afterSyncChecks destCodebase (causalHashToHash32 causalHash)) - -withEntityStream :: +-- | Load and stream entities for a given causal hash from a codebase into a stream. +withCodebaseEntityStream :: (MonadIO m) => Sqlite.Connection -> CausalHash -> Maybe SyncV2.BranchRef -> + -- | Callback to call with the total count of entities and the stream. (Int -> Stream () SyncV2.DownloadEntitiesChunk -> m r) -> m r -withEntityStream conn rootHash mayBranchRef callback = do +withCodebaseEntityStream conn rootHash mayBranchRef callback = do entities <- liftIO $ withEntityLoadingCallback $ \counter -> do Sqlite.runTransaction conn (depsForCausal rootHash counter) liftIO $ Text.hPutStrLn IO.stderr $ "Finished loading entities, writing sync-file." @@ -244,34 +361,21 @@ withEntityStream conn rootHash mayBranchRef callback = do & (initialChunk :) let stream = C.yieldMany contents callback totalEntities stream - -syncToFile :: - Codebase.Codebase IO v a -> - CausalHash -> - Maybe SyncV2.BranchRef -> - FilePath -> - IO (Either SyncErr ()) -syncToFile codebase rootHash mayBranchRef destFilePath = do - liftIO $ Codebase.withConnection codebase \conn -> do - C.runResourceT $ - withEntityStream conn rootHash mayBranchRef \mayTotal stream -> do - withStreamProgressCallback (Just mayTotal) \countC -> runExceptT do - C.runConduit $ stream C..| countC C..| C.map (BL.toStrict . CBOR.serialise) C..| C.transPipe liftIO C.gzip C..| C.sinkFile destFilePath - --- | Collect all dependencies of a given causal hash. 
-depsForCausal :: CausalHash -> (Int -> IO ()) -> Sqlite.Transaction (Map Hash32 (Sync.Entity Text Hash32 Hash32)) -depsForCausal causalHash counter = do - flip execStateT mempty $ expandEntities (causalHashToHash32 causalHash) where - expandEntities :: Hash32 -> ((StateT (Map Hash32 (Sync.Entity Text Hash32 Hash32)) Sqlite.Transaction)) () - expandEntities hash32 = do - gets (Map.member hash32) >>= \case - True -> pure () - False -> do - entity <- lift $ Sync.expectEntity hash32 - modify (Map.insert hash32 entity) - lift . Sqlite.unsafeIO $ counter 1 - traverseOf_ Sync.entityHashes_ expandEntities entity + -- Collect all dependencies of a given causal hash. + depsForCausal :: CausalHash -> (Int -> IO ()) -> Sqlite.Transaction (Map Hash32 (Sync.Entity Text Hash32 Hash32)) + depsForCausal causalHash counter = do + flip execStateT mempty $ expandEntities (causalHashToHash32 causalHash) + where + expandEntities :: Hash32 -> ((StateT (Map Hash32 (Sync.Entity Text Hash32 Hash32)) Sqlite.Transaction)) () + expandEntities hash32 = do + gets (Map.member hash32) >>= \case + True -> pure () + False -> do + entity <- lift $ Sync.expectEntity hash32 + modify (Map.insert hash32 entity) + lift . Sqlite.unsafeIO $ counter 1 + traverseOf_ Sync.entityHashes_ expandEntities entity -- | Gets the framed chunks from a NetString framed stream. _unNetString :: ConduitT ByteString ByteString StreamM () @@ -290,8 +394,8 @@ _decodeFramedEntity bs = do Left err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err Right chunk -> pure chunk --- Expects a stream of tightly-packed CBOR entities without any framing/separators. -decodeUnframedEntities :: ConduitT ByteString SyncV2.DownloadEntitiesChunk StreamM () +-- | Unpacks a stream of tightly-packed CBOR entities without any framing/separators. +decodeUnframedEntities :: forall a. (CBOR.Serialise a) => Stream ByteString a decodeUnframedEntities = C.transPipe (mapExceptT (lift . stToIO)) $ do C.await >>= \case Nothing -> pure () @@ -299,13 +403,13 @@ decodeUnframedEntities = C.transPipe (mapExceptT (lift . stToIO)) $ do d <- newDecoder loop bs d where - newDecoder :: ConduitT ByteString SyncV2.DownloadEntitiesChunk (ExceptT SyncErr (ST s)) (Maybe ByteString -> ST s (CBOR.IDecode s (SyncV2.DownloadEntitiesChunk))) + newDecoder :: ConduitT ByteString a (ExceptT SyncErr (ST s)) (Maybe ByteString -> ST s (CBOR.IDecode s a)) newDecoder = do (lift . lift) CBOR.deserialiseIncremental >>= \case CBOR.Done _ _ _ -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorStreamFailure "Invalid initial decoder" CBOR.Fail _ _ err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err CBOR.Partial k -> pure k - loop :: ByteString -> (Maybe ByteString -> ST s (CBOR.IDecode s (SyncV2.DownloadEntitiesChunk))) -> ConduitT ByteString SyncV2.DownloadEntitiesChunk (ExceptT SyncErr (ST s)) () + loop :: ByteString -> (Maybe ByteString -> ST s (CBOR.IDecode s a)) -> ConduitT ByteString a (ExceptT SyncErr (ST s)) () loop bs k = do (lift . lift) (k (Just bs)) >>= \case CBOR.Fail _ _ err -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorDeserializationFailure err @@ -338,11 +442,80 @@ decodeUnframedEntities = C.transPipe (mapExceptT (lift . stToIO)) $ do k <- newDecoder loop rem k --- | Peel the header off the stream and parse the remaining entity chunks. 
+------------------------------------------------------------------------------------------------------------------------ +-- Servant stuff + +type SyncAPI = ("ucm" Servant.:> "v2" Servant.:> "sync" Servant.:> SyncV2.API) + +syncAPI :: Proxy SyncAPI +syncAPI = Proxy @SyncAPI + +downloadEntitiesStreamClientM :: SyncV2.DownloadEntitiesRequest -> Servant.ClientM (Servant.SourceT IO (CBORStream SyncV2.DownloadEntitiesChunk)) +causalDependenciesStreamClientM :: SyncV2.CausalDependenciesRequest -> Servant.ClientM (Servant.SourceT IO (CBORStream SyncV2.CausalDependenciesChunk)) +SyncV2.Routes + { downloadEntitiesStream = downloadEntitiesStreamClientM, + causalDependenciesStream = causalDependenciesStreamClientM + } = Servant.client syncAPI + +-- | Helper for running clientM that returns a stream of entities. +-- You MUST consume the stream within the callback, it will be closed when the callback returns. +withConduit :: forall r chunk. (CBOR.Serialise chunk) => Servant.ClientEnv -> (Stream () chunk -> StreamM r) -> Servant.ClientM (Servant.SourceIO (CBORStream chunk)) -> StreamM r +withConduit clientEnv callback clientM = do + ExceptT $ withRunInIO \runInIO -> do + Servant.withClientM clientM clientEnv $ \case + Left err -> pure . Left . TransportError $ (handleClientError clientEnv err) + Right sourceT -> do + conduit <- liftIO $ Servant.fromSourceIO sourceT + (runInIO . runExceptT $ callback (conduit C..| unpackCBORBytesStream)) + +unpackCBORBytesStream :: (CBOR.Serialise a) => Stream (CBORStream a) a +unpackCBORBytesStream = + C.map (BL.toStrict . coerce @_ @BL.ByteString) C..| decodeUnframedEntities + +handleClientError :: Servant.ClientEnv -> Servant.ClientError -> CodeserverTransportError +handleClientError clientEnv err = + case err of + Servant.FailureResponse _req resp -> + case HTTP.statusCode $ Servant.responseStatusCode resp of + 401 -> Unauthenticated (Servant.baseUrl clientEnv) + -- The server should provide semantically relevant permission-denied messages + -- when possible, but this should catch any we miss. + 403 -> PermissionDenied (Text.Lazy.toStrict . Text.Lazy.decodeUtf8 $ Servant.responseBody resp) + 408 -> Timeout + 429 -> RateLimitExceeded + 504 -> Timeout + _ -> UnexpectedResponse resp + Servant.DecodeFailure msg resp -> DecodeFailure msg resp + Servant.UnsupportedContentType _ct resp -> UnexpectedResponse resp + Servant.InvalidContentTypeHeader resp -> UnexpectedResponse resp + Servant.ConnectionError _ -> UnreachableCodeserver (Servant.baseUrl clientEnv) + +-- | Stream entities from the codeserver. 
+httpStreamEntities :: + Auth.AuthenticatedHttpClient -> + Servant.BaseUrl -> + SyncV2.DownloadEntitiesRequest -> + (SyncV2.StreamInitInfo -> Stream () SyncV2.EntityChunk -> StreamM ()) -> + StreamM () +httpStreamEntities (Auth.AuthenticatedHttpClient httpClient) unisonShareUrl req callback = do + let clientEnv = + (Servant.mkClientEnv httpClient unisonShareUrl) + { Servant.makeClientRequest = \url request -> + -- Disable client-side timeouts + (Servant.defaultMakeClientRequest url request) + <&> \r -> + r + { Http.Client.responseTimeout = Http.Client.responseTimeoutNone + } + } + (downloadEntitiesStreamClientM req) & withConduit clientEnv \stream -> do + (init, entityStream) <- initializeStream stream + callback init entityStream + +-- | Peel the header off the stream and parse the remaining entity chunks into EntityChunks initializeStream :: Stream () SyncV2.DownloadEntitiesChunk -> StreamM (SyncV2.StreamInitInfo, Stream () SyncV2.EntityChunk) initializeStream stream = do (streamRemainder, init) <- stream C.$$+ C.headC - Debug.debugM Debug.Temp "Got initial chunk: " init case init of Nothing -> throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorMissingInitialChunk Just chunk -> do @@ -351,7 +524,6 @@ initializeStream stream = do let entityStream = C.unsealConduitT streamRemainder C..| C.mapM parseEntity pure $ (info, entityStream) SyncV2.EntityC _ -> do - Debug.debugLogM Debug.Temp $ "Got unexpected entity chunk" throwError . SyncError . SyncV2.PullError'Sync $ SyncV2.SyncErrorMissingInitialChunk SyncV2.ErrorC (SyncV2.ErrorChunk err) -> throwError . SyncError . SyncV2.PullError'DownloadEntities $ err where @@ -361,35 +533,112 @@ initializeStream stream = do SyncV2.ErrorC (SyncV2.ErrorChunk err) -> throwError . SyncError $ SyncV2.PullError'DownloadEntities err SyncV2.InitialC {} -> throwError . SyncError $ SyncV2.PullError'Sync SyncV2.SyncErrorMisplacedInitialChunk --- Provide the given action a callback that display to the terminal. -withStreamProgressCallback :: (MonadIO m, MonadUnliftIO n) => Maybe Int -> (ConduitT i i m () -> n a) -> n a -withStreamProgressCallback total action = do - entitiesDownloadedVar <- IO.newTVarIO (0 :: Int) - IO.withRunInIO \toIO -> do - Console.Regions.displayConsoleRegions do - Console.Regions.withConsoleRegion Console.Regions.Linear \region -> do - Console.Regions.setConsoleRegion region do - entitiesDownloaded <- IO.readTVar entitiesDownloadedVar - pure $ - "\n Processed " - <> tShow entitiesDownloaded - <> maybe "" (\total -> " / " <> tShow total) total - <> " entities...\n\n" - toIO $ action $ C.awaitForever \i -> do - liftIO $ IO.atomically (IO.modifyTVar' entitiesDownloadedVar (+ 1)) - C.yield i +------------------------------------------------------------------------------------------------------------------------ +-- Causal Dependency negotiation +------------------------------------------------------------------------------------------------------------------------ -withEntityLoadingCallback :: (MonadUnliftIO m) => ((Int -> m ()) -> m a) -> m a -withEntityLoadingCallback action = do +httpStreamCausalDependencies :: + forall r. 
+ Auth.AuthenticatedHttpClient -> + Servant.BaseUrl -> + SyncV2.CausalDependenciesRequest -> + (Stream () SyncV2.CausalDependenciesChunk -> StreamM r) -> + StreamM r +httpStreamCausalDependencies (Auth.AuthenticatedHttpClient httpClient) unisonShareUrl req callback = do + let clientEnv = + (Servant.mkClientEnv httpClient unisonShareUrl) + { Servant.makeClientRequest = \url request -> + -- Disable client-side timeouts + (Servant.defaultMakeClientRequest url request) + <&> \r -> + r + { Http.Client.responseTimeout = Http.Client.responseTimeoutNone + } + } + (causalDependenciesStreamClientM req) & withConduit clientEnv callback + +-- | Ask Share for the dependencies of a given hash JWT, +-- then filter them down to the set of causals which we already have and don't need sent. +negotiateKnownCausals :: + -- | The Unison Share URL. + Servant.BaseUrl -> + -- | The branch to download from. + SyncV2.BranchRef -> + -- | The hash to download. + Share.HashJWT -> + Cli (Either (SyncError SyncV2.PullError) (Set Hash32)) +negotiateKnownCausals unisonShareUrl branchRef hashJwt = do + Cli.Env {authHTTPClient, codebase} <- ask + liftIO $ Text.hPutStrLn IO.stderr $ " 🔎 Identifying missing entities..." + Timing.time "Causal Negotiation" $ do + liftIO . C.runResourceT . runExceptT $ httpStreamCausalDependencies + authHTTPClient + unisonShareUrl + SyncV2.CausalDependenciesRequest {branchRef, rootCausal = hashJwt} + \stream -> do + Set.fromList <$> C.runConduit (stream C..| C.map unpack C..| findKnownDeps codebase C..| C.sinkList) + where + -- Go through the dependencies of the remote root top-down, yielding all causal hashes that we already + -- have, until we find one in the causal spine that we already have; then yield that one and stop, since + -- we'll implicitly have all of its dependencies. + findKnownDeps :: Codebase.Codebase IO v a -> Stream (Hash32, DependencyType) Hash32 + findKnownDeps codebase = do + C.await >>= \case + Just (hash, LibDependency) -> do + -- We yield all lib dependencies we have: it's possible we don't have any of the causal spine in common, but if + -- we _do_ have some of the libraries we can still save a lot of work. + whenM (lift $ haveCausalHash codebase hash) (C.yield hash) + -- We continue regardless. + findKnownDeps codebase + Just (hash, CausalSpineDependency) -> do + lift (haveCausalHash codebase hash) >>= \case + True -> do + -- If we find a causal hash we have in the spine, we don't need to look further, + -- we can pass it on, then hang up the stream. + C.yield hash + False -> do + -- Otherwise we keep looking, maybe we'll have one further in. 
+ findKnownDeps codebase + Nothing -> pure () + unpack :: SyncV2.CausalDependenciesChunk -> (Hash32, DependencyType) + unpack = \case + SyncV2.CausalHashDepC {causalHash, dependencyType} -> (causalHash, dependencyType) + haveCausalHash :: Codebase.Codebase IO v a -> Hash32 -> StreamM Bool + haveCausalHash codebase causalHash = do + liftIO $ Codebase.runTransaction codebase do + Q.causalExistsByHash32 causalHash + +------------------------------------------------------------------------------------------------------------------------ +-- Progress Tracking +------------------------------------------------------------------------------------------------------------------------ + +counterProgress :: (MonadIO m, MonadUnliftIO n) => (Int -> Text) -> ((Int -> m ()) -> n a) -> n a +counterProgress msgBuilder action = do counterVar <- IO.newTVarIO (0 :: Int) IO.withRunInIO \toIO -> do Console.Regions.displayConsoleRegions do Console.Regions.withConsoleRegion Console.Regions.Linear \region -> do Console.Regions.setConsoleRegion region do - processed <- IO.readTVar counterVar - pure $ - "\n Loading " - <> tShow processed - <> " entities...\n\n" + num <- IO.readTVar counterVar + pure $ msgBuilder num toIO $ action $ \i -> do liftIO $ IO.atomically (IO.modifyTVar' counterVar (+ i)) + +-- | Track how many entities have been downloaded using a counter stream. +withStreamProgressCallback :: (MonadIO m, MonadUnliftIO n) => Maybe Int -> (ConduitT i i m () -> n a) -> n a +withStreamProgressCallback total action = do + let msg n = "\n 📦 Unpacked " <> tShow n <> maybe "" (\total -> " / " <> tShow total) total <> " entities...\n\n" + let action' f = action (C.iterM \_i -> f 1) + counterProgress msg action' + +-- | Track how many entities have been saved. +withEntitySavingCallback :: (MonadUnliftIO m) => Maybe Int -> ((Int -> m ()) -> m a) -> m a +withEntitySavingCallback total action = do + let msg n = "\n 💾 Saved " <> tShow n <> maybe "" (\total -> " / " <> tShow total) total <> " new entities...\n\n" + counterProgress msg action + +-- | Track how many entities have been loaded. +withEntityLoadingCallback :: (MonadUnliftIO m) => ((Int -> m ()) -> m a) -> m a +withEntityLoadingCallback action = do + let msg n = "\n 📦 Unpacked " <> tShow n <> " entities...\n\n" + counterProgress msg action diff --git a/unison-core/src/Unison/Util/Defn.hs b/unison-core/src/Unison/Util/Defn.hs index d897491de4..26a0fdd222 100644 --- a/unison-core/src/Unison/Util/Defn.hs +++ b/unison-core/src/Unison/Util/Defn.hs @@ -3,7 +3,28 @@ module Unison.Util.Defn ) where +import Data.Bifoldable (Bifoldable (..)) +import Data.Bifunctor (Bifunctor (..)) +import Data.Bitraversable (Bitraversable (..)) +import GHC.Generics (Generic) + -- | A "definition" is either a term or a type. 
data Defn term typ = TermDefn term | TypeDefn typ + deriving stock (Generic, Functor, Foldable, Traversable, Show, Eq, Ord) + +instance Bifunctor Defn where + bimap f g = \case + TermDefn x -> TermDefn (f x) + TypeDefn y -> TypeDefn (g y) + +instance Bifoldable Defn where + bifoldMap f g = \case + TermDefn x -> f x + TypeDefn y -> g y + +instance Bitraversable Defn where + bitraverse f g = \case + TermDefn x -> TermDefn <$> f x + TypeDefn y -> TypeDefn <$> g y diff --git a/unison-core/src/Unison/Util/Defns.hs b/unison-core/src/Unison/Util/Defns.hs index fed00742b4..cacaac8d1f 100644 --- a/unison-core/src/Unison/Util/Defns.hs +++ b/unison-core/src/Unison/Util/Defns.hs @@ -4,6 +4,8 @@ module Unison.Util.Defns DefnsF2, DefnsF3, DefnsF4, + terms_, + types_, alignDefnsWith, defnsAreEmpty, fromTerms, @@ -19,6 +21,7 @@ module Unison.Util.Defns ) where +import Control.Lens (Lens) import Data.Align (Semialign, alignWith) import Data.Bifoldable (Bifoldable, bifoldMap) import Data.Bitraversable (Bitraversable, bitraverse) @@ -31,7 +34,7 @@ data Defns terms types = Defns { terms :: terms, types :: types } - deriving stock (Generic, Functor, Show) + deriving stock (Generic, Functor, Show, Eq, Ord) deriving (Monoid, Semigroup) via GenericSemigroupMonoid (Defns terms types) instance Bifoldable Defns where @@ -46,6 +49,12 @@ instance Bitraversable Defns where bitraverse f g (Defns x y) = Defns <$> f x <*> g y +terms_ :: Lens (Defns terms types) (Defns terms' types) terms terms' +terms_ f (Defns x y) = (\x' -> Defns x' y) <$> f x + +types_ :: Lens (Defns terms types) (Defns terms types') types types' +types_ f (Defns x y) = (\y' -> Defns x y') <$> f y + -- | A common shape of definitions - terms and types are stored in the same structure. type DefnsF f terms types = Defns (f terms) (f types) diff --git a/unison-core/src/Unison/Util/Nametree.hs b/unison-core/src/Unison/Util/Nametree.hs index e87bdde344..50ae6d1510 100644 --- a/unison-core/src/Unison/Util/Nametree.hs +++ b/unison-core/src/Unison/Util/Nametree.hs @@ -3,6 +3,7 @@ module Unison.Util.Nametree Nametree (..), traverseNametreeWithName, unfoldNametree, + unionWith, -- ** Flattening and unflattening flattenNametree, @@ -33,6 +34,16 @@ data Nametree a = Nametree } deriving stock (Functor, Foldable, Traversable, Generic, Show) +unionWith :: (a -> a -> a) -> Nametree a -> Nametree a -> Nametree a +unionWith f (Nametree x xs) (Nametree y ys) = + Nametree (f x y) (Map.unionWith (unionWith f) xs ys) + +instance (Semigroup a) => Semigroup (Nametree a) where + (<>) = unionWith (<>) + +instance (Monoid a) => Monoid (Nametree a) where + mempty = Nametree mempty mempty + instance Semialign Nametree where alignWith :: (These a b -> c) -> Nametree a -> Nametree b -> Nametree c alignWith f (Nametree x xs) (Nametree y ys) = diff --git a/unison-merge/package.yaml b/unison-merge/package.yaml index 53b339cf9f..1ef15a7954 100644 --- a/unison-merge/package.yaml +++ b/unison-merge/package.yaml @@ -7,6 +7,7 @@ ghc-options: -Wall dependencies: - base - containers + - either - lens - mtl - nonempty-containers diff --git a/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs b/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs index 697e693d6b..97fe824740 100644 --- a/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs +++ b/unison-merge/src/Unison/Merge/DeclCoherencyCheck.hs @@ -132,7 +132,7 @@ data IncoherentDeclReason -- Foo.Bar#Foo IncoherentDeclReason'NestedDeclAlias !Name !Name -- shorter name, longer name | IncoherentDeclReason'StrayConstructor !TypeReferenceId !Name 
- deriving stock (Show) + deriving stock (Eq, Show) checkDeclCoherency :: (HasCallStack) => diff --git a/unison-merge/src/Unison/Merge/Diff.hs b/unison-merge/src/Unison/Merge/Diff.hs index 39be392c28..37625c66c7 100644 --- a/unison-merge/src/Unison/Merge/Diff.hs +++ b/unison-merge/src/Unison/Merge/Diff.hs @@ -1,12 +1,18 @@ module Unison.Merge.Diff ( nameBasedNamespaceDiff, + humanizeDiffs, ) where +import Data.Either.Combinators (mapRight) +import Data.List.NonEmpty qualified as NEL +import Data.List.NonEmpty qualified as NEList import Data.Map.Strict qualified as Map -import Data.Semialign (alignWith) +import Data.Semialign (Unalign (..), alignWith) import Data.Set qualified as Set +import Data.Set.NonEmpty qualified as NESet import Data.These (These (..)) +import Data.Zip qualified as Zip import U.Codebase.Reference (TypeReference) import Unison.ConstructorReference (GConstructorReference (..)) import Unison.DataDeclaration (Decl) @@ -14,20 +20,24 @@ import Unison.DataDeclaration qualified as DataDeclaration import Unison.DeclNameLookup (DeclNameLookup) import Unison.DeclNameLookup qualified as DeclNameLookup import Unison.Hash (Hash (Hash)) -import Unison.HashQualifiedPrime qualified as HQ' import Unison.Merge.DiffOp (DiffOp (..)) +import Unison.Merge.HumanDiffOp (HumanDiffOp (..)) import Unison.Merge.PartialDeclNameLookup (PartialDeclNameLookup (..)) import Unison.Merge.Synhash qualified as Synhash import Unison.Merge.Synhashed (Synhashed (..)) +import Unison.Merge.Synhashed qualified as Synhashed import Unison.Merge.ThreeWay (ThreeWay (..)) import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Merge.TwoWay (TwoWay (..)) import Unison.Merge.Updated (Updated (..)) import Unison.Name (Name) +import Unison.Names (Names) +import Unison.Names qualified as Names import Unison.Parser.Ann (Ann) import Unison.Prelude hiding (catMaybes) import Unison.PrettyPrintEnv (PrettyPrintEnv (..)) -import Unison.PrettyPrintEnv qualified as Ppe +import Unison.PrettyPrintEnv qualified as PPE +import Unison.PrettyPrintEnvDecl qualified as PPED import Unison.Reference (Reference' (..), TermReference, TermReferenceId, TypeReferenceId) import Unison.Referent (Referent) import Unison.Referent qualified as Referent @@ -36,7 +46,10 @@ import Unison.Syntax.Name qualified as Name import Unison.Term (Term) import Unison.Util.BiMultimap (BiMultimap) import Unison.Util.BiMultimap qualified as BiMultimap -import Unison.Util.Defns (Defns (..), DefnsF2, DefnsF3, zipDefnsWith) +import Unison.Util.Defns (Defns (..), DefnsF, DefnsF2, DefnsF3, zipDefnsWith) +import Unison.Util.Defns qualified as Defns +import Unison.Util.Relation (Relation) +import Unison.Util.Relation qualified as Rel -- | @nameBasedNamespaceDiff db declNameLookups defns@ returns Alice's and Bob's name-based namespace diffs, each in the -- form: @@ -50,40 +63,131 @@ nameBasedNamespaceDiff :: (HasCallStack) => TwoWay DeclNameLookup -> PartialDeclNameLookup -> + ThreeWay PPED.PrettyPrintEnvDecl -> ThreeWay (Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name)) -> Defns (Map TermReferenceId (Term Symbol Ann)) (Map TypeReferenceId (Decl Symbol Ann)) -> - TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference) -nameBasedNamespaceDiff declNameLookups lcaDeclNameLookup defns hydratedDefns = - let lcaHashes = synhashLcaDefns ppe lcaDeclNameLookup defns.lca hydratedDefns - hashes = synhashDefns ppe hydratedDefns <$> declNameLookups <*> ThreeWay.forgetLca defns - in diffHashedNamespaceDefns lcaHashes <$> hashes + ( -- Core diffs, i.e. 
adds, deletes, and updates which have different synhashes. + TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference), + -- Propagated updates, i.e. updates which have the same synhash but different Unison hashes. + TwoWay (DefnsF2 (Map Name) Updated Referent TypeReference) + ) +nameBasedNamespaceDiff declNameLookups lcaDeclNameLookup ppeds defns hydratedDefns = + let lcaHashes = synhashLcaDefns synhashPPE lcaDeclNameLookup defns.lca hydratedDefns + aliceHashes = synhashDefns synhashPPE hydratedDefns declNameLookups.alice defns.alice + bobHashes = synhashDefns synhashPPE hydratedDefns declNameLookups.bob defns.bob + in (diffHashedNamespaceDefns lcaHashes <$> TwoWay {alice = aliceHashes, bob = bobHashes}) + & Zip.unzip where - ppe :: PrettyPrintEnv - ppe = - -- The order between Alice and Bob isn't important here for syntactic hashing; not sure right now if it matters - -- that the LCA is added last - deepNamespaceDefinitionsToPpe defns.alice - `Ppe.addFallback` deepNamespaceDefinitionsToPpe defns.bob - `Ppe.addFallback` deepNamespaceDefinitionsToPpe defns.lca + synhashPPE :: PPE.PrettyPrintEnv + synhashPPE = + let ThreeWay {lca = lcaPPE, alice = alicePPE, bob = bobPPE} = PPED.unsuffixifiedPPE <$> ppeds + in alicePPE `PPE.addFallback` bobPPE `PPE.addFallback` lcaPPE diffHashedNamespaceDefns :: DefnsF2 (Map Name) Synhashed term typ -> DefnsF2 (Map Name) Synhashed term typ -> - DefnsF3 (Map Name) DiffOp Synhashed term typ -diffHashedNamespaceDefns = - zipDefnsWith f f + ( -- Core diffs, i.e. adds, deletes, and updates which have different synhashes. + DefnsF3 (Map Name) DiffOp Synhashed term typ, + -- Propagated updates, i.e. updates which have the same synhash but different Unison hashes. + DefnsF2 (Map Name) Updated term typ + ) +diffHashedNamespaceDefns d1 d2 = + zipDefnsWith f f d1 d2 + & splitPropagated where - f :: Map Name (Synhashed ref) -> Map Name (Synhashed ref) -> Map Name (DiffOp (Synhashed ref)) + f :: + Map Name (Synhashed ref) -> + Map Name (Synhashed ref) -> + (Map Name (DiffOp (Synhashed ref)), Map Name (Updated ref)) f old new = - Map.mapMaybe id (alignWith g old new) + unalign (eitherToThese . mapRight (fmap Synhashed.value) <$> alignWith g old new) - g :: (Eq x) => These x x -> Maybe (DiffOp x) + g :: (Eq x) => These x x -> Either (DiffOp x) (Updated x) g = \case - This old -> Just (DiffOp'Delete old) - That new -> Just (DiffOp'Add new) + This old -> Left (DiffOp'Delete old) + That new -> Left (DiffOp'Add new) These old new - | old == new -> Nothing - | otherwise -> Just (DiffOp'Update Updated {old, new}) + | old == new -> Right Updated {old, new} + | otherwise -> Left (DiffOp'Update Updated {old, new}) + + splitPropagated :: + Defns + ( Map Name (DiffOp (Synhashed term)), + Map Name (Updated term) + ) + (Map Name (DiffOp (Synhashed typ)), Map Name (Updated typ)) -> + (DefnsF3 (Map Name) DiffOp Synhashed term typ, DefnsF2 (Map Name) Updated term typ) + splitPropagated Defns {terms, types} = + (Defns {terms = fst terms, types = fst types}, Defns {terms = snd terms, types = snd types}) + +-- | Post-process a diff to identify relationships humans might care about, such as whether a given addition could be +-- interpreted as an alias of an existing definition, or whether an add and deletion could be a rename. 
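+--
+-- A small worked example (hypothetical names and refs, not drawn from real output):
+--
+--   * The LCA maps @foo -> #abc@; Alice adds @bar -> #abc@ and keeps @foo@. The add of @bar@
+--     becomes @HumanDiffOp'AliasOf #abc {foo}@.
+--   * The LCA maps @foo -> #abc@; Alice adds @bar -> #abc@ and deletes @foo@. The add becomes
+--     @HumanDiffOp'RenamedFrom #abc {foo}@, and the delete of @foo@ becomes
+--     @HumanDiffOp'RenamedTo #abc {bar}@.
+--   * The LCA maps @foo -> #abc@; Alice deletes @foo@ and no name for @#abc@ remains. The
+--     delete becomes @HumanDiffOp'Delete #abc@.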
+humanizeDiffs :: + ThreeWay Names -> + TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference) -> + TwoWay (DefnsF2 (Map Name) Updated Referent TypeReference) -> + TwoWay (DefnsF2 (Map Name) HumanDiffOp Referent TypeReference) +humanizeDiffs names3 diffs propagatedUpdates = + zipWithF3 nameRelations diffs propagatedUpdates \relation diffOps propagatedUpdates -> + Defns.zipDefnsWith4 computeHumanDiffOp computeHumanDiffOp lcaRelation relation diffOps propagatedUpdates + where + zipWithF3 :: (Zip.Zip f) => f a -> f b -> f c -> (a -> b -> c -> d) -> f d + zipWithF3 a b c f = Zip.zipWith (\(x, y) z -> f x y z) (Zip.zip a b) c + + namesToRelations :: Names -> (DefnsF (Relation Name) Referent TypeReference) + namesToRelations names = Defns {terms = Names.terms names, types = Names.types names} + + lcaRelation :: DefnsF (Relation Name) Referent TypeReference + lcaRelation = namesToRelations names3.lca + + nameRelations :: TwoWay (DefnsF (Relation Name) Referent TypeReference) + nameRelations = namesToRelations <$> ThreeWay.forgetLca names3 + + computeHumanDiffOp :: + forall ref. + (Show ref, Ord ref) => + Relation Name ref -> + Relation Name ref -> + Map Name (DiffOp (Synhashed ref)) -> + Map Name (Updated ref) -> + Map Name (HumanDiffOp ref) + computeHumanDiffOp oldRelation newRelation diffs propagatedUpdates = alignWith go diffs propagatedUpdates + where + go :: These (DiffOp (Synhashed ref)) (Updated ref) -> (HumanDiffOp ref) + go = \case + This diff -> humanizeDiffOp (Synhashed.value <$> diff) + That updated -> HumanDiffOp'PropagatedUpdate updated + These diff updated -> error (reportBug "E488729" ("The impossible happened, an update in merge was detected as both a propagated AND core update " ++ show diff ++ " and " ++ show updated)) + + humanizeDiffOp :: DiffOp ref -> HumanDiffOp ref + humanizeDiffOp = \case + DiffOp'Add ref -> + -- This name is newly added. We need to check if it's a new definition, an alias, or a rename. + case Set.toList (Rel.lookupRan ref oldRelation) of + -- No old names for this ref, so it's a new addition not an alias + [] -> HumanDiffOp'Add ref + -- There are old names for this ref, but not old refs for this name, so it's + -- either a new alias or a rename. + -- + -- If at least one old name for this ref no longer exists, we treat it like a + -- rename. + (n : ns) -> do + let existingNames = NESet.fromList (n NEList.:| ns) + case NESet.nonEmptySet (Rel.lookupRan ref newRelation) of + Nothing -> error (reportBug "E458329" ("Expected to find at least one name for ref in new namespace, since we found the ref by the name.")) + Just allNewNames -> + case NESet.nonEmptySet (NESet.difference existingNames allNewNames) of + -- If all the old names still exist in the new namespace, it's a new alias. + Nothing -> HumanDiffOp'AliasOf ref existingNames + -- Otherwise, treat it as a rename. + Just namesWhichDisappeared -> + HumanDiffOp'RenamedFrom ref namesWhichDisappeared + DiffOp'Delete ref -> + case NEL.nonEmpty $ Set.toList (Rel.lookupRan ref newRelation) of + -- No names for this ref, it was removed. 
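+            -- (If other names for the ref do survive, the deletion is reported as a rename
+            -- to those surviving names instead.)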
+ Nothing -> HumanDiffOp'Delete ref + Just newNames -> HumanDiffOp'RenamedTo ref (NESet.fromList newNames) + DiffOp'Update Updated {old, new} -> HumanDiffOp'Update Updated {old, new} ------------------------------------------------------------------------------------------------------------------------ -- Syntactic hashing @@ -183,19 +287,6 @@ synhashDefnsWith hashTerm hashType = do hashType1 name typ = Synhashed (hashType name typ) typ ------------------------------------------------------------------------------------------------------------------------- --- Pretty-print env helpers - -deepNamespaceDefinitionsToPpe :: Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name) -> PrettyPrintEnv -deepNamespaceDefinitionsToPpe Defns {terms, types} = - PrettyPrintEnv (arbitraryName terms) (arbitraryName types) - where - arbitraryName :: (Ord ref) => BiMultimap ref Name -> ref -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)] - arbitraryName names ref = - BiMultimap.lookupDom ref names - & Set.lookupMin - & maybe [] \name -> [(HQ'.NameOnly name, HQ'.NameOnly name)] - ------------------------------------------------------------------------------------------------------------------------ -- Looking up terms and decls that we expect to be there diff --git a/unison-merge/src/Unison/Merge/DiffOp.hs b/unison-merge/src/Unison/Merge/DiffOp.hs index db980f480b..9a17b3031f 100644 --- a/unison-merge/src/Unison/Merge/DiffOp.hs +++ b/unison-merge/src/Unison/Merge/DiffOp.hs @@ -14,4 +14,4 @@ data DiffOp a = DiffOp'Add !a | DiffOp'Delete !a | DiffOp'Update !(Updated a) - deriving stock (Functor, Show) + deriving stock (Foldable, Functor, Show, Traversable) diff --git a/unison-merge/src/Unison/Merge/HumanDiffOp.hs b/unison-merge/src/Unison/Merge/HumanDiffOp.hs new file mode 100644 index 0000000000..1a4c5e4299 --- /dev/null +++ b/unison-merge/src/Unison/Merge/HumanDiffOp.hs @@ -0,0 +1,28 @@ +module Unison.Merge.HumanDiffOp + ( HumanDiffOp (..), + ) +where + +import Data.Set.NonEmpty (NESet) +import Unison.Merge.Updated (Updated) +import Unison.Name (Name) + +-- | A diff operation is one of: +-- +-- * An add (where nothing was) +-- * A delete (of the thing that was) +-- * An update (from old to new) +-- * A propagated update (from old to new) +-- * An alias of some definition(s) on the other side +-- * A rename from some definition(s) on the other side +data HumanDiffOp ref + = HumanDiffOp'Add !ref + | HumanDiffOp'Delete !ref + | HumanDiffOp'Update !(Updated ref) + | HumanDiffOp'PropagatedUpdate !(Updated ref) + | HumanDiffOp'AliasOf !ref !(NESet Name {- existing names -}) + | -- The definition at this location was renamed from the given set of names to the current place + HumanDiffOp'RenamedFrom !ref !(NESet Name) + | -- The definition at this location was renamed to the given set of names from the current place + HumanDiffOp'RenamedTo !ref !(NESet Name) + deriving stock (Show) diff --git a/unison-merge/src/Unison/Merge/Internal/Types.hs b/unison-merge/src/Unison/Merge/Internal/Types.hs new file mode 100644 index 0000000000..d1c29ebb01 --- /dev/null +++ b/unison-merge/src/Unison/Merge/Internal/Types.hs @@ -0,0 +1,60 @@ +-- | Internal types module to house types that would require mutual recursion at the module level if defined separately +module Unison.Merge.Internal.Types + ( ThreeWay (..), + TwoOrThreeWay (..), + ) +where + +import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith)) +import Data.These (These (..)) +import Unison.Prelude + +data ThreeWay a = ThreeWay + { lca :: 
!a, + alice :: !a, + bob :: !a + } + deriving stock (Foldable, Functor, Generic, Traversable) + +instance Applicative ThreeWay where + pure :: a -> ThreeWay a + pure x = + ThreeWay x x x + + (<*>) :: ThreeWay (a -> b) -> ThreeWay a -> ThreeWay b + ThreeWay f g h <*> ThreeWay x y z = + ThreeWay (f x) (g y) (h z) + +instance Semialign ThreeWay where + alignWith :: (These a b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c + alignWith f (ThreeWay a b c) (ThreeWay x y z) = + ThreeWay (f (These a x)) (f (These b y)) (f (These c z)) + +instance Unzip ThreeWay where + unzipWith :: (c -> (a, b)) -> ThreeWay c -> (ThreeWay a, ThreeWay b) + unzipWith f (ThreeWay a b c) = + let (i, x) = f a + (j, y) = f b + (k, z) = f c + in (ThreeWay i j k, ThreeWay x y z) + +instance Zip ThreeWay where + zipWith :: (a -> b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c + zipWith f (ThreeWay a b c) (ThreeWay x y z) = + ThreeWay (f a x) (f b y) (f c z) + +data TwoOrThreeWay a = TwoOrThreeWay + { lca :: Maybe a, + alice :: a, + bob :: a + } + deriving stock (Foldable, Functor, Generic, Traversable) + +instance Applicative TwoOrThreeWay where + pure :: a -> TwoOrThreeWay a + pure x = + TwoOrThreeWay (Just x) x x + + (<*>) :: TwoOrThreeWay (a -> b) -> TwoOrThreeWay a -> TwoOrThreeWay b + TwoOrThreeWay f g h <*> TwoOrThreeWay x y z = + TwoOrThreeWay (f <*> x) (g y) (h z) diff --git a/unison-merge/src/Unison/Merge/Libdeps.hs b/unison-merge/src/Unison/Merge/Libdeps.hs index ec0b9899d4..ff580a7b9b 100644 --- a/unison-merge/src/Unison/Merge/Libdeps.hs +++ b/unison-merge/src/Unison/Merge/Libdeps.hs @@ -2,6 +2,7 @@ module Unison.Merge.Libdeps ( LibdepDiffOp (..), diffLibdeps, + mergeLibdepsDiffs, applyLibdepsDiff, getTwoFreshLibdepNames, ) @@ -15,6 +16,7 @@ import Data.These (These (..)) import Unison.Merge.DiffOp (DiffOp (..)) import Unison.Merge.EitherWay qualified as EitherWay import Unison.Merge.ThreeWay (ThreeWay (..)) +import Unison.Merge.ThreeWay qualified as ThreeWay import Unison.Merge.TwoDiffOps (TwoDiffOps (..)) import Unison.Merge.TwoDiffOps qualified as TwoDiffOps import Unison.Merge.TwoWay (TwoWay (..)) @@ -33,45 +35,43 @@ data LibdepDiffOp a | AddBothLibdeps !a !a | DeleteLibdep --- | Perform a three-way diff on two collections of library dependencies. +-- | Perform two two-way diffs on two collections of library dependencies. This is only half of a three-way diff: use +-- 'mergeLibdepsDiffs' to complete it. diffLibdeps :: + forall k v. (Ord k, Eq v) => -- | Library dependencies. ThreeWay (Map k v) -> - -- | Library dependencies diff. - Map k (LibdepDiffOp v) + -- | Library dependencies diffs. + TwoWay (Map k (DiffOp v)) diffLibdeps libdeps = - mergeDiffs (twoWayDiff libdeps.lca libdeps.alice) (twoWayDiff libdeps.lca libdeps.bob) - --- `twoWayDiff old new` computes a diff between old thing `old` and new thing `new`. --- --- Values present in `old` but not `new` are tagged as "deleted"; similar for "added" and "updated". 
-twoWayDiff :: (Ord k, Eq v) => Map k v -> Map k v -> Map k (DiffOp v) -twoWayDiff = - Map.merge - (Map.mapMissing \_ -> DiffOp'Delete) - (Map.mapMissing \_ -> DiffOp'Add) - ( Map.zipWithMaybeMatched \_ old new -> - if old == new - then Nothing - else Just (DiffOp'Update Updated {old, new}) - ) + f <$> ThreeWay.forgetLca libdeps + where + f :: Map k v -> Map k (DiffOp v) + f = + Map.merge + (Map.mapMissing \_ -> DiffOp'Delete) + (Map.mapMissing \_ -> DiffOp'Add) + ( Map.zipWithMaybeMatched \_ old new -> + if old == new + then Nothing + else Just (DiffOp'Update Updated {old, new}) + ) + libdeps.lca -- Merge two library dependency diffs together: -- -- * Keep all adds/updates (allowing conflicts as necessary, which will be resolved later) -- * Ignore deletes that only one party makes (because the other party may expect the dep to still be there) -mergeDiffs :: +mergeLibdepsDiffs :: forall k v. (Ord k, Eq v) => - -- The LCA->Alice library dependencies diff. - Map k (DiffOp v) -> - -- The LCA->Bob library dependencies diff. - Map k (DiffOp v) -> + -- The LCA->Alice and LCA->Bob library dependencies diffs. + TwoWay (Map k (DiffOp v)) -> -- The merged library dependencies diff. Map k (LibdepDiffOp v) -mergeDiffs alice bob = - catMaybes (alignWith combineDiffOps alice bob) +mergeLibdepsDiffs diffs = + catMaybes (alignWith combineDiffOps diffs.alice diffs.bob) combineDiffOps :: (Eq a) => These (DiffOp a) (DiffOp a) -> Maybe (LibdepDiffOp a) combineDiffOps = diff --git a/unison-merge/src/Unison/Merge/Mergeblob1.hs b/unison-merge/src/Unison/Merge/Mergeblob1.hs index aef0ec7973..df86ed9f7d 100644 --- a/unison-merge/src/Unison/Merge/Mergeblob1.hs +++ b/unison-merge/src/Unison/Merge/Mergeblob1.hs @@ -1,20 +1,26 @@ module Unison.Merge.Mergeblob1 ( Mergeblob1 (..), + hydratedDefnsLabeledDependencies, makeMergeblob1, ) where +import Control.Lens import Data.List qualified as List import Data.Map.Strict qualified as Map +import Data.Set qualified as Set import Unison.DataDeclaration (Decl) import Unison.DataDeclaration qualified as DataDeclaration +import Unison.DataDeclaration.Dependencies qualified as Decl import Unison.DeclNameLookup (DeclNameLookup) +import Unison.LabeledDependency qualified as LD import Unison.Merge.CombineDiffs (CombinedDiffOp, combineDiffs) import Unison.Merge.DeclCoherencyCheck (IncoherentDeclReason, checkDeclCoherency, lenientCheckDeclCoherency) -import Unison.Merge.Diff (nameBasedNamespaceDiff) +import Unison.Merge.Diff (humanizeDiffs, nameBasedNamespaceDiff) import Unison.Merge.DiffOp (DiffOp) import Unison.Merge.EitherWay (EitherWay (..)) -import Unison.Merge.Libdeps (LibdepDiffOp, applyLibdepsDiff, diffLibdeps, getTwoFreshLibdepNames) +import Unison.Merge.HumanDiffOp (HumanDiffOp) +import Unison.Merge.Libdeps (applyLibdepsDiff, diffLibdeps, getTwoFreshLibdepNames, mergeLibdepsDiffs) import Unison.Merge.Mergeblob0 (Mergeblob0 (..)) import Unison.Merge.PartialDeclNameLookup (PartialDeclNameLookup) import Unison.Merge.PartitionCombinedDiffs (partitionCombinedDiffs) @@ -25,13 +31,20 @@ import Unison.Merge.TwoWay (TwoWay (..)) import Unison.Merge.Unconflicts (Unconflicts) import Unison.Name (Name) import Unison.NameSegment (NameSegment) +import Unison.Names (Names) import Unison.Parser.Ann (Ann) import Unison.Prelude +import Unison.PrettyPrintEnv.Names qualified as PPE +import Unison.PrettyPrintEnvDecl qualified as PPED +import Unison.PrettyPrintEnvDecl.Names qualified as PPED import Unison.Reference (TermReference, TermReferenceId, TypeReference, TypeReferenceId) +import 
Unison.Reference qualified as Reference
 import Unison.Referent (Referent)
 import Unison.Symbol (Symbol)
 import Unison.Term (Term)
+import Unison.Term qualified as Term
 import Unison.Type (Type)
+import Unison.Type qualified as Type
 import Unison.Util.BiMultimap (BiMultimap)
 import Unison.Util.Defns (Defns (..), DefnsF, DefnsF2, DefnsF3)
@@ -40,7 +53,8 @@ data Mergeblob1 libdep = Mergeblob1
     declNameLookups :: TwoWay DeclNameLookup,
     defns :: ThreeWay (Defns (BiMultimap Referent Name) (BiMultimap TypeReference Name)),
     diff :: DefnsF2 (Map Name) CombinedDiffOp Referent TypeReference,
-    diffs :: TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference),
+    diffsFromLCA :: TwoWay (DefnsF3 (Map Name) DiffOp Synhashed Referent TypeReference),
+    humanDiffsFromLCA :: TwoWay (DefnsF2 (Map Name) HumanDiffOp Referent TypeReference),
     hydratedDefns ::
       ThreeWay
         ( DefnsF
@@ -49,16 +63,42 @@ data Mergeblob1 libdep = Mergeblob1
             (TypeReferenceId, Decl Symbol Ann)
         ),
     lcaDeclNameLookup :: PartialDeclNameLookup,
-    libdeps :: Map NameSegment libdep,
-    libdepsDiff :: Map NameSegment (LibdepDiffOp libdep),
     lcaLibdeps :: Map NameSegment libdep,
+    libdeps :: Map NameSegment libdep,
+    libdepsDiffs :: TwoWay (Map NameSegment (DiffOp libdep)),
     unconflicts :: DefnsF Unconflicts Referent TypeReference
   }
 
+-- | Get the labeled dependencies of all the hydrated definitions AND their direct dependencies.
+hydratedDefnsLabeledDependencies ::
+  DefnsF
+    (Map Name)
+    (TermReferenceId, (Term Symbol Ann, Type Symbol Ann))
+    (TypeReferenceId, Decl Symbol Ann) ->
+  Set LD.LabeledDependency
+hydratedDefnsLabeledDependencies defns =
+  let termDeps :: Set LD.LabeledDependency
+      termDeps =
+        foldOf
+          ( folded
+              . beside
+                (to Reference.DerivedId . to LD.TermReference . to Set.singleton)
+                (beside (to Term.labeledDependencies) (to Type.labeledDependencies))
+          )
+          defns.terms
+
+      typeDeps :: Set LD.LabeledDependency
+      typeDeps =
+        defns.types
+          & foldMap \(typeRefId, typeDecl) ->
+            Decl.labeledDeclDependenciesIncludingSelfAndFieldAccessors (Reference.DerivedId typeRefId) typeDecl
+   in Set.union termDeps typeDeps
+
 makeMergeblob1 ::
   forall libdep.
(Eq libdep) =>
   Mergeblob0 libdep ->
+  ThreeWay Names {- Names for _at least_ every reference in 'hydratedDefnsLabeledDependencies' -} ->
   ThreeWay
     ( DefnsF
         (Map Name)
@@ -66,7 +106,9 @@ makeMergeblob1 ::
         (TypeReferenceId, Decl Symbol Ann)
     ) ->
   Either (EitherWay IncoherentDeclReason) (Mergeblob1 libdep)
-makeMergeblob1 blob hydratedDefns = do
+makeMergeblob1 blob names3 hydratedDefns = do
+  let ppeds3 :: ThreeWay PPED.PrettyPrintEnvDecl
+      ppeds3 = names3 <&> \names -> PPED.makePPED (PPE.namer names) (PPE.suffixifyByHash names)
   -- Make one big constructor count lookup for all type decls
   let numConstructors =
         Map.empty
@@ -94,10 +136,11 @@ makeMergeblob1 blob hydratedDefns = do
       lenientCheckDeclCoherency blob.nametrees.lca numConstructors
 
   -- Diff LCA->Alice and LCA->Bob
-  let diffs =
+  let (diffsFromLCA, propagatedUpdates) =
         nameBasedNamespaceDiff
           declNameLookups
           lcaDeclNameLookup
+          ppeds3
           blob.defns
           Defns
             { terms =
@@ -111,21 +154,22 @@ makeMergeblob1 blob hydratedDefns = do
             }
 
   -- Combine the LCA->Alice and LCA->Bob diffs together
-  let diff =
-        combineDiffs diffs
+  let diff = combineDiffs diffsFromLCA
+
+  let humanDiffsFromLCA = humanizeDiffs names3 diffsFromLCA propagatedUpdates
 
   -- Partition the combined diff into the conflicted things and the unconflicted things
   let (conflicts, unconflicts) =
         partitionCombinedDiffs (ThreeWay.forgetLca blob.defns) declNameLookups diff
 
   -- Diff and merge libdeps
-  let libdepsDiff :: Map NameSegment (LibdepDiffOp libdep)
-      libdepsDiff =
+  let libdepsDiffs :: TwoWay (Map NameSegment (DiffOp libdep))
+      libdepsDiffs =
         diffLibdeps blob.libdeps
 
   let libdeps :: Map NameSegment libdep
       libdeps =
-        applyLibdepsDiff getTwoFreshLibdepNames blob.libdeps libdepsDiff
+        applyLibdepsDiff getTwoFreshLibdepNames blob.libdeps (mergeLibdepsDiffs libdepsDiffs)
 
   pure
     Mergeblob1
@@ -133,11 +177,12 @@ makeMergeblob1 blob hydratedDefns = do
      { declNameLookups,
        defns = blob.defns,
        diff,
-       diffs,
+       diffsFromLCA,
+       humanDiffsFromLCA,
        hydratedDefns,
        lcaDeclNameLookup,
-       libdeps,
-       libdepsDiff,
        lcaLibdeps = blob.libdeps.lca,
+       libdeps,
+       libdepsDiffs,
        unconflicts
      }
diff --git a/unison-merge/src/Unison/Merge/Mergeblob2.hs b/unison-merge/src/Unison/Merge/Mergeblob2.hs
index 629d8d2146..b42594e438 100644
--- a/unison-merge/src/Unison/Merge/Mergeblob2.hs
+++ b/unison-merge/src/Unison/Merge/Mergeblob2.hs
@@ -65,7 +65,7 @@ data Mergeblob2Error
 
 makeMergeblob2 :: Mergeblob1 libdep -> Either Mergeblob2Error (Mergeblob2 libdep)
 makeMergeblob2 blob = do
   -- Bail early if it looks like we can't proceed with the merge, because Alice or Bob has one or more conflicted alias
-  for_ ((,) <$> TwoWay Alice Bob <*> blob.diffs) \(who, diff) ->
+  for_ ((,) <$> TwoWay Alice Bob <*> blob.diffsFromLCA) \(who, diff) ->
     whenJust (findConflictedAlias blob.defns.lca diff) $
       Left . Mergeblob2Error'ConflictedAlias .
who diff --git a/unison-merge/src/Unison/Merge/ThreeWay.hs b/unison-merge/src/Unison/Merge/ThreeWay.hs index cc9d24c47d..aa49f7b9d3 100644 --- a/unison-merge/src/Unison/Merge/ThreeWay.hs +++ b/unison-merge/src/Unison/Merge/ThreeWay.hs @@ -1,48 +1,18 @@ module Unison.Merge.ThreeWay ( ThreeWay (..), forgetLca, + toTwoOrThreeWay, ) where -import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith)) -import Data.These (These (..)) +import Unison.Merge.Internal.Types (ThreeWay (..)) +import Unison.Merge.TwoOrThreeWay (TwoOrThreeWay (..)) import Unison.Merge.TwoWay (TwoWay (..)) -import Unison.Prelude - -data ThreeWay a = ThreeWay - { lca :: !a, - alice :: !a, - bob :: !a - } - deriving stock (Foldable, Functor, Generic, Traversable) - -instance Applicative ThreeWay where - pure :: a -> ThreeWay a - pure x = - ThreeWay x x x - - (<*>) :: ThreeWay (a -> b) -> ThreeWay a -> ThreeWay b - ThreeWay f g h <*> ThreeWay x y z = - ThreeWay (f x) (g y) (h z) - -instance Semialign ThreeWay where - alignWith :: (These a b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c - alignWith f (ThreeWay a b c) (ThreeWay x y z) = - ThreeWay (f (These a x)) (f (These b y)) (f (These c z)) - -instance Unzip ThreeWay where - unzipWith :: (c -> (a, b)) -> ThreeWay c -> (ThreeWay a, ThreeWay b) - unzipWith f (ThreeWay a b c) = - let (i, x) = f a - (j, y) = f b - (k, z) = f c - in (ThreeWay i j k, ThreeWay x y z) - -instance Zip ThreeWay where - zipWith :: (a -> b -> c) -> ThreeWay a -> ThreeWay b -> ThreeWay c - zipWith f (ThreeWay a b c) (ThreeWay x y z) = - ThreeWay (f a x) (f b y) (f c z) forgetLca :: ThreeWay a -> TwoWay a forgetLca ThreeWay {alice, bob} = TwoWay {alice, bob} + +toTwoOrThreeWay :: ThreeWay a -> TwoOrThreeWay a +toTwoOrThreeWay ThreeWay {alice, bob, lca} = + TwoOrThreeWay {alice, bob, lca = Just lca} diff --git a/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs b/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs index 556ff0fd2d..cec3725c11 100644 --- a/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs +++ b/unison-merge/src/Unison/Merge/TwoOrThreeWay.hs @@ -1,13 +1,12 @@ module Unison.Merge.TwoOrThreeWay ( TwoOrThreeWay (..), + toThreeWay, ) where +import Unison.Merge.Internal.Types (ThreeWay (..), TwoOrThreeWay (..)) import Unison.Prelude -data TwoOrThreeWay a = TwoOrThreeWay - { lca :: Maybe a, - alice :: a, - bob :: a - } - deriving stock (Foldable, Functor, Generic, Traversable) +toThreeWay :: a -> TwoOrThreeWay a -> ThreeWay a +toThreeWay x TwoOrThreeWay {alice, bob, lca} = + ThreeWay {alice, bob, lca = fromMaybe x lca} diff --git a/unison-merge/src/Unison/Merge/Updated.hs b/unison-merge/src/Unison/Merge/Updated.hs index 00b64ed98b..6dd5fc41b8 100644 --- a/unison-merge/src/Unison/Merge/Updated.hs +++ b/unison-merge/src/Unison/Merge/Updated.hs @@ -10,4 +10,4 @@ data Updated a = Updated { old :: a, new :: a } - deriving stock (Functor, Generic, Show) + deriving stock (Foldable, Functor, Generic, Show, Traversable) diff --git a/unison-merge/unison-merge.cabal b/unison-merge/unison-merge.cabal index e53e024a67..91d98ef5f7 100644 --- a/unison-merge/unison-merge.cabal +++ b/unison-merge/unison-merge.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.36.0. +-- This file has been generated from package.yaml by hpack version 0.37.0. 
-- -- see: https://github.com/sol/hpack @@ -25,6 +25,8 @@ library Unison.Merge.EitherWay Unison.Merge.EitherWayI Unison.Merge.FindConflictedAlias + Unison.Merge.HumanDiffOp + Unison.Merge.Internal.Types Unison.Merge.Libdeps Unison.Merge.Mergeblob0 Unison.Merge.Mergeblob1 @@ -83,6 +85,7 @@ library build-depends: base , containers + , either , lens , mtl , nonempty-containers diff --git a/unison-runtime/src/Unison/Runtime/Machine.hs b/unison-runtime/src/Unison/Runtime/Machine.hs index 8976269e17..179310e5da 100644 --- a/unison-runtime/src/Unison/Runtime/Machine.hs +++ b/unison-runtime/src/Unison/Runtime/Machine.hs @@ -33,6 +33,7 @@ import Control.Lens import Data.Atomics qualified as Atomic import Data.Bits import Data.Functor.Classes (Eq1 (..), Ord1 (..)) +import Data.List qualified as List import Data.IORef (IORef) import Data.IORef qualified as IORef import Data.Map.Strict qualified as M @@ -2361,6 +2362,12 @@ preEvalTopLevelConstants cacheableCombs newCombs cc = do atomically $ do modifyTVar evaluatedCacheableCombsVar $ EC.mapInsert w (EC.mapSingleton 0 $ CachedVal w val) apply0 (Just hook) cc activeThreads w + `catch` \e -> + -- ignore sandboxing exceptions during pre-eval, in case they + -- don't matter for the final result. + if isSandboxingException e + then pure () + else throwIO e evaluatedCacheableCombs <- readTVarIO evaluatedCacheableCombsVar let allNew = evaluatedCacheableCombs <> newCombs @@ -2368,6 +2375,19 @@ preEvalTopLevelConstants cacheableCombs newCombs cc = do -- new cached versions. atomically $ modifyTVar (combs cc) (\existingCombs -> (resolveCombs (Just $ EC.mapDifference existingCombs allNew) allNew) <> existingCombs) +-- Checks if a runtime exception is due to sandboxing. +-- +-- This is used above during pre-evaluation, to ignore sandboxing +-- exceptions for top-level constant dependencies of docs and such, in +-- case the docs don't actually evaluate them. +isSandboxingException :: RuntimeExn -> Bool +isSandboxingException (PE _ (P.toPlainUnbroken -> msg)) = + List.isPrefixOf sdbx1 msg || List.isPrefixOf sdbx2 msg + where + sdbx1 = "attempted to use sandboxed operation" + sdbx2 = "Attempted to use disallowed builtin in sandboxed" +isSandboxingException _ = False + expandSandbox :: Map Reference (Set Reference) -> [(Reference, SuperGroup Symbol)] -> diff --git a/unison-share-api/src/Unison/SyncV2/API.hs b/unison-share-api/src/Unison/SyncV2/API.hs new file mode 100644 index 0000000000..ef80d3d1cf --- /dev/null +++ b/unison-share-api/src/Unison/SyncV2/API.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE DataKinds #-} + +module Unison.SyncV2.API + ( API, + api, + Routes (..), + ) +where + +import Data.Proxy +import GHC.Generics (Generic) +import Servant.API +import Unison.SyncV2.Types +import Unison.Util.Servant.CBOR (CBOR) + +api :: Proxy API +api = Proxy + +type API = NamedRoutes Routes + +type DownloadEntitiesStream = + -- | The causal hash the client needs. The server should provide it and all of its dependencies + ReqBody '[CBOR, JSON] DownloadEntitiesRequest + :> StreamPost NoFraming OctetStream (SourceIO (CBORStream DownloadEntitiesChunk)) + +-- | Get the relevant dependencies of a causal, including the causal spine and the causal hashes of any library roots. 
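+--
+-- The response is a stream of CBOR-encoded 'CausalDependenciesChunk' values. As an
+-- illustrative sketch (not captured wire output), a pull might receive:
+--
+-- > CausalHashDepC { causalHash = <spine hash 2>, dependencyType = CausalSpineDependency }
+-- > CausalHashDepC { causalHash = <spine hash 1>, dependencyType = CausalSpineDependency }
+-- > CausalHashDepC { causalHash = <lib root hash>, dependencyType = LibDependency }
+--
+-- which lets the client work out which causals it already has before asking the server
+-- to download any entities.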
+type CausalDependenciesStream =
+  ReqBody '[CBOR, JSON] CausalDependenciesRequest
+    :> StreamPost NoFraming OctetStream (SourceIO (CBORStream CausalDependenciesChunk))
+
+data Routes mode = Routes
+  { downloadEntitiesStream :: mode :- "entities" :> "download" :> DownloadEntitiesStream,
+    causalDependenciesStream :: mode :- "entities" :> "dependencies" :> CausalDependenciesStream
+  }
+  deriving stock (Generic)
diff --git a/unison-share-api/src/Unison/SyncV2/Types.hs b/unison-share-api/src/Unison/SyncV2/Types.hs
index 2f4432ee74..0a716a5c37 100644
--- a/unison-share-api/src/Unison/SyncV2/Types.hs
+++ b/unison-share-api/src/Unison/SyncV2/Types.hs
@@ -1,3 +1,5 @@
+{-# LANGUAGE RecordWildCards #-}
+
 module Unison.SyncV2.Types
   ( DownloadEntitiesRequest (..),
     DownloadEntitiesChunk (..),
@@ -6,7 +8,11 @@ module Unison.SyncV2.Types
     StreamInitInfo (..),
     SyncError (..),
     DownloadEntitiesError (..),
+    CausalDependenciesRequest (..),
+    CausalDependenciesChunk (..),
+    DependencyType (..),
     CBORBytes (..),
+    CBORStream (..),
     EntityKind (..),
     serialiseCBORBytes,
     deserialiseOrFailCBORBytes,
@@ -23,6 +29,7 @@ import Codec.Serialise qualified as CBOR
 import Codec.Serialise.Decoding qualified as CBOR
 import Control.Exception (Exception)
 import Data.Aeson (FromJSON (..), ToJSON (..), object, withObject, (.:), (.=))
+import Data.Aeson qualified as Aeson
 import Data.Map (Map)
 import Data.Map qualified as Map
 import Data.Set (Set)
@@ -32,7 +39,6 @@ import Data.Word (Word16, Word64)
 import U.Codebase.HashTags (CausalHash)
 import U.Codebase.Sqlite.TempEntity (TempEntity)
 import Unison.Core.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName)
-import Unison.Debug qualified as Debug
 import Unison.Hash32 (Hash32)
 import Unison.Prelude (From (..))
 import Unison.Server.Orphans ()
@@ -186,7 +192,7 @@ optionalDecodeMapKey k m =
     Nothing -> pure Nothing
     Just bs -> Just <$> decodeUnknownCBORBytes bs
 
--- | Serialised as a map to allow for future expansion
+-- | Serialised as a map for forward compatibility, allowing future expansion.
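+--
+-- A sketch of the encoded shape (using the keys from the instance below):
+--
+-- > { "v": version, "es": entity sorting, "ne": entity count, "rc": root causal, "br": branch ref }
+--
+-- Optional keys ("ne", "br") are simply omitted when absent, and unknown keys are ignored
+-- on decode, so fields can be added later without breaking older clients.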
instance Serialise StreamInitInfo where
   encode (StreamInitInfo {version, entitySorting, numEntities, rootCausalHash, rootBranchRef}) =
     CBOR.encode
@@ -199,18 +205,11 @@ instance Serialise StreamInitInfo where
         <> maybe [] (\br -> [("br", serialiseUnknownCBORBytes br)]) rootBranchRef
       )
   decode = do
-    Debug.debugLogM Debug.Temp "Decoding StreamInitInfo"
-    Debug.debugLogM Debug.Temp "Decoding Map"
     m <- CBOR.decode
-    Debug.debugLogM Debug.Temp "Decoding Version"
     version <- decodeMapKey "v" m
-    Debug.debugLogM Debug.Temp "Decoding Entity Sorting"
     entitySorting <- decodeMapKey "es" m
-    Debug.debugLogM Debug.Temp "Decoding Number of Entities"
     numEntities <- (optionalDecodeMapKey "ne" m)
-    Debug.debugLogM Debug.Temp "Decoding Root Causal Hash"
     rootCausalHash <- decodeMapKey "rc" m
-    Debug.debugLogM Debug.Temp "Decoding Branch Ref"
     rootBranchRef <- optionalDecodeMapKey "br" m
     pure StreamInitInfo {version, entitySorting, numEntities, rootCausalHash, rootBranchRef}
@@ -306,3 +305,85 @@ instance Serialise EntityKind where
     3 -> pure TypeEntity
     4 -> pure PatchEntity
     _ -> fail "invalid tag"
+
+------------------------------------------------------------------------------------------------------------------------
+-- Causal Dependencies
+
+data CausalDependenciesRequest = CausalDependenciesRequest
+  { branchRef :: BranchRef,
+    rootCausal :: HashJWT
+  }
+  deriving stock (Show, Eq, Ord)
+
+instance ToJSON CausalDependenciesRequest where
+  toJSON (CausalDependenciesRequest branchRef rootCausal) =
+    object
+      [ "branch_ref" .= branchRef,
+        "root_causal" .= rootCausal
+      ]
+
+instance FromJSON CausalDependenciesRequest where
+  parseJSON = Aeson.withObject "CausalDependenciesRequest" \obj -> do
+    branchRef <- obj .: "branch_ref"
+    rootCausal <- obj .: "root_causal"
+    pure CausalDependenciesRequest {..}
+
+instance Serialise CausalDependenciesRequest where
+  encode (CausalDependenciesRequest {branchRef, rootCausal}) =
+    encode branchRef <> encode rootCausal
+  decode = CausalDependenciesRequest <$> decode <*> decode
+
+data DependencyType
+  = -- This is a top-level history node of the root we're pulling.
+    CausalSpineDependency
+  | -- This is the causal root of a library dependency.
+    LibDependency
+  deriving (Show, Eq, Ord)
+
+instance Serialise DependencyType where
+  encode = \case
+    CausalSpineDependency -> CBOR.encodeWord8 0
+    LibDependency -> CBOR.encodeWord8 1
+  decode = do
+    tag <- CBOR.decodeWord8
+    case tag of
+      0 -> pure CausalSpineDependency
+      1 -> pure LibDependency
+      _ -> fail "invalid tag"
+
+instance ToJSON DependencyType where
+  toJSON = \case
+    CausalSpineDependency -> "causal_spine"
+    LibDependency -> "lib"
+
+instance FromJSON DependencyType where
+  parseJSON = Aeson.withText "DependencyType" \case
+    "causal_spine" -> pure CausalSpineDependency
+    "lib" -> pure LibDependency
+    _ -> fail "invalid DependencyType"
+
+-- | A chunk of the causal dependencies response stream.
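+--
+-- On the wire, each chunk is a tag byte followed by its fields, per the 'Serialise'
+-- instance below: tag 0 ('CausalHashDepChunkTag'), then the causal hash, then the
+-- 'DependencyType'.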
+data CausalDependenciesChunk + = CausalHashDepC {causalHash :: Hash32, dependencyType :: DependencyType} + deriving (Show, Eq, Ord) + +data CausalDependenciesChunkTag = CausalHashDepChunkTag + deriving (Show, Eq, Ord) + +instance Serialise CausalDependenciesChunkTag where + encode = \case + CausalHashDepChunkTag -> CBOR.encodeWord8 0 + decode = do + tag <- CBOR.decodeWord8 + case tag of + 0 -> pure CausalHashDepChunkTag + _ -> fail "invalid tag" + +instance Serialise CausalDependenciesChunk where + encode = \case + (CausalHashDepC {causalHash, dependencyType}) -> do + encode CausalHashDepChunkTag <> CBOR.encode causalHash <> CBOR.encode dependencyType + decode = do + tag <- decode + case tag of + CausalHashDepChunkTag -> CausalHashDepC <$> CBOR.decode <*> CBOR.decode diff --git a/unison-share-api/src/Unison/Util/Servant/CBOR.hs b/unison-share-api/src/Unison/Util/Servant/CBOR.hs index 18fd94904c..580b1a7124 100644 --- a/unison-share-api/src/Unison/Util/Servant/CBOR.hs +++ b/unison-share-api/src/Unison/Util/Servant/CBOR.hs @@ -5,6 +5,7 @@ module Unison.Util.Servant.CBOR ( CBOR, UnknownCBORBytes, CBORBytes (..), + CBORStream (..), deserialiseOrFailCBORBytes, serialiseCBORBytes, decodeCBORBytes, @@ -86,3 +87,14 @@ serialiseUnknownCBORBytes = CBORBytes . CBOR.serialise data Unknown type UnknownCBORBytes = CBORBytes Unknown + +-- | Wrapper for a stream of CBOR data. Each chunk may not be a complete CBOR value, but the concatenation of all the chunks is a valid CBOR stream. +newtype CBORStream a = CBORStream BL.ByteString + deriving (Serialise) via (BL.ByteString) + deriving (Eq, Show, Ord) + +instance MimeRender OctetStream (CBORStream a) where + mimeRender Proxy (CBORStream bs) = bs + +instance MimeUnrender OctetStream (CBORStream a) where + mimeUnrender Proxy bs = Right (CBORStream bs) diff --git a/unison-share-api/unison-share-api.cabal b/unison-share-api/unison-share-api.cabal index d56eb5fb7a..d30617278b 100644 --- a/unison-share-api/unison-share-api.cabal +++ b/unison-share-api/unison-share-api.cabal @@ -50,6 +50,7 @@ library Unison.Sync.Common Unison.Sync.EntityValidation Unison.Sync.Types + Unison.SyncV2.API Unison.SyncV2.Types Unison.Util.Find Unison.Util.Servant.CBOR diff --git a/unison-src/transcripts/idempotent/api-list-projects-branches.md b/unison-src/transcripts/idempotent/api-list-projects-branches.md index 2f3bc28b22..0599cbf799 100644 --- a/unison-src/transcripts/idempotent/api-list-projects-branches.md +++ b/unison-src/transcripts/idempotent/api-list-projects-branches.md @@ -8,17 +8,15 @@ lump many of those together. Doing it this way ensures both the creation timesta the same direction so we don't end up with flaky non-deterministic tests. 
``` ucm :hide -scratch/main> project.create-empty project-cherry +project-apple/main> project.create-empty project-cherry -scratch/main> project.create-empty project-banana - -scratch/main> project.create-empty project-apple +project-apple/main> project.create-empty project-banana project-apple/main> branch a-branch-cherry -project-apple/main> branch a-branch-banana +project-apple/a-branch-cherry> branch a-branch-banana -project-apple/main> branch a-branch-apple +project-apple/a-branch-banana> branch a-branch-apple ``` ``` api diff --git a/unison-src/transcripts/idempotent/api-list-projects-branches.output.md b/unison-src/transcripts/idempotent/api-list-projects-branches.output.md new file mode 100644 index 0000000000..0599cbf799 --- /dev/null +++ b/unison-src/transcripts/idempotent/api-list-projects-branches.output.md @@ -0,0 +1,74 @@ +# List Projects And Branches Test + +I create projects and branches in reverse alphabetical order, and starting with `z` +to place them after `main` alphabetically. +This is because the results from the listing endpoints is sorted by (timestamp, name); but +the default sqlite timestamp only has second-level precision and the transcript will sometimes +lump many of those together. Doing it this way ensures both the creation timestamp and name sort +the same direction so we don't end up with flaky non-deterministic tests. + +``` ucm :hide +project-apple/main> project.create-empty project-cherry + +project-apple/main> project.create-empty project-banana + +project-apple/main> branch a-branch-cherry + +project-apple/a-branch-cherry> branch a-branch-banana + +project-apple/a-branch-banana> branch a-branch-apple +``` + +``` api +-- Should list all projects +GET /api/projects + [ + { + "activeBranchRef": "a-branch-apple", + "projectName": "project-apple" + }, + { + "activeBranchRef": "main", + "projectName": "project-banana" + }, + { + "activeBranchRef": "main", + "projectName": "project-cherry" + }, + { + "activeBranchRef": "main", + "projectName": "scratch" + } + ] +-- Can query for some infix of the project name +GET /api/projects?query=bana + [ + { + "activeBranchRef": "main", + "projectName": "project-banana" + } + ] +-- Should list all branches +GET /api/projects/project-apple/branches + [ + { + "branchName": "a-branch-apple" + }, + { + "branchName": "a-branch-banana" + }, + { + "branchName": "a-branch-cherry" + }, + { + "branchName": "main" + } + ] +-- Can query for some infix of the project name +GET /api/projects/project-apple/branches?query=bana + [ + { + "branchName": "a-branch-banana" + } + ] +``` diff --git a/unison-src/transcripts/idempotent/fix-5326.md b/unison-src/transcripts/idempotent/fix-5326.md index 267648cb4c..5d06bcbcf9 100644 --- a/unison-src/transcripts/idempotent/fix-5326.md +++ b/unison-src/transcripts/idempotent/fix-5326.md @@ -193,7 +193,9 @@ scratch/main> merge /foo Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... 
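The listing transcripts above depend on the endpoints returning results ordered by (timestamp, name). A minimal Haskell sketch of that ordering, assuming a hypothetical `Branch` row type (this illustrates why the test creates names in reverse alphabetical order; it is not UCM's actual implementation):

```haskell
import Data.List (sortOn)
import Data.Ord (Down (..))

-- Hypothetical projection of a branch row; not UCM's actual type.
data Branch = Branch {lastAccessed :: Integer, name :: String}

-- Most recently accessed first, with name as the tiebreak. Second-granularity
-- timestamps make rows created in quick succession collide on lastAccessed,
-- so the name tiebreak decides the order; creating names in reverse
-- alphabetical order makes both sort keys agree, keeping the listing
-- deterministic either way.
listBranches :: [Branch] -> [String]
listBranches = map name . sortOn (\b -> (Down (lastAccessed b), name b))
```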
diff --git a/unison-src/transcripts/idempotent/fix5506.md b/unison-src/transcripts/idempotent/fix5506.md new file mode 100644 index 0000000000..3157d04ddd --- /dev/null +++ b/unison-src/transcripts/idempotent/fix5506.md @@ -0,0 +1,62 @@ +``` ucm :hide +scratch/main> builtins.mergeio +``` + +``` unison + +stdOut = stdHandle StdOut + +putText h t = match putBytes.impl h (Text.toUtf8 t) with + Left e -> raise e + _ -> () + +printLine t = + putText stdOut t + putText stdOut "\n" +``` + +``` ucm :added-by-ucm + Loading changes detected in scratch.u. + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + printLine : Text ->{IO, Exception} () + putText : Handle -> Text ->{IO, Exception} () + stdOut : Handle +``` + +``` ucm +scratch/main> update + + Okay, I'm searching the branch for code that needs to be + updated... + + Done. +``` + +``` unison + +hmmm = {{ I'll try {printLine}. That's a good trick. }} +``` + +``` ucm :added-by-ucm + Loading changes detected in scratch.u. + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + hmmm : Doc2 +``` + +``` ucm +scratch/main> display hmmm + + I'll try printLine. That's a good trick. +``` diff --git a/unison-src/transcripts/merge.output.md b/unison-src/transcripts/merge.output.md index cc8f6aa7d7..f1ea01fb1d 100644 --- a/unison-src/transcripts/merge.output.md +++ b/unison-src/transcripts/merge.output.md @@ -68,7 +68,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -137,7 +139,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -220,7 +224,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -325,7 +331,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -439,7 +447,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -524,7 +534,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -603,7 +615,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -843,7 +857,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -934,7 +950,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1036,7 +1054,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1141,7 +1161,9 @@ scratch/alice> merge /bob Loading branches... 
- Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1224,7 +1246,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1307,7 +1331,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1386,7 +1412,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1481,7 +1509,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1600,7 +1630,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1692,7 +1724,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1794,7 +1828,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -1967,7 +2003,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2033,7 +2071,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2098,7 +2138,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2164,7 +2206,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge: @@ -2231,7 +2275,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... On scratch/alice, the type A.inner.X is an alias of A. I'm not able to perform a merge when a type exists nested under an @@ -2288,7 +2334,9 @@ scratch/alice> merge bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Sorry, I wasn't able to perform the merge, because I need all constructor names to be nested somewhere beneath the @@ -2494,7 +2542,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2619,7 +2669,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2728,7 +2780,9 @@ scratch/main> merge topic Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -2875,7 +2929,9 @@ scratch/alice> merge /bob Loading branches... - Computing diff between branches... + Loading definitions... + + Computing diffs... Loading dependents of changes... @@ -3047,7 +3103,9 @@ scratch/bob> merge /alice Loading branches... 
-  Computing diff between branches...
+  Loading definitions...
+
+  Computing diffs...
 
   Loading dependents of changes...
 
@@ -3063,7 +3121,9 @@ scratch/carol> merge /bob
 
   Loading branches...
 
-  Computing diff between branches...
+  Loading definitions...
+
+  Computing diffs...
 
   Loading dependents of changes...
 
@@ -3217,7 +3277,9 @@ scratch/alice> merge /bob
 
   Loading branches...
 
-  Computing diff between branches...
+  Loading definitions...
+
+  Computing diffs...
 
   Loading dependents of changes...
 
@@ -3297,7 +3359,9 @@ scratch/alice> merge /bob
 
   Loading branches...
 
-  Computing diff between branches...
+  Loading definitions...
+
+  Computing diffs...
 
   Loading dependents of changes...
 
@@ -3496,7 +3560,9 @@ scratch/alice> merge /bob
 
   Loading branches...
 
-  Computing diff between branches...
+  Loading definitions...
+
+  Computing diffs...
 
   Loading dependents of changes...
 
diff --git a/unison-src/transcripts/project-outputs/docs/configuration.output.md b/unison-src/transcripts/project-outputs/docs/configuration.output.md
index 0bf4d06de5..bcae1f8b5a 100644
--- a/unison-src/transcripts/project-outputs/docs/configuration.output.md
+++ b/unison-src/transcripts/project-outputs/docs/configuration.output.md
@@ -9,6 +9,7 @@
   - [`UNISON_SHARE_ACCESS_TOKEN`](#unison_share_access_token)
   - [`UNISON_READONLY`](#unison_readonly)
   - [`UNISON_ENTITY_VALIDATION`](#unison_entity_validation)
+  - [`UNISON_SYNC_VERSION`](#unison_sync_version)
   - [Local Codebase Server](#local-codebase-server)
 - [Codebase Configuration](#codebase-configuration)
 
@@ -116,6 +117,14 @@ Defaults to enabled.
 $ UNISON_ENTITY_VALIDATION="false" ucm
 ```
 
+### `UNISON_SYNC_VERSION`
+
+Allows enabling the experimental Sync Version 2 protocol when downloading code from Share.
+
+``` sh
+$ UNISON_SYNC_VERSION="2" ucm
+```
+
 ### `UNISON_PULL_WORKERS`
 
 Allows setting the number of workers to use when pulling from a codebase server.
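As a closing note on the diff machinery above: `diffHashedNamespaceDefns` leans on `alignWith`/`unalign` from the `semialign` package to compute and split a diff in one pass. Here is a self-contained sketch of the idiom, with a toy `Op` type standing in for the real `DiffOp`/`Updated` structures (illustrative only, not the merge engine's code):

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE LambdaCase #-}

import Data.Map.Strict (Map)
import Data.Map.Strict qualified as Map
import Data.Semialign (align, unalign)
import Data.These (These (..))

-- A toy diff operation, standing in for Unison.Merge.DiffOp.
data Op v = Add v | Delete v | Update v v
  deriving (Show)

-- Align two versions of a map, classify each key as a real change (This)
-- or an unchanged entry (That), then unalign to split the two classes
-- into separate maps in a single pass.
diffAndSplit :: (Ord k, Eq v) => Map k v -> Map k v -> (Map k (Op v), Map k v)
diffAndSplit old new =
  unalign (classify <$> align old new)
  where
    classify = \case
      This o -> This (Delete o) -- present only in old
      That n -> This (Add n) -- present only in new
      These o n
        | o == n -> That o -- same on both sides: nothing to report
        | otherwise -> This (Update o n)

main :: IO ()
main =
  print $
    diffAndSplit
      (Map.fromList [("x", 1), ("y", 2), ("z", 3 :: Int)])
      (Map.fromList [("y", 2), ("z", 9), ("w", 4)])

-- prints:
-- (fromList [("w",Add 4),("x",Delete 1),("z",Update 3 9)],fromList [("y",2)])
```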