From aec0debf1ff9af1130ef5db7dc0a7af7fde8ec10 Mon Sep 17 00:00:00 2001 From: khorshuheng Date: Tue, 4 Mar 2025 15:57:12 +0800 Subject: [PATCH] feat: api for duplicate view --- Cargo.lock | 14 +- Cargo.toml | 14 +- libs/client-api/src/http_view.rs | 22 +- libs/shared-entity/src/dto/workspace_dto.rs | 5 + src/api/workspace.rs | 32 ++ src/biz/collab/database.rs | 85 ++++ src/biz/collab/folder_view.rs | 38 +- src/biz/workspace/duplicate.rs | 447 ++++++++++++++++++++ src/biz/workspace/mod.rs | 1 + tests/workspace/page_view.rs | 51 ++- 10 files changed, 691 insertions(+), 18 deletions(-) create mode 100644 src/biz/workspace/duplicate.rs diff --git a/Cargo.lock b/Cargo.lock index 280b5f52b..6b35709b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1896,7 +1896,7 @@ dependencies = [ [[package]] name = "collab" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "arc-swap", @@ -1921,7 +1921,7 @@ dependencies = [ [[package]] name = "collab-database" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "async-trait", @@ -1961,7 +1961,7 @@ dependencies = [ [[package]] name = "collab-document" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "arc-swap", @@ -1982,7 +1982,7 @@ dependencies = [ [[package]] name = "collab-entity" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "bytes", @@ -2002,7 +2002,7 @@ dependencies = [ [[package]] name = "collab-folder" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "arc-swap", @@ -2024,7 +2024,7 @@ dependencies = [ [[package]] name = "collab-importer" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "async-recursion", @@ -2130,7 +2130,7 @@ dependencies = [ [[package]] name = "collab-user" version = "0.2.0" -source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c2442a93704e14508ccee325da8d56ef0b34c7ce#c2442a93704e14508ccee325da8d56ef0b34c7ce" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=45239d2ae871cc355ea2cc1d5d578e21c8263242#45239d2ae871cc355ea2cc1d5d578e21c8263242" dependencies = [ "anyhow", "collab", diff --git a/Cargo.toml b/Cargo.toml index 72127620e..feb9a926d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -303,13 +303,13 @@ lto = false [patch.crates-io] # It's diffcult to resovle different version with the same crate used in AppFlowy Frontend and the Client-API crate. # So using patch to workaround this issue. -collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } -collab-importer = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c2442a93704e14508ccee325da8d56ef0b34c7ce" } +collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } +collab-importer = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "45239d2ae871cc355ea2cc1d5d578e21c8263242" } [features] history = [] diff --git a/libs/client-api/src/http_view.rs b/libs/client-api/src/http_view.rs index ffc12c8ff..2581efd74 100644 --- a/libs/client-api/src/http_view.rs +++ b/libs/client-api/src/http_view.rs @@ -1,6 +1,7 @@ use client_api_entity::workspace_dto::{ AppendBlockToPageParams, CreatePageDatabaseViewParams, CreatePageParams, CreateSpaceParams, - MovePageParams, Page, PageCollab, PublishPageParams, Space, UpdatePageParams, UpdateSpaceParams, + DuplicatePageParams, MovePageParams, Page, PageCollab, PublishPageParams, Space, + UpdatePageParams, UpdateSpaceParams, }; use reqwest::Method; use serde_json::json; @@ -277,4 +278,23 @@ impl Client { .await?; AppResponse::<()>::from_response(resp).await?.into_error() } + + pub async fn duplicate_view_and_children( + &self, + workspace_id: Uuid, + view_id: &str, + params: &DuplicatePageParams, + ) -> Result<(), AppResponseError> { + let url = format!( + "{}/api/workspace/{}/page-view/{}/duplicate", + self.base_url, workspace_id, view_id + ); + let resp = self + .http_client_with_auth(Method::POST, &url) + .await? 
+      .json(params)
+      .send()
+      .await?;
+    AppResponse::<()>::from_response(resp).await?.into_error()
+  }
 }
diff --git a/libs/shared-entity/src/dto/workspace_dto.rs b/libs/shared-entity/src/dto/workspace_dto.rs
index cfc013b0f..56ca35f5b 100644
--- a/libs/shared-entity/src/dto/workspace_dto.rs
+++ b/libs/shared-entity/src/dto/workspace_dto.rs
@@ -206,6 +206,11 @@ pub struct MovePageParams {
   pub prev_view_id: Option<String>,
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct DuplicatePageParams {
+  pub suffix: Option<String>,
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct CreatePageDatabaseViewParams {
   pub layout: ViewLayout,
diff --git a/src/api/workspace.rs b/src/api/workspace.rs
index 79732224a..637d66cb1 100644
--- a/src/api/workspace.rs
+++ b/src/api/workspace.rs
@@ -8,6 +8,7 @@ use crate::biz::collab::ops::{
 use crate::biz::collab::utils::collab_from_doc_state;
 use crate::biz::user::user_verify::verify_token;
 use crate::biz::workspace;
+use crate::biz::workspace::duplicate::duplicate_view_tree_and_collab;
 use crate::biz::workspace::ops::{
   create_comment_on_published_view, create_reaction_on_comment, get_comments_on_published_view,
   get_reactions_on_published_view, remove_comment_on_published_view, remove_reaction_on_comment,
@@ -184,6 +185,10 @@ pub fn workspace_scope() -> Scope {
       web::resource("/{workspace_id}/page-view/{view_id}/move")
         .route(web::post().to(move_page_handler)),
     )
+    .service(
+      web::resource("/{workspace_id}/page-view/{view_id}/duplicate")
+        .route(web::post().to(duplicate_page_handler)),
+    )
     .service(
       web::resource("/{workspace_id}/page-view/{view_id}/database-view")
         .route(web::post().to(post_page_database_view_handler)),
@@ -1284,6 +1289,32 @@ async fn move_page_handler(
   Ok(Json(AppResponse::Ok()))
 }
 
+async fn duplicate_page_handler(
+  user_uuid: UserUuid,
+  path: web::Path<(Uuid, Uuid)>,
+  payload: Json<DuplicatePageParams>,
+  state: Data<AppState>,
+  server: Data<RealtimeServerAddr>,
+  req: HttpRequest,
+) -> Result<Json<AppResponse<()>>> {
+  let uid = state.user_cache.get_user_uid(&user_uuid).await?;
+  let (workspace_uuid, view_id) = path.into_inner();
+  let user = realtime_user_for_web_request(req.headers(), uid)?;
+  let suffix = payload.suffix.as_deref().unwrap_or(" (Copy)").to_string();
+  duplicate_view_tree_and_collab(
+    &state.metrics.appflowy_web_metrics,
+    server,
+    user,
+    state.collab_access_control_storage.clone(),
+    &state.pg_pool,
+    workspace_uuid,
+    view_id,
+    &suffix,
+  )
+  .await?;
+  Ok(Json(AppResponse::Ok()))
+}
+
 async fn move_page_to_trash_handler(
   user_uuid: UserUuid,
   path: web::Path<(Uuid, String)>,
diff --git a/src/biz/collab/database.rs b/src/biz/collab/database.rs
index 820b64d68..34b4fb5b1 100644
--- a/src/biz/collab/database.rs
+++ b/src/biz/collab/database.rs
@@ -1,7 +1,13 @@
+use std::sync::Arc;
+
 use app_error::AppError;
+use appflowy_collaborate::collab::storage::CollabAccessControlStorage;
+use async_trait::async_trait;
+use collab::preclude::Collab;
 use collab_database::{
   database::{gen_database_group_id, gen_field_id},
   entity::FieldType,
+  error::DatabaseError,
   fields::{
     date_type_option::DateTypeOption, default_field_settings_for_fields,
     select_type_option::SingleSelectTypeOption, Field, TypeOptionData,
@@ -10,7 +16,16 @@
     BoardLayoutSetting, CalendarLayoutSetting, DatabaseLayout, FieldSettingsByFieldIdMap, Group,
     GroupSetting, GroupSettingMap, LayoutSettings,
   },
+  workspace_database::{
+    DatabaseCollabPersistenceService, DatabaseCollabService, EncodeCollabByOid,
+  },
 };
+use collab_entity::{CollabType, EncodedCollab};
+use collab_folder::CollabOrigin;
+use
database::collab::GetCollabOrigin; +use uuid::Uuid; + +use super::utils::{batch_get_latest_collab_encoded, get_latest_collab_encoded}; pub struct LinkedViewDependencies { pub layout_settings: LayoutSettings, @@ -154,6 +169,76 @@ fn create_card_status_field() -> Field { .with_type_option_data(field_type, default_select_type_option.into()) } +#[derive(Clone)] +pub struct PostgresDatabaseCollabService { + pub workspace_id: Uuid, + pub collab_storage: Arc, +} + +impl PostgresDatabaseCollabService { + pub async fn get_collab(&self, oid: &str, collab_type: CollabType) -> EncodedCollab { + get_latest_collab_encoded( + &self.collab_storage, + GetCollabOrigin::Server, + &self.workspace_id.to_string(), + oid, + collab_type, + ) + .await + .unwrap() + } +} + +#[async_trait] +impl DatabaseCollabService for PostgresDatabaseCollabService { + async fn build_collab( + &self, + object_id: &str, + object_type: CollabType, + encoded_collab: Option<(EncodedCollab, bool)>, + ) -> Result { + match encoded_collab { + None => Collab::new_with_source( + CollabOrigin::Empty, + object_id, + self.get_collab(object_id, object_type).await.into(), + vec![], + false, + ) + .map_err(|err| DatabaseError::Internal(err.into())), + Some((encoded_collab, _)) => Collab::new_with_source( + CollabOrigin::Empty, + object_id, + encoded_collab.into(), + vec![], + false, + ) + .map_err(|err| DatabaseError::Internal(err.into())), + } + } + + async fn get_collabs( + &self, + object_ids: Vec, + collab_type: CollabType, + ) -> Result { + let encoded_collabs = batch_get_latest_collab_encoded( + &self.collab_storage, + GetCollabOrigin::Server, + &self.workspace_id.to_string(), + &object_ids, + collab_type, + ) + .await + .unwrap(); + Ok(encoded_collabs) + } + + fn persistence(&self) -> Option> { + None + } +} + #[cfg(test)] mod tests { use collab_database::{ diff --git a/src/biz/collab/folder_view.rs b/src/biz/collab/folder_view.rs index 8e9104368..95219efc5 100644 --- a/src/biz/collab/folder_view.rs +++ b/src/biz/collab/folder_view.rs @@ -2,7 +2,9 @@ use std::collections::HashSet; use app_error::AppError; use chrono::DateTime; -use collab_folder::{Folder, SectionItem, SpacePermission, ViewLayout as CollabFolderViewLayout}; +use collab_folder::{ + Folder, SectionItem, SpacePermission, View, ViewLayout as CollabFolderViewLayout, +}; use shared_entity::dto::workspace_dto::{ self, FavoriteFolderView, FolderView, FolderViewMinimal, RecentFolderView, TrashFolderView, ViewLayout, @@ -263,6 +265,40 @@ pub fn section_items_to_trash_folder_view( .collect() } +pub struct ViewTree { + pub view: View, + pub children: Vec, +} + +pub fn get_view_and_children(folder: &Folder, view_id: &str) -> Option { + let private_space_and_trash_views = private_space_and_trash_view_ids(folder); + get_view_and_children_recursive(folder, &private_space_and_trash_views, view_id) +} + +fn get_view_and_children_recursive( + folder: &Folder, + private_space_and_trash_views: &PrivateSpaceAndTrashViews, + view_id: &str, +) -> Option { + if private_space_and_trash_views + .view_ids_in_trash + .contains(view_id) + { + return None; + } + + folder.get_view(view_id).map(|view| ViewTree { + view: View::clone(&view), + children: view + .children + .iter() + .filter_map(|child_view_id| { + get_view_and_children_recursive(folder, private_space_and_trash_views, child_view_id) + }) + .collect(), + }) +} + pub fn check_if_view_ancestors_fulfil_condition( view_id: &str, collab_folder: &Folder, diff --git a/src/biz/workspace/duplicate.rs b/src/biz/workspace/duplicate.rs new file mode 
100644 index 000000000..87bb53377 --- /dev/null +++ b/src/biz/workspace/duplicate.rs @@ -0,0 +1,447 @@ +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use actix_web::web::Data; +use anyhow::anyhow; +use app_error::AppError; +use appflowy_collaborate::collab::storage::CollabAccessControlStorage; +use collab_database::{ + database::{ + gen_database_id, gen_database_view_id, gen_row_id, timestamp, Database, DatabaseContext, + DatabaseData, + }, + entity::{CreateDatabaseParams, CreateViewParams}, + rows::CreateRowParams, + views::OrderObjectPosition, + workspace_database::WorkspaceDatabase, +}; +use collab_document::document::Document; +use collab_entity::{CollabType, EncodedCollab}; +use collab_folder::{Folder, RepeatedViewIdentifier, View, ViewIdentifier}; +use collab_rt_entity::user::RealtimeUser; +use database::collab::{select_workspace_database_oid, CollabStorage, GetCollabOrigin}; +use database_entity::dto::{CollabParams, QueryCollab, QueryCollabResult}; +use itertools::Itertools; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::{ + api::{metrics::AppFlowyWebMetrics, ws::RealtimeServerAddr}, + biz::collab::{ + database::PostgresDatabaseCollabService, + utils::{collab_from_doc_state, get_latest_collab_encoded, get_latest_collab_folder}, + }, +}; + +use super::page_view::{update_workspace_database_data, update_workspace_folder_data}; + +#[allow(clippy::too_many_arguments)] +pub async fn duplicate_view_tree_and_collab( + appflowy_web_metrics: &AppFlowyWebMetrics, + server: Data, + user: RealtimeUser, + collab_storage: Arc, + pg_pool: &PgPool, + workspace_id: Uuid, + view_id: Uuid, + suffix: &str, +) -> Result<(), AppError> { + let uid = user.uid; + let mut folder: Folder = get_latest_collab_folder( + &collab_storage, + GetCollabOrigin::User { uid }, + &workspace_id.to_string(), + ) + .await?; + let trash_sections: HashSet = folder + .get_all_trash_sections() + .iter() + .map(|s| s.id.clone()) + .collect(); + let views: Vec = folder + .get_view_recursively(&view_id.to_string()) + .into_iter() + .filter(|view| !trash_sections.contains(&view.id)) + .collect(); + let duplicate_context = duplicate_views(&views, suffix)?; + + let ws_db_oid = select_workspace_database_oid(pg_pool, &workspace_id) + .await + .map_err(|err| { + AppError::Internal(anyhow::anyhow!( + "Unable to find workspace database oid for {}: {}", + workspace_id, + err + )) + })?; + let encoded_ws_db = get_latest_collab_encoded( + &collab_storage, + GetCollabOrigin::User { uid }, + &workspace_id.to_string(), + &ws_db_oid, + CollabType::WorkspaceDatabase, + ) + .await + .map_err(|err| { + AppError::Internal(anyhow::anyhow!( + "Unable to get latest workspace database collab {}: {}", + &ws_db_oid, + err + )) + })?; + let ws_db_collab = + collab_from_doc_state(encoded_ws_db.doc_state.to_vec(), &ws_db_oid).map_err(|err| { + AppError::Internal(anyhow::anyhow!( + "Unable to decode workspace database collab {}: {}", + &ws_db_oid, + err + )) + })?; + let mut ws_db = WorkspaceDatabase::open(ws_db_collab).map_err(|err| { + AppError::Internal(anyhow::anyhow!( + "Failed to open workspace database body: {}", + err + )) + })?; + + duplicate_database( + appflowy_web_metrics, + server.clone(), + user.clone(), + collab_storage.clone(), + workspace_id, + &duplicate_context, + &mut ws_db, + ) + .await?; + + duplicate_document( + collab_storage.clone(), + workspace_id, + uid, + &duplicate_context, + ) + .await?; + + let encoded_folder_update = { + let mut txn = folder.collab.transact_mut(); + for view in 
&duplicate_context.duplicated_views { + folder.body.views.insert(&mut txn, view.clone(), None); + } + txn.encode_update_v1() + }; + update_workspace_folder_data( + appflowy_web_metrics, + server, + user, + workspace_id, + encoded_folder_update, + ) + .await?; + Ok(()) +} + +fn duplicate_database_data_with_context( + context: &DuplicateContext, + data: &DatabaseData, +) -> CreateDatabaseParams { + let database_id = gen_database_id(); + let timestamp = timestamp(); + + let create_row_params = data + .rows + .iter() + .map(|row| CreateRowParams { + id: gen_row_id(), + database_id: database_id.clone(), + created_at: timestamp, + modified_at: timestamp, + cells: row.cells.clone(), + height: row.height, + visibility: row.visibility, + row_position: OrderObjectPosition::End, + }) + .collect(); + + let create_view_params = data + .views + .iter() + .map(|view| CreateViewParams { + database_id: database_id.clone(), + view_id: context + .view_id_mapping + .get(&view.id) + .cloned() + .unwrap_or_else(gen_database_view_id), + name: view.name.clone(), + layout: view.layout, + layout_settings: view.layout_settings.clone(), + filters: view.filters.clone(), + group_settings: view.group_settings.clone(), + sorts: view.sorts.clone(), + field_settings: view.field_settings.clone(), + created_at: timestamp, + modified_at: timestamp, + ..Default::default() + }) + .collect(); + + CreateDatabaseParams { + database_id, + rows: create_row_params, + fields: data.fields.clone(), + views: create_view_params, + } +} + +async fn duplicate_database( + appflowy_web_metrics: &AppFlowyWebMetrics, + server: Data, + user: RealtimeUser, + collab_storage: Arc, + workspace_id: Uuid, + duplicate_context: &DuplicateContext, + workspace_database: &mut WorkspaceDatabase, +) -> Result<(), AppError> { + let uid = user.uid; + let collab_service = Arc::new(PostgresDatabaseCollabService { + workspace_id, + collab_storage: collab_storage.clone(), + }); + for database_view_id in &duplicate_context.database_view_ids { + let database_id = workspace_database + .get_database_meta_with_view_id(database_view_id) + .ok_or_else(|| { + AppError::Internal(anyhow!("Database view id {} not found", database_view_id)) + })? 
+ .database_id + .clone(); + let database_context = DatabaseContext { + collab_service: collab_service.clone(), + notifier: Default::default(), + }; + let database = Database::open(&database_id, database_context) + .await + .map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to open database: {}", err)))?; + let database_data = database.get_database_data().await; + let params = duplicate_database_data_with_context(duplicate_context, &database_data); + let database_context = DatabaseContext { + collab_service: collab_service.clone(), + notifier: Default::default(), + }; + let duplicated_database = Database::create_with_view(params, database_context) + .await + .map_err(|err| { + AppError::Internal(anyhow::anyhow!("Failed to duplicate database: {}", err)) + })?; + let duplicated_view_ids = duplicated_database + .get_all_database_views_meta() + .iter() + .map(|meta| meta.id.clone()) + .collect_vec(); + let encoded_database = duplicated_database + .encode_database_collabs() + .await + .map_err(|err| { + AppError::Internal(anyhow::anyhow!( + "Failed to encode database collabs: {}", + err + )) + })?; + let mut collab_params_list = vec![]; + collab_params_list.push(CollabParams { + object_id: duplicated_database.get_database_id().clone(), + encoded_collab_v1: encoded_database + .encoded_database_collab + .encoded_collab + .encode_to_bytes()? + .into(), + collab_type: CollabType::Database, + }); + for row in encoded_database.encoded_row_collabs { + collab_params_list.push(CollabParams { + object_id: row.object_id.clone(), + encoded_collab_v1: row.encoded_collab.encode_to_bytes()?.into(), + collab_type: CollabType::DatabaseRow, + }); + } + collab_storage + .batch_insert_new_collab(&workspace_id.to_string(), &uid, collab_params_list) + .await?; + let encoded_update = { + let mut txn = workspace_database.collab.transact_mut(); + workspace_database.body.add_database( + &mut txn, + duplicated_database.object_id(), + duplicated_view_ids, + ); + txn.encode_update_v1() + }; + update_workspace_database_data( + appflowy_web_metrics, + server.clone(), + user.clone(), + workspace_id, + workspace_database.collab.object_id(), + encoded_update, + ) + .await?; + } + Ok(()) +} + +async fn duplicate_document( + collab_storage: Arc, + workspace_id: Uuid, + uid: i64, + duplicate_context: &DuplicateContext, +) -> Result<(), AppError> { + let queries = duplicate_context + .document_view_ids + .iter() + .map(|id| QueryCollab { + object_id: id.clone(), + collab_type: CollabType::Document, + }) + .collect(); + let query_results = collab_storage + .batch_get_collab(&uid, &workspace_id.to_string(), queries, true) + .await; + let mut collab_params_list = vec![]; + for (collab_id, query_result) in query_results { + match query_result { + QueryCollabResult::Success { encode_collab_v1 } => { + let encoded_collab = EncodedCollab::decode_from_bytes(&encode_collab_v1) + .map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to decode collab: {}", err)))?; + let new_collab_id = duplicate_context + .view_id_mapping + .get(&collab_id) + .ok_or_else(|| { + AppError::Internal(anyhow::anyhow!( + "Failed to find new collab id for {}", + collab_id + )) + })?; + let new_collab_param = + duplicate_document_encoded_collab(&collab_id, new_collab_id, encoded_collab)?; + collab_params_list.push(new_collab_param); + }, + QueryCollabResult::Failed { error: _ } => { + tracing::warn!("Failed to read collab {} during duplication", collab_id); + }, + } + } + collab_storage + .batch_insert_new_collab(&workspace_id.to_string(), &uid, 
collab_params_list)
+    .await?;
+  Ok(())
+}
+
+struct DuplicateContext {
+  view_id_mapping: HashMap<String, String>,
+  duplicated_views: Vec<View>,
+  database_view_ids: HashSet<String>,
+  document_view_ids: HashSet<String>,
+}
+
+fn duplicate_views(views: &[View], suffix: &str) -> Result<DuplicateContext, AppError> {
+  let root_parent_id = views
+    .first()
+    .ok_or(AppError::Internal(anyhow!(
+      "No views available for duplication"
+    )))?
+    .parent_view_id
+    .clone();
+  let mut view_id_mapping: HashMap<String, String> = HashMap::new();
+  let mut duplicated_views = vec![];
+  let mut database_view_ids = HashSet::new();
+  let mut document_view_ids = HashSet::new();
+  for view in views {
+    let duplicated_view_id = Uuid::new_v4().to_string();
+    view_id_mapping.insert(view.id.clone(), duplicated_view_id);
+  }
+  for (index, view) in views.iter().enumerate() {
+    let orig_parent_view_id = view.parent_view_id.clone();
+    let duplicated_parent_view_id = if orig_parent_view_id == root_parent_id {
+      orig_parent_view_id
+    } else {
+      view_id_mapping
+        .get(&view.parent_view_id)
+        .cloned()
+        .ok_or(AppError::Internal(anyhow::anyhow!(
+          "Failed to find duplicated parent view id {}",
+          view.parent_view_id
+        )))?
+    };
+    let mut duplicated_view = view.clone();
+    let mut duplicated_children = vec![];
+    for child in view.children.items.iter() {
+      let new_view_id = view_id_mapping
+        .get(&child.id)
+        .cloned()
+        .ok_or(AppError::Internal(anyhow::anyhow!(
+          "Failed to find duplicated child view id {}",
+          child.id
+        )))?;
+      duplicated_children.push(ViewIdentifier { id: new_view_id });
+    }
+    duplicated_view.id = view_id_mapping
+      .get(&view.id)
+      .cloned()
+      .ok_or(AppError::Internal(anyhow::anyhow!(
+        "Failed to find duplicated view id {}",
+        view.id
+      )))?;
+    duplicated_view.parent_view_id = duplicated_parent_view_id.clone();
+    if index == 0 {
+      duplicated_view.name = format!("{}{}", duplicated_view.name, suffix);
+    }
+    duplicated_view.created_at = timestamp();
+    duplicated_view.is_favorite = false;
+    duplicated_view.last_edited_time = 0;
+    duplicated_view.children = RepeatedViewIdentifier {
+      items: duplicated_children,
+    };
+
+    duplicated_views.push(duplicated_view);
+    match &view.layout {
+      layout if layout.is_document() => {
+        document_view_ids.insert(view.id.clone());
+      },
+      layout if layout.is_database() => {
+        database_view_ids.insert(view.id.clone());
+      },
+      _ => (),
+    }
+  }
+  Ok(DuplicateContext {
+    view_id_mapping,
+    duplicated_views,
+    database_view_ids,
+    document_view_ids,
+  })
+}
+
+fn duplicate_document_encoded_collab(
+  orig_object_id: &str,
+  new_object_id: &str,
+  encoded_collab: EncodedCollab,
+) -> Result<CollabParams, AppError> {
+  let collab = collab_from_doc_state(encoded_collab.doc_state.to_vec(), orig_object_id)?;
+  let document = Document::open(collab).map_err(|err| AppError::Internal(err.into()))?;
+  let data = document.get_document_data().map_err(|err| AppError::Internal(err.into()))?;
+  let duplicated_document = Document::create(new_object_id, data)
+    .map_err(|err| AppError::Internal(anyhow::anyhow!("Failed to create document: {}", err)))?;
+  let encoded_collab: EncodedCollab = duplicated_document
+    .encode_collab_v1(|c| CollabType::Document.validate_require_data(c))
+    .map_err(|err| {
+      AppError::Internal(anyhow::anyhow!("Failed to encode document collab: {}", err))
+    })?;
+  Ok(CollabParams {
+    object_id: new_object_id.to_string(),
+    encoded_collab_v1: encoded_collab.encode_to_bytes()?.into(),
+    collab_type: CollabType::Document,
+  })
+}
diff --git a/src/biz/workspace/mod.rs b/src/biz/workspace/mod.rs
index 2249096d2..250f6b09c 100644
--- a/src/biz/workspace/mod.rs
+++ b/src/biz/workspace/mod.rs
@@ -1,3 +1,4 @@
+pub mod duplicate;
 pub mod ops;
 pub mod page_view;
pub mod publish; diff --git a/tests/workspace/page_view.rs b/tests/workspace/page_view.rs index 3bde3700b..9d8d14e9c 100644 --- a/tests/workspace/page_view.rs +++ b/tests/workspace/page_view.rs @@ -10,8 +10,8 @@ use collab_folder::{CollabOrigin, Folder}; use serde_json::{json, Value}; use shared_entity::dto::workspace_dto::{ AppendBlockToPageParams, CreatePageDatabaseViewParams, CreatePageParams, CreateSpaceParams, - IconType, MovePageParams, PublishPageParams, SpacePermission, UpdatePageParams, - UpdateSpaceParams, ViewIcon, ViewLayout, + DuplicatePageParams, IconType, MovePageParams, PublishPageParams, SpacePermission, + UpdatePageParams, UpdateSpaceParams, ViewIcon, ViewLayout, }; use tokio::time::sleep; use uuid::Uuid; @@ -951,6 +951,53 @@ async fn publish_page() { assert_eq!(published_view.children.len(), 0); } +#[tokio::test] +async fn duplicate_view() { + let registered_user = generate_unique_registered_user().await; + let mut app_client = TestClient::user_with_new_device(registered_user.clone()).await; + let web_client = TestClient::user_with_new_device(registered_user.clone()).await; + let workspace_id = app_client.workspace_id().await; + app_client.open_workspace_collab(&workspace_id).await; + app_client + .wait_object_sync_complete(&workspace_id) + .await + .unwrap(); + let workspace_uuid = Uuid::parse_str(&workspace_id).unwrap(); + let folder_view = web_client + .api_client + .get_workspace_folder(&workspace_id, Some(2), None) + .await + .unwrap(); + let general_space = &folder_view + .children + .into_iter() + .find(|v| v.name == "General") + .unwrap(); + web_client + .api_client + .duplicate_view_and_children( + workspace_uuid, + &general_space.view_id, + &DuplicatePageParams { + suffix: Some(" (Copy)".to_string()), + }, + ) + .await + .unwrap(); + let folder = get_latest_folder(&app_client, &workspace_id).await; + let duplicated_space_id = folder + .get_view(&workspace_id) + .unwrap() + .children + .iter() + .find(|v| folder.get_view(&v.id).unwrap().name == "General (Copy)") + .unwrap() + .id + .clone(); + let duplicated_views = folder.get_view_recursively(&duplicated_space_id); + assert_eq!(duplicated_views.len(), 6); +} + #[tokio::test] async fn create_database_page_view() { let registered_user = generate_unique_registered_user().await;
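For reference, a minimal usage sketch of the endpoint introduced by this patch, called through the new `Client::duplicate_view_and_children` method (backed by `POST /api/workspace/{workspace_id}/page-view/{view_id}/duplicate`). The helper name `duplicate_example` is illustrative only, and an already-authenticated `client_api::Client` is assumed; error handling mirrors the `.unwrap()`/`.expect()` style of the surrounding test suite rather than any particular error type.

```rust
use client_api::Client;
use client_api_entity::workspace_dto::DuplicatePageParams;
use uuid::Uuid;

// Duplicates `view_id` and all of its non-trashed children inside `workspace_id`.
// Passing `suffix: None` lets the server fall back to its default " (Copy)" suffix.
async fn duplicate_example(client: &Client, workspace_id: Uuid, view_id: &str) {
  client
    .duplicate_view_and_children(
      workspace_id,
      view_id,
      &DuplicatePageParams {
        suffix: Some(" (Copy)".to_string()),
      },
    )
    .await
    .expect("duplicate view and children");
}
```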