diff --git a/MemorialHouse/MHApplication/MHApplication.xcodeproj/project.pbxproj b/MemorialHouse/MHApplication/MHApplication.xcodeproj/project.pbxproj
index 0cd07868..10ac92b6 100644
--- a/MemorialHouse/MHApplication/MHApplication.xcodeproj/project.pbxproj
+++ b/MemorialHouse/MHApplication/MHApplication.xcodeproj/project.pbxproj
@@ -235,8 +235,9 @@
 				INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "기록소는 사진 권한을 필요로 합니다. 허용 안 함 시 일부 기능이 동작하지 않을 수 있습니다.";
 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
 				INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+				INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait;
+				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
+				INFOPLIST_KEY_UIUserInterfaceStyle = Light;
 				IPHONEOS_DEPLOYMENT_TARGET = 16.0;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
@@ -272,8 +273,9 @@
 				INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "기록소는 사진 권한을 필요로 합니다. 허용 안 함 시 일부 기능이 동작하지 않을 수 있습니다.";
 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
 				INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+				INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait;
+				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
+				INFOPLIST_KEY_UIUserInterfaceStyle = Light;
 				IPHONEOS_DEPLOYMENT_TARGET = 16.0;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
diff --git a/MemorialHouse/MHApplication/MHApplication/Resource/Info.plist b/MemorialHouse/MHApplication/MHApplication/Resource/Info.plist
index 0eb786dc..4b6ec74d 100644
--- a/MemorialHouse/MHApplication/MHApplication/Resource/Info.plist
+++ b/MemorialHouse/MHApplication/MHApplication/Resource/Info.plist
@@ -2,6 +2,8 @@
 <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 <plist version="1.0">
 <dict>
+	<key>UIUserInterfaceStyle</key>
+	<string>Light</string>
 	<key>UIApplicationSceneManifest</key>
 	<dict>
 		<key>UIApplicationSupportsMultipleScenes</key>
diff --git a/MemorialHouse/MHApplication/MHApplication/Source/App/SceneDelegate.swift b/MemorialHouse/MHApplication/MHApplication/Source/App/SceneDelegate.swift
index 018b59d1..a0e3c8f8 100644
--- a/MemorialHouse/MHApplication/MHApplication/Source/App/SceneDelegate.swift
+++ b/MemorialHouse/MHApplication/MHApplication/Source/App/SceneDelegate.swift
@@ -233,6 +233,12 @@ final class SceneDelegate: UIResponder, UIWindowSceneDelegate {
             DeleteMediaUseCase.self,
             object: DefaultDeleteMediaUseCase(repository: mediaRepository)
         )
+        
+        // MARK: - TemporaryStoreMedia UseCase
+        DIContainer.shared.register(
+            TemporaryStoreMediaUseCase.self,
+            object: DefaultTemporaryStoreMediaUseCase(repository: mediaRepository)
+        )
     }
     
     private func registerViewModelFactoryDependency() throws {
@@ -329,5 +335,14 @@ final class SceneDelegate: UIResponder, UIWindowSceneDelegate {
             ReadPageViewModelFactory.self,
             object: ReadPageViewModelFactory(fetchMediaUseCase: fetchMediaUseCase)
         )
+        
+        // MARK: - CreateMediaViewModel
+        let temporaryStoreMediaUseCase = try DIContainer.shared.resolve(TemporaryStoreMediaUseCase.self)
+        DIContainer.shared.register(
+            CreateAudioViewModelFactory.self,
+            object: CreateAudioViewModelFactory(
+                temporaryStoreMediaUseCase: temporaryStoreMediaUseCase
+            )
+        )
     }
 }
diff --git a/MemorialHouse/MHData/MHData/LocalStorage/FileManager/MHFileManager.swift b/MemorialHouse/MHData/MHData/LocalStorage/FileManager/MHFileManager.swift
index f09c30ed..7a855caf 100644
--- a/MemorialHouse/MHData/MHData/LocalStorage/FileManager/MHFileManager.swift
+++ b/MemorialHouse/MHData/MHData/LocalStorage/FileManager/MHFileManager.swift
@@ -194,6 +194,22 @@ extension MHFileManager: FileStorage {
         return .success(originDataPath)
     }
     
+    public func makeDirectory(through path: String) async -> Result<Void, MHDataError> {
+        guard let originDirectory = fileManager.urls(
+            for: directoryType,
+            in: .userDomainMask
+        ).first?.appending(path: path)
+        else { return .failure(.directorySettingFailure) }
+        guard (
+            try? fileManager.createDirectory(
+                at: originDirectory,
+                withIntermediateDirectories: true
+            )
+        ) != nil else { return .failure(.directorySettingFailure)}
+        
+        return .success(())
+    }
+    
     public func getFileNames(at path: String) async -> Result<[String], MHDataError> {
         guard let originDirectory = fileManager.urls(
             for: directoryType,
diff --git a/MemorialHouse/MHData/MHData/LocalStorage/FileStorage.swift b/MemorialHouse/MHData/MHData/LocalStorage/FileStorage.swift
index dd981155..e6a9087f 100644
--- a/MemorialHouse/MHData/MHData/LocalStorage/FileStorage.swift
+++ b/MemorialHouse/MHData/MHData/LocalStorage/FileStorage.swift
@@ -70,6 +70,8 @@ public protocol FileStorage: Sendable {
     /// - Returns: 파일 URL을 반환합니다.
     func getURL(at path: String, fileName name: String) async -> Result<URL, MHDataError>
     
+    func makeDirectory(through path: String) async -> Result<Void, MHDataError>
+    
     /// 지정된 경로의 파일 목록을 반환합니다.
     /// Documents폴더를 기준으로 파일 이름 목록을 반환합니다.
     /// path는 디렉토리여야 합니다.
diff --git a/MemorialHouse/MHData/MHData/Repository/LocalMediaRepository.swift b/MemorialHouse/MHData/MHData/Repository/LocalMediaRepository.swift
index d3292f84..62c18233 100644
--- a/MemorialHouse/MHData/MHData/Repository/LocalMediaRepository.swift
+++ b/MemorialHouse/MHData/MHData/Repository/LocalMediaRepository.swift
@@ -6,7 +6,7 @@ import AVFoundation
 
 public struct LocalMediaRepository: MediaRepository, Sendable {
     private let storage: FileStorage
-    private let temporaryPath = "temp" // TODO: - 지워질 것임!
+    private let temporaryPath = "temporary"
     private let snapshotFileName = ".snapshot"
     
     public init(storage: FileStorage) {
@@ -68,9 +68,9 @@ public struct LocalMediaRepository: MediaRepository, Sendable {
         to bookID: UUID
     ) async -> Result<Void, MHDataError> {
         let path = bookID.uuidString
-        let fileName = mediaDescription.id.uuidString
+        let fileName = fileName(of: mediaDescription)
         
-        return await storage.move(at: "temp", fileName: fileName, to: path)
+        return await storage.move(at: temporaryPath, fileName: fileName, to: path)
     }
     
     public func getURL(
@@ -85,6 +85,10 @@ public struct LocalMediaRepository: MediaRepository, Sendable {
         return await storage.getURL(at: path, fileName: fileName)
     }
     
+    public func makeTemporaryDirectory() async -> Result<Void, MHDataError> {
+        return await storage.makeDirectory(through: temporaryPath)
+    }
+    
     public func moveAllTemporaryMedia(to bookID: UUID) async -> Result<Void, MHDataError> {
         let path = bookID.uuidString
         
diff --git a/MemorialHouse/MHDomain/MHDomain/Repository/MediaRepository.swift b/MemorialHouse/MHDomain/MHDomain/Repository/MediaRepository.swift
index ca64badd..5dccad0a 100644
--- a/MemorialHouse/MHDomain/MHDomain/Repository/MediaRepository.swift
+++ b/MemorialHouse/MHDomain/MHDomain/Repository/MediaRepository.swift
@@ -6,6 +6,7 @@ public protocol MediaRepository: Sendable {
     func create(media mediaDescription: MediaDescription, from: URL, to bookID: UUID?) async -> Result<Void, MHDataError>
     func fetch(media mediaDescription: MediaDescription, from bookID: UUID?) async -> Result<Data, MHDataError>
     func getURL(media mediaDescription: MediaDescription, from bookID: UUID?) async -> Result<URL, MHDataError>
+    func makeTemporaryDirectory() async -> Result<Void, MHDataError>
     func delete(media mediaDescription: MediaDescription, at bookID: UUID?) async -> Result<Void, MHDataError>
     func moveTemporaryMedia(_ mediaDescription: MediaDescription, to bookID: UUID) async -> Result<Void, MHDataError>
     func moveAllTemporaryMedia(to bookID: UUID) async -> Result<Void, MHDataError>
diff --git a/MemorialHouse/MHDomain/MHDomain/UseCase/DefaultMediaUseCase.swift b/MemorialHouse/MHDomain/MHDomain/UseCase/DefaultMediaUseCase.swift
index 3aab7d18..eb6f5f12 100644
--- a/MemorialHouse/MHDomain/MHDomain/UseCase/DefaultMediaUseCase.swift
+++ b/MemorialHouse/MHDomain/MHDomain/UseCase/DefaultMediaUseCase.swift
@@ -57,6 +57,7 @@ public struct DefaultDeleteMediaUseCase: DeleteMediaUseCase {
 }
 
 public struct DefaultPersistentlyStoreMediaUseCase: PersistentlyStoreMediaUseCase {
+    // MARK: - Property
     let repository: MediaRepository
     
@@ -76,4 +77,25 @@ public struct DefaultPersistentlyStoreMediaUseCase: PersistentlyStoreMediaUseCas
         try await repository.deleteMediaBySnapshot(for: bookID).get()
     }
+    
+    public func excute(media: MediaDescription, to bookID: UUID) async throws {
+        try await repository.moveTemporaryMedia(media, to: bookID).get()
+    }
+    
+}
+
+public struct DefaultTemporaryStoreMediaUseCase: TemporaryStoreMediaUseCase {
+    // MARK: - Property
+    let repository: MediaRepository
+    
+    // MARK: - Initializer
+    public init(repository: MediaRepository) {
+        self.repository = repository
+    }
+    
+    // MARK: - Method
+    public func execute(media: MediaDescription) async throws -> URL {
+        try await repository.makeTemporaryDirectory().get()
+        return try await repository.getURL(media: media, from: nil).get()
+    }
 }
diff --git a/MemorialHouse/MHDomain/MHDomain/UseCase/Interface/MediaUseCase.swift b/MemorialHouse/MHDomain/MHDomain/UseCase/Interface/MediaUseCase.swift
index cb434449..53615b00 100644
--- a/MemorialHouse/MHDomain/MHDomain/UseCase/Interface/MediaUseCase.swift
+++ b/MemorialHouse/MHDomain/MHDomain/UseCase/Interface/MediaUseCase.swift
@@ -20,4 +20,10 @@ public protocol PersistentlyStoreMediaUseCase: Sendable {
     /// mediaList가 없을 경우 현재 디렉토리의 스냅샷 기준으로 저장합니다.
     /// mediaList가 있을 경우 해당 목록을 기준으로 저장합니다.
     func execute(to bookID: UUID, mediaList: [MediaDescription]?) async throws
+    
+    func excute(media: MediaDescription, to bookID: UUID) async throws
+}
+
+public protocol TemporaryStoreMediaUseCase: Sendable {
+    func execute(media: MediaDescription) async throws -> URL
 }
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewController.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/View/CreateAudioViewController.swift
similarity index 83%
rename from MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewController.swift
rename to MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/View/CreateAudioViewController.swift
index 5894df10..c7b7789e 100644
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewController.swift
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/View/CreateAudioViewController.swift
@@ -11,7 +11,6 @@ final class CreateAudioViewController: UIViewController {
     private var cancellables = Set<AnyCancellable>()
     // auido
     private var audioRecorder: AVAudioRecorder?
-    private var isRecording = false
     // auido metering
     private var upBarLayers: [CALayer] = []
     private var downBarLayers: [CALayer] = []
@@ -29,8 +28,6 @@ final class CreateAudioViewController: UIViewController {
         AVNumberOfChannelsKey: 2,
         AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
     ]
-    // UUID
-    private let identifier: UUID = UUID()
     
     // MARK: - UI Component
     // title and buttons
@@ -101,7 +98,8 @@ final class CreateAudioViewController: UIViewController {
     }
     
     required init?(coder: NSCoder) {
-        self.viewModel = CreateAudioViewModel()
+        guard let viewModelFactory = try? DIContainer.shared.resolve(CreateAudioViewModelFactory.self) else { return nil }
+        self.viewModel = viewModelFactory.make { _ in }
         super.init(nibName: nil, bundle: nil)
     }
     
@@ -111,17 +109,13 @@ final class CreateAudioViewController: UIViewController {
         
         setup()
         bind()
-        configureAudioSession()
         configureAddSubviews()
         configureConstraints()
         configureAddActions()
+        input.send(.viewDidLoad)
     }
     
-    override func viewDidDisappear(_ animated: Bool) {
-        self.input.send(.viewDidDisappear)
-    }
-    
-    // MARK: - setup
+    // MARK: - Setup
     private func setup() {
         view.backgroundColor = .white
         setupBars()
@@ -157,54 +151,29 @@ final class CreateAudioViewController: UIViewController {
         }
     }
     
-    private func requestMicrophonePermission() {
-        AVAudioSession.sharedInstance().requestRecordPermission { @Sendable granted in
-            if !granted {
-                Task { @MainActor in
-                    let alert = UIAlertController(
-                        title: "마이크 권한 필요",
-                        message: "설정에서 마이크 권한을 허용해주세요.",
-                        preferredStyle: .alert
-                    )
-                    alert.addAction(UIAlertAction(title: "OK", style: .default))
-                    self.present(alert, animated: true, completion: nil)
-                }
-            }
-        }
-    }
-    
     // MARK: - bind
     private func bind() {
         let output = viewModel?.transform(input: input.eraseToAnyPublisher())
-        output?.sink(receiveValue: { [weak self] event in
-            switch event {
-            case .updatedAudioFileURL:
-                // TODO: - update audio file url
-                MHLogger.debug("updated audio file URL")
-            case .savedAudioFile:
-                // TODO: - show audio player
-                MHLogger.debug("saved audio file")
-            case .deleteTemporaryAudioFile:
-                // TODO: - delete temporary audio file
-                MHLogger.debug("delete temporary audio file")
-            case .audioStart:
-                self?.startRecording()
-            case .audioStop:
-                self?.stopRecording()
-            }
-        }).store(in: &cancellables)
+        output?.receive(on: DispatchQueue.main)
+            .sink(receiveValue: { [weak self] event in
+                switch event {
+                case let .audioFileURL(url):
+                    self?.configureAudioSession(for: url)
+                case .audioStart:
+                    self?.startRecording()
+                case .audioStop:
+                    self?.stopRecording()
+                case .recordCompleted:
+                    self?.dismiss(animated: true)
+                }
+            }).store(in: &cancellables)
     }
     
-    // MARK: - configure
-    
-    private func configureAudioSession() {
-        let fileName = "\(identifier).m4a"
-        let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
-        let audioFileURL = documentDirectory.appendingPathComponent(fileName)
-        
+    // MARK: - Configuration
+    private func configureAudioSession(for url: URL) {
         try? audioSession.setCategory(.record, mode: .default)
-        audioRecorder = try? AVAudioRecorder(url: audioFileURL, settings: audioRecordersettings)
+        audioRecorder = try? AVAudioRecorder(url: url, settings: audioRecordersettings)
         audioRecorder?.isMeteringEnabled = true
     }
     
@@ -263,6 +232,57 @@ final class CreateAudioViewController: UIViewController {
         timeTextLabel.setWidthAndHeight(width: 60, height: 16)
     }
     
+    private func configureAddActions() {
+        addTappedEventToAudioButton()
+        addTappedEventToCancelButton()
+        addTappedEventToSaveButton()
+    }
+    
+    private func addTappedEventToAudioButton() {
+        audioButton.addAction(
+            UIAction { [weak self] _ in
+                self?.input.send(.audioButtonTapped)
+            }, for: .touchUpInside
+        )
+    }
+    
+    private func addTappedEventToCancelButton() {
+        cancelButton.addAction(
+            UIAction { [weak self] _ in
+                self?.input.send(.recordCancelled)
+            }, for: .touchUpInside
+        )
+    }
+    
+    private func addTappedEventToSaveButton() {
+        saveButton.addAction(
+            UIAction { [weak self] _ in
+                self?.input.send(.saveButtonTapped)
+            }, for: .touchUpInside
+        )
+    }
+    
+    // MARK: - Helper
+    private func requestMicrophonePermission() {
+        let alert = UIAlertController(
+            title: "마이크 권한 필요",
+            message: "설정에서 마이크 권한을 허용해주세요.",
+            preferredStyle: .alert
+        )
+        alert.addAction(UIAlertAction(title: "OK", style: .default) { [weak self] _ in
+            self?.dismiss(animated: true)
+        })
+        Task {
+            AVAudioSession.sharedInstance().requestRecordPermission { @Sendable granted in
+                Task { @MainActor in
+                    if !granted {
+                        self.present(alert, animated: true, completion: nil)
+                    }
+                }
+            }
+        }
+    }
+    
     private func startRecording() {
         try? audioSession.setActive(true)
         
@@ -356,29 +376,4 @@ final class CreateAudioViewController: UIViewController {
         let seconds = recordingSeconds % 60
         timeTextLabel.text = String(format: "%02d:%02d", minutes, seconds)
     }
-    
-    private func configureAddActions() {
-        addTappedEventToAudioButton()
-        addTappedEventToCancelButton()
-        addTappedEventToSaveButton()
-    }
-    
-    private func addTappedEventToAudioButton() {
-        audioButton.addAction(UIAction { [weak self] _ in
-            self?.input.send(.audioButtonTapped)
-        }, for: .touchUpInside)
-    }
-    private func addTappedEventToCancelButton() {
-        cancelButton.addAction(
-            UIAction { [weak self]_ in
-                self?.dismiss(animated: true)
-            },
-            for: .touchUpInside)
-    }
-    private func addTappedEventToSaveButton() {
-        saveButton.addAction(UIAction { _ in
-            self.input.send(.saveButtonTapped)
-            self.dismiss(animated: true)
-        }, for: .touchUpInside)
-    }
 }
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModel.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModel.swift
new file mode 100644
index 00000000..d392feac
--- /dev/null
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModel.swift
@@ -0,0 +1,86 @@
+import Foundation
+import Combine
+import MHCore
+import MHDomain
+
+public final class CreateAudioViewModel: ViewModelType {
+    // MARK: - Type
+    enum Input {
+        case viewDidLoad
+        case audioButtonTapped
+        case saveButtonTapped
+        case recordCancelled
+    }
+    enum Output {
+        case audioFileURL(url: URL)
+        case audioStart
+        case audioStop
+        case recordCompleted
+    }
+    
+    // MARK: - Property
+    private let output = PassthroughSubject<Output, Never>()
+    private var cancellables = Set<AnyCancellable>()
+    private var audioIsRecoding: Bool = false
+    private let completion: (MediaDescription?) -> Void
+    private let temporaryStoreMediaUsecase: TemporaryStoreMediaUseCase
+    private var mediaDescription: MediaDescription?
+    
+    // MARK: - Initializer
+    init(
+        temporaryStoreMediaUsecase: TemporaryStoreMediaUseCase,
+        completion: @escaping (MediaDescription?) -> Void
+    ) {
+        self.temporaryStoreMediaUsecase = temporaryStoreMediaUsecase
+        self.completion = completion
+    }
+    
+    // MARK: - Method
+    func transform(input: AnyPublisher<Input, Never>) -> AnyPublisher<Output, Never> {
+        input.sink { [weak self] event in
+            switch event {
+            case .viewDidLoad:
+                Task { await self?.viewDidLoad() }
+            case .audioButtonTapped:
+                self?.audioButtonTapped()
+            case .saveButtonTapped:
+                self?.completeRecord(withCompletion: true)
+            case .recordCancelled:
+                self?.completeRecord(withCompletion: false)
+            }
+        }.store(in: &cancellables)
+        
+        return output.eraseToAnyPublisher()
+    }
+    
+    // MARK: - Helper
+    private func viewDidLoad() async {
+        let mediaDescription = MediaDescription(type: .audio)
+        self.mediaDescription = mediaDescription
+        do {
+            let url = try await temporaryStoreMediaUsecase.execute(media: mediaDescription)
+            output.send(.audioFileURL(url: url))
+        } catch {
+            MHLogger.error("Error in store audio file url: \(error.localizedDescription)")
+            completion(nil)
+            output.send(.recordCompleted)
+        }
+    }
+    private func audioButtonTapped() {
+        switch audioIsRecoding {
+        case false:
+            output.send(.audioStart)
+        case true:
+            output.send(.audioStop)
+        }
+        audioIsRecoding.toggle()
+    }
+    
+    private func completeRecord(withCompletion: Bool) {
+        if audioIsRecoding {
+            output.send(.audioStop)
+        }
+        output.send(.recordCompleted)
+        completion(mediaDescription)
+    }
+}
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModelFactory.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModelFactory.swift
new file mode 100644
index 00000000..0e6c6f16
--- /dev/null
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Audio/ViewModel/CreateAudioViewModelFactory.swift
@@ -0,0 +1,18 @@
+import MHFoundation
+import MHDomain
+
+public struct CreateAudioViewModelFactory {
+    private let temporaryStoreMediaUseCase: TemporaryStoreMediaUseCase
+    
+    public init(temporaryStoreMediaUseCase: TemporaryStoreMediaUseCase) {
+        self.temporaryStoreMediaUseCase = temporaryStoreMediaUseCase
+    }
+    
+    public func make(completion: @escaping (MediaDescription?) -> Void) -> CreateAudioViewModel {
+        CreateAudioViewModel(
+            temporaryStoreMediaUsecase: temporaryStoreMediaUseCase,
+            completion: completion
+        )
+    }
+}
+
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewModel.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewModel.swift
deleted file mode 100644
index 21859780..00000000
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/CreateAudioViewModel.swift
+++ /dev/null
@@ -1,69 +0,0 @@
-import Foundation
-import Combine
-import MHCore
-
-public final class CreateAudioViewModel: ViewModelType {
-    enum Input {
-        case audioSessionOpened(url: URL?)
-        case audioButtonTapped
-        case saveButtonTapped
-        case viewDidDisappear
-    }
-    enum Output {
-        case updatedAudioFileURL
-        case audioStart
-        case audioStop
-        case savedAudioFile
-        case deleteTemporaryAudioFile
-    }
-    
-    private let output = PassthroughSubject<Output, Never>()
-    private var cancellables = Set<AnyCancellable>()
-    private var identifier: UUID?
-    private var audioTemporaryFileURL: URL?
-    private var audioIsRecoding: Bool = false
-    
-    func transform(input: AnyPublisher<Input, Never>) -> AnyPublisher<Output, Never> {
-        input.sink { [weak self] event in
-            switch event {
-            case .audioSessionOpened(let url):
-                self?.updateURL(url: url)
-            case .audioButtonTapped:
-                self?.audioButtonTapped()
-            case .saveButtonTapped:
-                self?.saveAudioFile()
-            case .viewDidDisappear:
-                self?.deleteAudioTemporaryFile()
-            }
-        }.store(in: &cancellables)
-        
-        return output.eraseToAnyPublisher()
-    }
-    
-    private func updateURL(url: URL?) {
-        self.audioTemporaryFileURL = url
-        output.send(.updatedAudioFileURL)
-    }
-    
-    private func audioButtonTapped() {
-        MHLogger.debug("audio button tapped in view model")
-        switch audioIsRecoding {
-        case false:
-            output.send(.audioStart)
-        case true:
-            output.send(.audioStop)
-        }
-        audioIsRecoding.toggle()
-    }
-    
-    private func saveAudioFile() {
-        // TODO: - save audio file in the file system
-        output.send(.savedAudioFile)
-    }
-    
-    private func deleteAudioTemporaryFile() {
-        guard let audioTemporaryFileURL else { return }
-        try? FileManager.default.removeItem(at: audioTemporaryFileURL)
-        output.send(.deleteTemporaryAudioFile)
-    }
-}
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/Enum/AudioPlayState.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/Enum/AudioPlayState.swift
new file mode 100644
index 00000000..05b18a6b
--- /dev/null
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/Enum/AudioPlayState.swift
@@ -0,0 +1,4 @@
+enum AudioPlayState {
+    case play
+    case pause
+}
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/View/MHAudioPlayerView.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/View/MHAudioPlayerView.swift
new file mode 100644
index 00000000..dfed73de
--- /dev/null
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/View/MHAudioPlayerView.swift
@@ -0,0 +1,221 @@
+import UIKit
+import Combine
+import AVFAudio
+import MHCore
+import MHDomain
+
+final class MHAudioPlayerView: UIView {
+    // MARK: - Property
+    // data bind
+    private var viewModel: AudioPlayerViewModel?
+    private let input = PassthroughSubject<AudioPlayerViewModel.Input, Never>()
+    private var cancellables = Set<AnyCancellable>()
+    // audio
+    private nonisolated(unsafe) var audioPlayer: AVAudioPlayer?
+    private var audioPlayState: AudioPlayState = .pause {
+        didSet {
+            switch audioPlayState {
+            case .play:
+                startTimer()
+            case .pause:
+                stopTimer()
+            }
+        }
+    }
+    private var timer: Timer?
+    
+    // MARK: - ViewComponent
+    private let backgroundBorderView: UIView = {
+        let backgroundBorderView = UIView()
+        backgroundBorderView.backgroundColor = .baseBackground
+        backgroundBorderView.layer.borderWidth = 3
+        backgroundBorderView.layer.cornerRadius = 25
+        backgroundBorderView.layer.borderColor = UIColor.captionPlaceHolder.cgColor
+        
+        return backgroundBorderView
+    }()
+    private let audioProgressView: UIView = {
+        let backgroundView = UIView()
+        backgroundView.backgroundColor = .mhPink
+        backgroundView.layer.cornerRadius = 21
+        
+        return backgroundView
+    }()
+    private var progressViewWidthConstraint: NSLayoutConstraint?
+    private var progressViewConstraints: [NSLayoutConstraint] = []
+    private let audioStateButton: UIButton = {
+        let button = UIButton()
+        button.setImage(UIImage(systemName: "play.fill"), for: .normal)
+        return button
+    }()
+    private let playImage = UIImage(systemName: "play.fill")
+    private let pauseImage = UIImage(systemName: "pause.fill")
+    private let audioPlayTimeLabel: UILabel = {
+        let label = UILabel()
+        label.text = "00:00"
+        label.font = UIFont.ownglyphBerry(size: 21)
+        label.textAlignment = .left
+        label.textColor = .dividedLine
+        
+        return label
+    }()
+    
+    public override init(frame: CGRect) {
+        super.init(frame: frame)
+        
+        setup()
+        bind()
+        configureAddSubview()
+        configureContstraints()
+        configureAddActions()
+    }
+    
+    public required init?(coder: NSCoder) {
+        super.init(frame: .zero)
+    }
+    
+    // MARK: - setup
+    private func setup() {
+        backgroundColor = .baseBackground
+        let audioSession = AVAudioSession.sharedInstance()
+        try? audioSession.setCategory(.playback, mode: .default, options: [])
+        try? audioSession.setActive(true)
+    }
+    
+    // MARK: - bind
+    private func bind() {
+        let output = viewModel?.transform(input: input.eraseToAnyPublisher())
+        output?.sink(receiveValue: { [weak self] event in
+            switch event {
+            case .getAudioState(let state):
+                self?.updateAudioPlayImage(audioPlayState: state)
+            }
+        }).store(in: &cancellables)
+    }
+    
+    private func configureAddSubview() {
+        addSubview(backgroundBorderView)
+        addSubview(audioProgressView)
+        addSubview(audioStateButton)
+        addSubview(audioPlayTimeLabel)
+    }
+    
+    private func configureContstraints() {
+        backgroundBorderView.setAnchor(
+            top: topAnchor, constantTop: 25,
+            leading: leadingAnchor,
+            bottom: bottomAnchor, constantBottom: 25,
+            trailing: trailingAnchor
+        )
+        
+        audioProgressView.translatesAutoresizingMaskIntoConstraints = false
+        progressViewWidthConstraint = audioProgressView.widthAnchor.constraint(equalToConstant: 290)
+        NSLayoutConstraint.activate([
+            audioProgressView.topAnchor.constraint(equalTo: topAnchor, constant: 29),
+            audioProgressView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 4),
+            audioProgressView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -29),
+            progressViewWidthConstraint ?? audioProgressView.widthAnchor.constraint(equalToConstant: 0)
+        ])
+        
+        audioStateButton.setAnchor(
+            top: topAnchor, constantTop: 25,
+            leading: leadingAnchor,
+            bottom: bottomAnchor, constantBottom: 25,
+            width: 50
+        )
+        
+        audioPlayTimeLabel.setAnchor(
+            top: topAnchor, constantTop: 25,
+            leading: audioStateButton.trailingAnchor,
+            bottom: bottomAnchor, constantBottom: 25,
+            width: 100
+        )
+    }
+    
+    private func configureAddActions() {
+        audioStateButton.addAction(UIAction { [weak self] _ in
+            guard let audioPlayState = self?.audioPlayState else { return }
+            self?.updateAudioPlayImage(audioPlayState: audioPlayState)
+            
+            self?.input.send(.audioStateButtonTapped)
+        }, for: .touchUpInside)
+    }
+    
+    private func updateAudioPlayImage(audioPlayState state: AudioPlayState) {
+        switch state {
+        case .play:
+            audioStateButton.setImage(playImage, for: .normal)
+            audioPlayer?.pause()
+            audioPlayState = .pause
+        case .pause:
+            audioStateButton.setImage(pauseImage, for: .normal)
+            audioPlayer?.play()
+            audioPlayState = .play
+        }
+    }
+    
+    private func updatePlayAudioProgress() {
+        guard let audioPlayer else { return }
+        let width = ceil(Float(audioPlayer.currentTime) / Float(audioPlayer.duration) * Float(299))
+        
+        progressViewWidthConstraint?.constant = CGFloat(width)
+        self.layoutIfNeeded()
+    }
+    
+    private func startTimer() {
+        timer?.invalidate()
+        timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in
+            guard let audioPlayer = self?.audioPlayer else { return }
+            Task { @MainActor in
+                if audioPlayer.isPlaying {
+                    self?.updatePlayAudioProgress()
+                    self?.setTimeLabel(seconds: Int(audioPlayer.currentTime))
+                }
+            }
+        }
+    }
+    
+    private func stopTimer() {
+        timer?.invalidate()
+        timer = nil
+    }
+    
+    private func setTimeLabel(seconds recordingSeconds: Int?) {
+        guard let recordingSeconds = recordingSeconds else { return }
+        let minutes = recordingSeconds / 60
+        let seconds = recordingSeconds % 60
+        audioPlayTimeLabel.text = String(format: "%02d:%02d", minutes, seconds)
+    }
+}
+
+extension MHAudioPlayerView: AVAudioPlayerDelegate {
+    nonisolated func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+        Task { @MainActor in
+            self.audioPlayState = .pause
+            self.audioStateButton.setImage(playImage, for: .normal)
+            
+            stopTimer()
+            
+            progressViewWidthConstraint?.constant = CGFloat(290)
+            UIView.animate(withDuration: 0) {
+                self.layoutIfNeeded()
+            }
+        }
+    }
+}
+
+extension MHAudioPlayerView: @preconcurrency MediaAttachable {
+    func configureSource(with mediaDescription: MediaDescription, data: Data) {
+        audioPlayer = try? AVAudioPlayer(data: data)
+        guard let audioPlayer else { return }
+        audioPlayer.delegate = self
+        self.setTimeLabel(seconds: Int(audioPlayer.duration.rounded()))
+    }
+    
+    func configureSource(with mediaDescription: MediaDescription, url: URL) {
+        audioPlayer = try? AVAudioPlayer(contentsOf: url)
+        guard let audioPlayer else { return }
+        audioPlayer.delegate = self
+        self.setTimeLabel(seconds: Int(audioPlayer.duration.rounded()))
+    }
+}
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/ViewModel/AudioPlayerViewModel.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/ViewModel/AudioPlayerViewModel.swift
new file mode 100644
index 00000000..f59f4832
--- /dev/null
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/Audio/Player/ViewModel/AudioPlayerViewModel.swift
@@ -0,0 +1,38 @@
+import MHFoundation
+import Combine
+
+final public class AudioPlayerViewModel: ViewModelType {
+    enum Input {
+        case audioStateButtonTapped
+    }
+    enum Output {
+        case getAudioState(AudioPlayState)
+    }
+    
+    private let output = PassthroughSubject<Output, Never>()
+    private var cancellables = Set<AnyCancellable>()
+    private var audioPlayState: AudioPlayState = .pause
+    
+    func transform(input: AnyPublisher<Input, Never>) -> AnyPublisher<Output, Never> {
+        input.sink { [weak self] event in
+            switch event {
+            case .audioStateButtonTapped:
+                self?.audioStateChanged()
+            }
+        }.store(in: &cancellables)
+        
+        return output.eraseToAnyPublisher()
+    }
+    
+    private func audioStateChanged() {
+        switch audioPlayState {
+        case .pause:
+            audioPlayState = .play
+            output.send(.getAudioState(.play))
+        case .play:
+            audioPlayState = .pause
+            output.send(.getAudioState(.pause))
+        }
+        return
+    }
+}
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditBookViewController.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditBookViewController.swift
index 3f7faaad..e73f9a37 100644
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditBookViewController.swift
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditBookViewController.swift
@@ -286,7 +286,20 @@ final class EditBookViewController: UIViewController {
         addVideoButton.addAction(addVideoAction, for: .touchUpInside)
         
         let addAudioAction = UIAction { [weak self] _ in
-            // TODO: - 오디오 추가 로직
+            guard let self else { return }
+            guard let audioViewModelFactory = try? DIContainer.shared.resolve(CreateAudioViewModelFactory.self) else { return }
+            let audioViewModel = audioViewModelFactory.make { [weak self] mediaDescription in
+                guard let mediaDescription else { return }
+                self?.input.send(.didAddMediaInTemporary(media: mediaDescription))
+            }
+            let audioViewController = CreateAudioViewController(viewModel: audioViewModel)
+            
+            if let sheet = audioViewController.sheetPresentationController {
+                sheet.detents = [.custom { detent in 0.35 * detent.maximumDetentValue }]
+                sheet.prefersGrabberVisible = true
+            }
+            
+            present(audioViewController, animated: true)
         }
         addAudioButton.addAction(addAudioAction, for: .touchUpInside)
         
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditPageCell.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditPageCell.swift
index c46c7b2a..1588f70f 100644
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditPageCell.swift
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/View/EditPageCell.swift
@@ -144,6 +144,12 @@ final class EditPageCell: UITableViewCell {
                     description: description
                 )
                 input.send(.didRequestMediaDataForURL(media: description))
+            case .audio:
+                mediaAttachment = MediaAttachment(
+                    view: MHAudioPlayerView(),
+                    description: description
+                )
+                input.send(.didRequestMediaDataForURL(media: description))
             default:
                 break
             }
@@ -180,9 +186,8 @@ final class EditPageCell: UITableViewCell {
                 description: media
             )
         case .audio:
-            // TODO: - audio 추가 필요
             attachment = MediaAttachment(
-                view: MHPolaroidPhotoView(),
+                view: MHAudioPlayerView(),
                 description: media
             )
         default:
@@ -207,9 +212,8 @@ final class EditPageCell: UITableViewCell {
                 description: media
             )
         case .audio:
-            // TODO: - audio 추가 필요
             attachment = MediaAttachment(
-                view: MHPolaroidPhotoView(),
+                view: MHAudioPlayerView(),
                 description: media
             )
         default:
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/ViewModel/EditBookViewModel.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/ViewModel/EditBookViewModel.swift
index 1f8ff43b..ed8d9141 100644
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/ViewModel/EditBookViewModel.swift
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/EditBook/ViewModel/EditBookViewModel.swift
@@ -7,6 +7,7 @@ final class EditBookViewModel: ViewModelType {
     // MARK: - Type
     enum Input {
         case viewDidLoad
+        case didAddMediaInTemporary(media: MediaDescription)
        case didAddMediaWithData(type: MediaType, attributes: [String: any Sendable]?, data: Data)
        case didAddMediaInURL(type: MediaType, attributes: [String: any Sendable]?, url: URL)
        case addPageButtonTapped
@@ -59,6 +60,8 @@ final class EditBookViewModel: ViewModelType {
             switch event {
             case .viewDidLoad:
                 Task { await self?.fetchBook() }
+            case let .didAddMediaInTemporary(media):
+                Task { await self?.addMedia(media) }
             case let .didAddMediaWithData(type, attributes, data):
                 Task { await self?.addMedia(type: type, attributes: attributes, with: data) }
             case let .didAddMediaInURL(type, attributes, url):
@@ -117,7 +120,16 @@ final class EditBookViewModel: ViewModelType {
             MHLogger.error(error.localizedDescription + #function)
         }
     }
-    
+    private func addMedia(_ description: MediaDescription) async {
+        do {
+            try await storeMediaUseCase.excute(media: description, to: bookID)
+            let url: URL = try await fetchMediaUseCase.execute(media: description, in: bookID)
+            editPageViewModels[currentPageIndex].addMedia(media: description, url: url)
+        } catch {
+            output.send(.error(message: "미디어를 추가하는데 실패했습니다."))
+            MHLogger.error(error.localizedDescription + #function)
+        }
+    }
     private func addEmptyPage() {
         let editPageViewModel = EditPageViewModel(
             fetchMediaUseCase: fetchMediaUseCase,
diff --git a/MemorialHouse/MHPresentation/MHPresentation/Source/ReadPage/View/ReadPageViewController.swift b/MemorialHouse/MHPresentation/MHPresentation/Source/ReadPage/View/ReadPageViewController.swift
index df521286..395771bf 100644
--- a/MemorialHouse/MHPresentation/MHPresentation/Source/ReadPage/View/ReadPageViewController.swift
+++ b/MemorialHouse/MHPresentation/MHPresentation/Source/ReadPage/View/ReadPageViewController.swift
@@ -105,7 +105,9 @@ final class ReadPageViewController: UIViewController {
         attachmentMetaData: [Int: MediaDescription]
     ) -> NSAttributedString {
         let mutableAttributedString = NSMutableAttributedString(string: text)
-        attachmentMetaData.forEach { location, description in
+        attachmentMetaData.forEach {
+            location,
+            description in
             // TODO: - MediaType 별로 바꿔줘야함
             var mediaAttachment: MediaAttachment?
             switch description.type {
@@ -121,6 +123,12 @@ final class ReadPageViewController: UIViewController {
                     description: description
                 )
                 input.send(.didRequestMediaDataForURL(media: description))
+            case .audio:
+                mediaAttachment = MediaAttachment(
+                    view: MHAudioPlayerView(),
+                    description: description
+                )
+                input.send(.didRequestMediaDataForURL(media: description))
             default:
                 break
             }