diff --git a/.github/workflows/all-dev-tn-new.yml b/.github/workflows/all-dev-tn-new.yml index 97a56fc7..234eb2f2 100644 --- a/.github/workflows/all-dev-tn-new.yml +++ b/.github/workflows/all-dev-tn-new.yml @@ -3,7 +3,7 @@ name: ALL new tn dev Deployment on: push: branches: - - all-1.3-tn-dev-hotfix + - all-1.3-feedback-change jobs: deploy: diff --git a/.github/workflows/all-dev-tn.yml b/.github/workflows/all-dev-tn.yml index 704ff20f..33c477cb 100644 --- a/.github/workflows/all-dev-tn.yml +++ b/.github/workflows/all-dev-tn.yml @@ -3,7 +3,7 @@ name: ALL tn dev Deployment on: push: branches: - - all-1.2-tn-dev + - all-1.3.1-dev jobs: deploy: diff --git a/.gitignore b/.gitignore index bf1a9da0..52c9b6e8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ lerna-debug.log* .env #build build - +*.env* # Diagnostic reports (https://nodejs.org/api/report.html) report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json diff --git a/src/App.js b/src/App.js index 76eac97c..037205a8 100644 --- a/src/App.js +++ b/src/App.js @@ -6,7 +6,7 @@ import FingerprintJS from "@fingerprintjs/fingerprintjs"; import routes from "./routes"; import { AppContent } from "./views"; import theme from "./assets/styles/theme"; -import { initialize } from "./services/telementryService"; +import { initialize, end } from "./services/telementryService"; import { startEvent } from "./services/callTelemetryIntract"; import "@project-sunbird/telemetry-sdk/index.js"; import { getParameter } from "./utils/constants"; @@ -72,6 +72,20 @@ const App = () => { setFp(); }, []); + useEffect(() => { + const handleBeforeUnload = (event) => { + window.telemetry && window.telemetry.syncEvents && window.telemetry.syncEvents(); + }; + + // Add the event listener + window.addEventListener("beforeunload", handleBeforeUnload); + + // Cleanup the event listener on component unmount + return () => { + window.removeEventListener("beforeunload", handleBeforeUnload); + }; + }, []); + useEffect(() => { let virtualId; diff --git a/src/components/Assesment/Assesment.jsx b/src/components/Assesment/Assesment.jsx index 36a7598f..593bbea4 100644 --- a/src/components/Assesment/Assesment.jsx +++ b/src/components/Assesment/Assesment.jsx @@ -598,10 +598,12 @@ const Assesment = ({ discoverStart }) => { } localStorage.setItem("lang", lang || "ta"); - const getPointersDetails = await axios.get( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${usernameDetails?.data?.result?.virtualID}/${session_id}?language=${lang}` - ); - setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); + if (process.env.REACT_APP_IS_APP_IFRAME !== "true" && localStorage.getItem("contentSessionId") !== null) { + const getPointersDetails = await axios.get( + `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${usernameDetails?.data?.result?.virtualID}/${session_id}?language=${lang}` + ); + setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); + } dispatch(setVirtualId(usernameDetails?.data?.result?.virtualID)); })(); @@ -635,7 +637,7 @@ const Assesment = ({ discoverStart }) => { localStorage.setItem("sessionId", sessionId); } - if (virtualId) { + if (process.env.REACT_APP_IS_APP_IFRAME !== "true" && virtualId && localStorage.getItem("contentSessionId") !== null) { const getPointersDetails = await axios.get( `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${virtualId}/${sessionId}?language=${lang}` ); diff --git a/src/components/AssesmentEnd/AssesmentEnd.jsx 
b/src/components/AssesmentEnd/AssesmentEnd.jsx index e3b1001a..77a0be56 100644 --- a/src/components/AssesmentEnd/AssesmentEnd.jsx +++ b/src/components/AssesmentEnd/AssesmentEnd.jsx @@ -16,19 +16,23 @@ import { useEffect, useState } from "react"; import LevelCompleteAudio from "../../assets/audio/levelComplete.wav"; import { ProfileHeader } from "../Assesment/Assesment"; import desktopLevel5 from "../../assets/images/assesmentComplete.png"; -import config from '../../utils/urlConstants.json'; +import config from "../../utils/urlConstants.json"; import { uniqueId } from "../../services/utilService"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; const AssesmentEnd = () => { const [shake, setShake] = useState(true); const [level, setLevel] = useState(""); const [previousLevel, setPreviousLevel] = useState(""); const [points, setPoints] = useState(0); + const levelCompleteAudioSrc = usePreloadAudio(LevelCompleteAudio); useEffect(() => { (async () => { - let audio = new Audio(LevelCompleteAudio); - audio.play(); + if (levelCompleteAudioSrc) { + let audio = new Audio(levelCompleteAudioSrc); + audio.play(); + } const virtualId = getLocalData("virtualId"); const lang = getLocalData("lang"); const previous_level = getLocalData("previous_level"); @@ -40,19 +44,21 @@ const AssesmentEnd = () => { setLevel(data.data.milestone_level); setLocalData("userLevel", data.data.milestone_level?.replace("m", "")); let sessionId = getLocalData("sessionId"); - if (!sessionId){ + if (!sessionId) { sessionId = uniqueId(); - localStorage.setItem("sessionId", sessionId) + localStorage.setItem("sessionId", sessionId); + } + if (process.env.REACT_APP_IS_APP_IFRAME !== "true" && localStorage.getItem("contentSessionId") !== null) { + const getPointersDetails = await axios.get( + `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${virtualId}/${sessionId}?language=${lang}` + ); + setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); } - const getPointersDetails = await axios.get( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${virtualId}/${sessionId}?language=${lang}` - ); - setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); })(); setTimeout(() => { setShake(false); }, 4000); - }, []); + }, [levelCompleteAudioSrc]); const navigate = useNavigate(); let newLevel = level.replace("m", ""); diff --git a/src/components/DiscoverEnd/DiscoverEnd.jsx b/src/components/DiscoverEnd/DiscoverEnd.jsx index 81a96cb3..4769d97c 100644 --- a/src/components/DiscoverEnd/DiscoverEnd.jsx +++ b/src/components/DiscoverEnd/DiscoverEnd.jsx @@ -8,12 +8,9 @@ import back from "../../assets/images/back-arrow.svg"; import discoverEndLeft from "../../assets/images/discover-end-left.svg"; import discoverEndRight from "../../assets/images/discover-end-right.svg"; import textureImage from "../../assets/images/textureImage.png"; -import { - LetsStart, - getLocalData, - setLocalData, -} from "../../utils/constants"; -import config from '../../utils/urlConstants.json'; +import { LetsStart, getLocalData, setLocalData } from "../../utils/constants"; +import config from "../../utils/urlConstants.json"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; const sectionStyle = { backgroundImage: `url(${textureImage})`, @@ -32,12 +29,14 @@ const sectionStyle = { const SpeakSentenceComponent = () => { const [shake, setShake] = useState(true); const [level, setLevel] = useState(""); + const levelCompleteAudioSrc = 
usePreloadAudio(LevelCompleteAudio); useEffect(() => { - (async () => { - let audio = new Audio(LevelCompleteAudio); - audio.play(); + if (levelCompleteAudioSrc) { + let audio = new Audio(levelCompleteAudioSrc); + audio.play(); + } const virtualId = getLocalData("virtualId"); const lang = getLocalData("lang"); const getMilestoneDetails = await axios.get( @@ -50,14 +49,14 @@ const SpeakSentenceComponent = () => { setTimeout(() => { setShake(false); }, 4000); - }, []); + }, [levelCompleteAudioSrc]); const handleProfileBack = () => { try { - if (process.env.REACT_APP_IS_APP_IFRAME === 'true') { - navigate("/") + if (process.env.REACT_APP_IS_APP_IFRAME === "true") { + navigate("/"); } else { - navigate("/discover-start") + navigate("/discover-start"); } } catch (error) { console.error("Error posting message:", error); @@ -132,7 +131,7 @@ const SpeakSentenceComponent = () => { handleProfileBack()} + onClick={handleProfileBack} sx={{ display: "flex", justifyContent: "center", diff --git a/src/components/DiscoverSentance/DiscoverSentance.jsx b/src/components/DiscoverSentance/DiscoverSentance.jsx index 9b574271..3f6ee8b0 100644 --- a/src/components/DiscoverSentance/DiscoverSentance.jsx +++ b/src/components/DiscoverSentance/DiscoverSentance.jsx @@ -14,6 +14,7 @@ import LevelCompleteAudio from "../../assets/audio/levelComplete.wav"; import config from "../../utils/urlConstants.json"; import { MessageDialog } from "../Assesment/Assesment"; import { Log } from "../../services/telementryService"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; const SpeakSentenceComponent = () => { const [currentQuestion, setCurrentQuestion] = useState(0); @@ -37,10 +38,13 @@ const SpeakSentenceComponent = () => { const [totalSyllableCount, setTotalSyllableCount] = useState(""); const [isNextButtonCalled, setIsNextButtonCalled] = useState(false); + const levelCompleteAudioSrc = usePreloadAudio(LevelCompleteAudio); + const callConfettiAndPlay = () => { - let audio = new Audio(LevelCompleteAudio); + let audio = new Audio(levelCompleteAudioSrc); audio.play(); callConfetti(); + window.telemetry?.syncEvents && window.telemetry.syncEvents(); }; useEffect(() => { @@ -118,35 +122,18 @@ const SpeakSentenceComponent = () => { try { const lang = getLocalData("lang"); - if (!(localStorage.getItem("contentSessionId") !== null)) { - const pointsRes = await axios.post( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_POINTER}`, - { - userId: localStorage.getItem("virtualId"), - sessionId: localStorage.getItem("sessionId"), - points: 1, - language: lang, - milestone: "m0", - } - ); - setPoints(pointsRes?.data?.result?.totalLanguagePoints || 0); - } else { - send(1); - // setPoints(localStorage.getItem("currentLessonScoreCount")); - } - - await axios.post( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_LESSON}`, - { - userId: localStorage.getItem("virtualId"), - sessionId: localStorage.getItem("sessionId"), - milestone: `discoveryList/discovery/${currentCollectionId}`, - lesson: localStorage.getItem("storyTitle"), - progress: ((currentQuestion + 1) * 100) / questions.length, - language: lang, - milestoneLevel: "m0", - } - ); + // await axios.post( + // `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_LESSON}`, + // { + // userId: localStorage.getItem("virtualId"), + // sessionId: localStorage.getItem("sessionId"), + // milestone: `discoveryList/discovery/${currentCollectionId}`, + // lesson: localStorage.getItem("storyTitle"), + // progress: 
((currentQuestion + 1) * 100) / questions.length, + // language: lang, + // milestoneLevel: "m0", + // } + // ); if (currentQuestion < questions.length - 1) { setCurrentQuestion(currentQuestion + 1); @@ -164,6 +151,24 @@ const SpeakSentenceComponent = () => { language: localStorage.getItem("lang"), } ); + + if (!(localStorage.getItem("contentSessionId") !== null)) { + const pointsRes = await axios.post( + `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_POINTER}`, + { + userId: localStorage.getItem("virtualId"), + sessionId: localStorage.getItem("sessionId"), + points: 1, + language: lang, + milestone: "m0", + } + ); + setPoints(pointsRes?.data?.result?.totalLanguagePoints || 0); + } else { + send(5); + // setPoints(localStorage.getItem("currentLessonScoreCount")); + } + setInitialAssesment(false); const { data: getSetData } = getSetResultRes; const data = JSON.stringify(getSetData?.data); diff --git a/src/components/Layouts.jsx/MainLayout.jsx b/src/components/Layouts.jsx/MainLayout.jsx index e875a708..661ccf18 100644 --- a/src/components/Layouts.jsx/MainLayout.jsx +++ b/src/components/Layouts.jsx/MainLayout.jsx @@ -163,27 +163,70 @@ const MainLayout = (props) => { } }; + const [audioCache, setAudioCache] = useState({}); + + useEffect(() => { + const preloadAudio = async () => { + try { + const urls = [LevelCompleteAudio, gameLoseAudio]; + const cache = {}; + + for (const url of urls) { + const response = await fetch(url); + const audioBlob = await response.blob(); + const audioUrl = URL.createObjectURL(audioBlob); + cache[url] = audioUrl; + } + + setAudioCache(cache); + } catch (error) { + console.error("Error preloading audio:", error); + } + }; + + preloadAudio(); + + // Cleanup cached audio URLs on unmount + return () => { + Object.values(audioCache).forEach((audioUrl) => + URL.revokeObjectURL(audioUrl) + ); + }; + }, []); + useEffect(() => { if (isShowCase && gameOverData) { - setShake(gameOverData ? gameOverData.userWon : true); + setShake(gameOverData.userWon ?? true); - let audio = ""; + let audioSrc; if (gameOverData) { - audio = new Audio( - gameOverData.userWon ? LevelCompleteAudio : gameLoseAudio - ); + audioSrc = gameOverData.userWon + ? 
audioCache[LevelCompleteAudio] + : audioCache[gameLoseAudio]; + } else { + audioSrc = audioCache[LevelCompleteAudio]; + } + + if (audioSrc) { + const audio = new Audio(audioSrc); + audio.play().catch((error) => { + console.error("Error playing audio:", error); + }); + if (!gameOverData?.userWon) { callConfettiSnow(); } - } else { - audio = new Audio(LevelCompleteAudio); } - audio.play(); - setTimeout(() => { + + const shakeTimeout = setTimeout(() => { setShake(false); }, 4000); + + return () => { + clearTimeout(shakeTimeout); + }; } - }, [startShowCase, isShowCase, gameOverData]); + }, [startShowCase, isShowCase, gameOverData, audioCache]); let currentPracticeStep = progressData?.currentPracticeStep; let currentPracticeProgress = progressData?.currentPracticeProgress || 0; @@ -752,7 +795,7 @@ const MainLayout = (props) => { > gameLost @@ -1196,6 +1239,9 @@ MainLayout.propTypes = { storedData: PropTypes.array, resetStoredData: PropTypes.func, pageName: PropTypes.string, + gameOverData: PropTypes.shape({ + userWon: PropTypes.bool, + }), }; export default MainLayout; diff --git a/src/components/Practice/Mechanics3.jsx b/src/components/Practice/Mechanics3.jsx index e89913f0..6bbc4b3a 100644 --- a/src/components/Practice/Mechanics3.jsx +++ b/src/components/Practice/Mechanics3.jsx @@ -18,6 +18,7 @@ import VoiceAnalyser from "../../utils/VoiceAnalyser"; import { Modal } from "@mui/material"; import ZoomInIcon from "@mui/icons-material/ZoomIn"; import CloseIcon from "@mui/icons-material/Close"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; // TODO: update it as per File name OR update file name as per export variable name const Mechanics2 = ({ @@ -64,6 +65,9 @@ const Mechanics2 = ({ const [shake, setShake] = useState(false); const [wordToFill, setWordToFill] = useState(""); const [disabledWords, setDisabledWords] = useState(false); + const correctSoundAudio = usePreloadAudio(correctSound); + const wrongSoundAudio = usePreloadAudio(wrongSound); + const removeSoundAudio = usePreloadAudio(removeSound); const [answer, setAnswer] = useState({ text: "", audio_url: "", @@ -85,7 +89,7 @@ const Mechanics2 = ({ setAnswer(word); const isSoundCorrect = word.isAns; - let audio = new Audio(isSoundCorrect ? correctSound : wrongSound); + let audio = new Audio(isSoundCorrect ? 
correctSoundAudio : wrongSoundAudio); if (!isSoundCorrect) { setEnableNext(false); } @@ -97,7 +101,7 @@ const Mechanics2 = ({ }; const handleRemoveWord = () => { - let audio = new Audio(removeSound); + let audio = new Audio(removeSoundAudio); setAnswer({ text: "", audio_url: "", image_url: "", isAns: false }); audio.play(); setEnableNext(false); @@ -261,8 +265,8 @@ const Mechanics2 = ({ }, }} > - - {image && ( + {image?.split("/")?.[4] && ( + setZoomOpen(true)} src={image} @@ -273,32 +277,36 @@ const Mechanics2 = ({ }} alt="" /> - )} - {/* Subtle gradient overlay across the top */} - - {/* Zoom icon positioned in the top-left corner */} - setZoomOpen(true)} - sx={{ color: "white", fontSize: "22px", cursor: "pointer" }} - /> + {/* Subtle gradient overlay across the top */} + + {/* Zoom icon positioned in the top-left corner */} + setZoomOpen(true)} + sx={{ + color: "white", + fontSize: "22px", + cursor: "pointer", + }} + /> + - + )} setZoomOpen(false)} diff --git a/src/components/Practice/Mechanics4.jsx b/src/components/Practice/Mechanics4.jsx index 5b22c6c0..8b4fd785 100644 --- a/src/components/Practice/Mechanics4.jsx +++ b/src/components/Practice/Mechanics4.jsx @@ -13,6 +13,7 @@ import wrongSound from "../../assets/audio/wrong.wav"; import addSound from "../../assets/audio/add.mp3"; import removeSound from "../../assets/audio/remove.wav"; import { splitGraphemes } from "split-graphemes"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; const Mechanics4 = ({ page, @@ -51,6 +52,10 @@ const Mechanics4 = ({ const [words, setWords] = useState( type === "word" ? [] : ["Friend", "She is", "My"] ); + const correctSoundAudio = usePreloadAudio(correctSound); + const wrongSoundAudio = usePreloadAudio(wrongSound); + const addSoundAudio = usePreloadAudio(addSound); + const removeSoundAudio = usePreloadAudio(removeSound); const [wordsAfterSplit, setWordsAfterSplit] = useState([]); useEffect(() => { @@ -114,7 +119,7 @@ const Mechanics4 = ({ }, 3000); // audioPlay[word](); if (selectedWords?.length + 1 !== wordsAfterSplit?.length || isSelected) { - let audio = new Audio(isSelected ? removeSound : addSound); + let audio = new Audio(isSelected ? removeSoundAudio : addSoundAudio); audio.play(); setEnableNext(false); } @@ -134,8 +139,8 @@ const Mechanics4 = ({ if (selectedWords?.length + 1 === wordsAfterSplit?.length) { let audio = new Audio( [...selectedWords, word]?.join(" ") === parentWords - ? correctSound - : wrongSound + ? 
correctSoundAudio + : wrongSoundAudio ); audio.play(); } diff --git a/src/components/Practice/Mechanics5.jsx b/src/components/Practice/Mechanics5.jsx index 76121f04..dd9c3552 100644 --- a/src/components/Practice/Mechanics5.jsx +++ b/src/components/Practice/Mechanics5.jsx @@ -185,42 +185,44 @@ const Mechanics5 = ({ > {/* Image with full-width gradient overlay on top */} - - contentImage setZoomOpen(true)} // Open modal on click - /> - - {/* Subtle gradient overlay across the top */} - - {/* Zoom icon positioned in the top-left corner */} - setZoomOpen(true)} - sx={{ color: "white", fontSize: "22px", cursor: "pointer" }} + {image?.split("/")?.[4] && ( + + contentImage setZoomOpen(true)} // Open modal on click /> + + {/* Subtle gradient overlay across the top */} + + {/* Zoom icon positioned in the top-left corner */} + setZoomOpen(true)} + sx={{ color: "white", fontSize: "22px", cursor: "pointer" }} + /> + - + )} {/* Modal for zoomed image with gradient and close icon */} { + const [audioSrc, setAudioSrc] = useState(null); + + useEffect(() => { + const preloadAudio = async () => { + try { + const response = await fetch(audioUrl); + const audioBlob = await response.blob(); + const audioObjectUrl = URL.createObjectURL(audioBlob); + setAudioSrc(audioObjectUrl); + } catch (error) { + console.error("Error preloading audio:", error); + } + }; + + preloadAudio(); + + return () => { + if (audioSrc) { + URL.revokeObjectURL(audioSrc); + } + }; + }, [audioUrl]); + + return audioSrc; +}; +export default usePreloadAudio; diff --git a/src/index.css b/src/index.css index 217a9822..538f665f 100644 --- a/src/index.css +++ b/src/index.css @@ -1,3 +1,19 @@ +.loader { + width: 250px; + height: 20px; + margin-top: 80px; + border-radius: 20px; + background: + linear-gradient(rgb(80, 216, 1) 0 0) 0/0% no-repeat lightblue; + animation: l2 1s infinite steps(10); +} + +@keyframes l2 { + 100% { + background-size: 110% + } +} + .hide { display: none; } diff --git a/src/services/telementryService.js b/src/services/telementryService.js index cd51adbe..c946756a 100644 --- a/src/services/telementryService.js +++ b/src/services/telementryService.js @@ -3,6 +3,7 @@ import { CsTelemetryModule } from '@project-sunbird/client-services/telemetry'; import { uniqueId } from './utilService'; import { jwtDecode } from '../../node_modules/jwt-decode/build/cjs/index'; +let startTime; // Variable to store the timestamp when the start event is raised let contentSessionId; let playSessionId; let url; @@ -65,6 +66,7 @@ export const initialize = async ({ context, config, metadata }) => { export const start = (duration) => { try { + startTime = Date.now(); // Record the start time CsTelemetryModule.instance.telemetryService.raiseStartTelemetry({ options: getEventOptions(), edata: { @@ -114,15 +116,22 @@ export const Log = (context, pageid, telemetryMode) => { }; export const end = (data) => { - CsTelemetryModule.instance.telemetryService.raiseEndTelemetry({ - edata: { - type: 'content', - mode: 'play', - pageid: url, - summary: data?.summary || {}, - duration: data?.duration || '000', - }, - }); + try { + const endTime = Date.now(); // Record the end time + const duration = ((endTime - startTime) / 1000).toFixed(2); // Calculate duration in seconds + + CsTelemetryModule.instance.telemetryService.raiseEndTelemetry({ + edata: { + type: 'content', + mode: 'play', + pageid: url, + summary: data?.summary || {}, + duration: duration, // Log the calculated duration + }, + }); + } catch (error) { + console.error("Error in end telemetry event:", 
error); + } }; export const interact = (telemetryMode) => { diff --git a/src/utils/AudioCompare.js b/src/utils/AudioCompare.js index a81b3ac2..e91d228a 100644 --- a/src/utils/AudioCompare.js +++ b/src/utils/AudioCompare.js @@ -1,10 +1,11 @@ import React, { useState, useEffect, useRef } from "react"; import RecordRTC from "recordrtc"; -import { Box } from "@mui/material"; +import { Box, CircularProgress } from "@mui/material"; import { ListenButton, RetryIcon, SpeakButton, StopButton } from "./constants"; import RecordVoiceVisualizer from "./RecordVoiceVisualizer"; import playButton from "../../src/assets/listen.png"; import pauseButton from "../../src/assets/pause.png"; +import PropTypes from "prop-types"; const AudioRecorder = (props) => { const [isRecording, setIsRecording] = useState(false); @@ -12,6 +13,7 @@ const AudioRecorder = (props) => { const [audioBlob, setAudioBlob] = useState(null); const recorderRef = useRef(null); const mediaStreamRef = useRef(null); + const [showLoader, setShowLoader] = useState(false); useEffect(() => { // Cleanup when component unmounts @@ -26,12 +28,12 @@ const AudioRecorder = (props) => { }, []); const startRecording = async () => { - setStatus("recording"); - if (props.setEnableNext) { - props.setEnableNext(false); - } try { const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + if (props.setEnableNext) { + props.setEnableNext(false); + } + setStatus("recording"); mediaStreamRef.current = stream; // Use RecordRTC with specific configurations to match the blob structure @@ -53,29 +55,29 @@ const AudioRecorder = (props) => { }; const stopRecording = () => { - setStatus("inactive"); - if (recorderRef.current) { - recorderRef.current.stopRecording(() => { - const blob = recorderRef.current.getBlob(); - - if (blob) { - setAudioBlob(blob); - saveBlob(blob); // Persist the blob - } else { - console.error("Failed to retrieve audio blob."); - } - - // Stop the media stream - if (mediaStreamRef.current) { - mediaStreamRef.current.getTracks().forEach((track) => track.stop()); - } + setShowLoader(true); + const timeoutId = setTimeout(() => { + setShowLoader(false); + setStatus("inactive"); + if (recorderRef.current) { + recorderRef.current.stopRecording(() => { + const blob = recorderRef.current.getBlob(); + if (blob) { + setAudioBlob(blob); + saveBlob(blob); + } else { + console.error("Failed to retrieve audio blob."); + } + if (mediaStreamRef.current) { + mediaStreamRef.current.getTracks().forEach((track) => track.stop()); + } + setIsRecording(false); + props.setEnableNext?.(true); + }); + } + }, 500); - setIsRecording(false); - }); - } - if (props.setEnableNext) { - props.setEnableNext(true); - } + return () => clearTimeout(timeoutId); }; const saveBlob = (blob) => { @@ -104,9 +106,13 @@ const AudioRecorder = (props) => { > - - - + {showLoader ? ( +
+ ) : ( + + + + )} ); } else { @@ -119,7 +125,8 @@ const AudioRecorder = (props) => { }} className="game-action-button" > - {props?.originalText && + {props.enableAfterLoad && + props?.originalText && (!props.dontShowListen || props.recordedAudio) && ( <> {!props.isShowCase && ( @@ -175,17 +182,24 @@ const AudioRecorder = (props) => { )}
- {props?.originalText && !props.showOnlyListen && ( - - {!props.recordedAudio ? : } + {props.enableAfterLoad ? ( + props?.originalText && + !props.showOnlyListen && ( + + {!props.recordedAudio ? : } + + ) + ) : ( + + )}
@@ -198,4 +212,11 @@ const AudioRecorder = (props) => { ); }; +AudioRecorder.propTypes = { + enableAfterLoad: PropTypes.bool, + showOnlyListen: PropTypes.bool, + recordedAudio: PropTypes.string, + originalText: PropTypes.string, +}; + export default AudioRecorder; diff --git a/src/utils/VoiceAnalyser.js b/src/utils/VoiceAnalyser.js index 1884ab47..3639cec2 100644 --- a/src/utils/VoiceAnalyser.js +++ b/src/utils/VoiceAnalyser.js @@ -33,6 +33,7 @@ import config from "./urlConstants.json"; import { filterBadWords } from "./Badwords"; import S3Client from "../config/awsS3"; import { PutObjectCommand } from "@aws-sdk/client-s3"; +import usePreloadAudio from "../hooks/usePreloadAudio"; /* eslint-disable */ const AudioPath = { @@ -63,6 +64,7 @@ function VoiceAnalyser(props) { const [pauseAudio, setPauseAudio] = useState(false); const [recordedAudio, setRecordedAudio] = useState(""); const [recordedAudioBase64, setRecordedAudioBase64] = useState(""); + const [enableAfterLoad, setEnableAfterLoad] = useState(false); const [audioPermission, setAudioPermission] = useState(null); const [apiResponse, setApiResponse] = useState(""); const [currentIndex, setCurrentIndex] = useState(); @@ -76,6 +78,8 @@ function VoiceAnalyser(props) { process.env.REACT_APP_IS_AUDIOPREPROCESSING === "true" ); const [isMatching, setIsMatching] = useState(false); + const livesAddAudio = usePreloadAudio(livesAdd); + const livesCutAudio = usePreloadAudio(livesCut); //console.log('audio', recordedAudio, isMatching); @@ -274,10 +278,17 @@ function VoiceAnalyser(props) { const lang = getLocalData("lang") || "ta"; fetchASROutput(lang, recordedAudioBase64); setLoader(true); + setEnableAfterLoad(false); } } }, [props.isNextButtonCalled]); + useEffect(() => { + if (props.originalText) { + setEnableAfterLoad(true); + } + }, [props.originalText]); + useEffect(() => { if (recordedAudioBase64 !== "") { if (props.setIsNextButtonCalled) { @@ -620,7 +631,7 @@ function VoiceAnalyser(props) { } else { isLiveLost = false; } - const audio = new Audio(isLiveLost ? livesCut : livesAdd); + const audio = new Audio(isLiveLost ? livesCutAudio : livesAddAudio); audio.play(); // Update the state or data structure with the new lives data. 
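The AudioCompare.js hunks above change the stop path: stopping now shows a short loader and defers the actual RecordRTC stop by 500 ms so the tail of the utterance is still captured before the blob is read, and "Next" is only re-enabled once the blob exists. A minimal sketch of that deferred-stop flow, assuming RecordRTC's callback-style stopRecording/getBlob API; the startRecording/stopRecordingDeferred helpers, the onBlobReady callback, and the bare { type: "audio" } options are illustrative stand-ins, not the component's exact code:

import RecordRTC from "recordrtc";

const STOP_DELAY_MS = 500; // the diff waits 500 ms before stopping

// Ask for the microphone and start a RecordRTC session.
async function startRecording() {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const recorder = new RecordRTC(stream, { type: "audio" }); // recorder options trimmed in this sketch
  recorder.startRecording();
  return { recorder, stream };
}

// Show a loader in the caller, wait a short grace period, then stop,
// read the blob, and release the microphone tracks.
function stopRecordingDeferred({ recorder, stream }, onBlobReady) {
  const timeoutId = setTimeout(() => {
    recorder.stopRecording(() => {
      const blob = recorder.getBlob();
      if (blob) {
        onBlobReady(blob); // persist / upload the recording, then re-enable "Next"
      } else {
        console.error("Failed to retrieve audio blob.");
      }
      // Release the microphone once the blob has been read.
      stream.getTracks().forEach((track) => track.stop());
    });
  }, STOP_DELAY_MS);

  // Callers can clear the timeout if the component unmounts before it fires.
  return () => clearTimeout(timeoutId);
}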
@@ -695,6 +706,7 @@ function VoiceAnalyser(props) { setEnableNext={props.setEnableNext} showOnlyListen={props.showOnlyListen} setOpenMessageDialog={props.setOpenMessageDialog} + enableAfterLoad={enableAfterLoad} /> {/* */} diff --git a/src/views/Practice/Practice.jsx b/src/views/Practice/Practice.jsx index 53520294..e78f2234 100644 --- a/src/views/Practice/Practice.jsx +++ b/src/views/Practice/Practice.jsx @@ -22,6 +22,7 @@ import config from "../../utils/urlConstants.json"; import { MessageDialog } from "../../components/Assesment/Assesment"; import { Log } from "../../services/telementryService"; import Mechanics6 from "../../components/Practice/Mechanics6"; +import usePreloadAudio from "../../hooks/usePreloadAudio"; const Practice = () => { const [page, setPage] = useState(""); @@ -70,10 +71,13 @@ const Practice = () => { } }, [startShowCase]); + const levelCompleteAudioSrc = usePreloadAudio(LevelCompleteAudio); + const callConfettiAndPlay = () => { - let audio = new Audio(LevelCompleteAudio); + const audio = new Audio(levelCompleteAudioSrc); audio.play(); callConfetti(); + window.telemetry?.syncEvents && window.telemetry.syncEvents(); }; useEffect(() => { @@ -154,24 +158,6 @@ const Practice = () => { try { const lang = getLocalData("lang"); - if (localStorage.getItem("contentSessionId") !== null) { - setPoints(1); - if (isShowCase) { - send(1); - } - } else { - const pointsRes = await axios.post( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_POINTER}`, - { - userId: localStorage.getItem("virtualId"), - sessionId: localStorage.getItem("sessionId"), - points: 1, - language: lang, - milestone: `m${level}`, - } - ); - setPoints(pointsRes?.data?.result?.totalLanguagePoints || 0); - } const virtualId = getLocalData("virtualId"); const sessionId = getLocalData("sessionId"); @@ -194,18 +180,18 @@ const Practice = () => { let showcasePercentage = ((currentQuestion + 1) * 100) / questions.length; - await axios.post( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_LESSON}`, - { - userId: virtualId, - sessionId: sessionId, - milestone: isShowCase ? "showcase" : `practice`, - lesson: currentPracticeStep, - progress: isShowCase ? showcasePercentage : currentPracticeProgress, - language: lang, - milestoneLevel: `m${level}`, - } - ); + // await axios.post( + // `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_LESSON}`, + // { + // userId: virtualId, + // sessionId: sessionId, + // milestone: isShowCase ? "showcase" : `practice`, + // lesson: currentPracticeStep, + // progress: isShowCase ? 
showcasePercentage : currentPracticeProgress, + // language: lang, + // milestoneLevel: `m${level}`, + // } + // ); let newPracticeStep = currentQuestion === questions.length - 1 || isGameOver @@ -226,6 +212,27 @@ const Practice = () => { let currentPracticeStep = practiceProgress[virtualId].currentPracticeStep; let isShowCase = currentPracticeStep === 4 || currentPracticeStep === 9; // P4 or P8 + + // Set points + if (localStorage.getItem("contentSessionId") !== null) { + setPoints(1); + if (isShowCase) { + send(5); + } + } else { + const pointsRes = await axios.post( + `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.ADD_POINTER}`, + { + userId: localStorage.getItem("virtualId"), + sessionId: localStorage.getItem("sessionId"), + points: 1, + language: lang, + milestone: `m${level}`, + } + ); + setPoints(pointsRes?.data?.result?.totalLanguagePoints || 0); + } + if (isShowCase || isGameOver) { // assesment @@ -448,12 +455,14 @@ const Practice = () => { // TODO: Handle Error for lessons - no lesson progress - starting point should be P1 - const getPointersDetails = await axios.get( - `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${virtualId}/${sessionId}?language=${lang}` - ); + if (process.env.REACT_APP_IS_APP_IFRAME !== "true" && localStorage.getItem("contentSessionId") !== null) { + const getPointersDetails = await axios.get( + `${process.env.REACT_APP_LEARNER_AI_ORCHESTRATION_HOST}/${config.URLS.GET_POINTER}/${virtualId}/${sessionId}?language=${lang}` + ); - // TODO: Just Opss icon - we are trying to fetch the score for you - setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); + // TODO: Just Opss icon - we are trying to fetch the score for you + setPoints(getPointersDetails?.data?.result?.totalLanguagePoints || 0); + } let userState = Number.isInteger( Number(resLessons.data?.result?.result?.lesson)
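The recurring change across AssesmentEnd, DiscoverEnd, DiscoverSentance, Mechanics3/4, VoiceAnalyser, and Practice is the new usePreloadAudio hook added in src/hooks/usePreloadAudio.js: fetch the audio asset once, cache it as a blob object URL, and hand that URL to new Audio(...) at play time so a click does not wait on a network fetch. A minimal sketch of that pattern, assuming React and a bundler that resolves audio imports to URLs; the cancelled flag and the locally captured objectUrl in the cleanup are choices of this sketch, not a claim about the committed hook:

import { useEffect, useState } from "react";

// Preload an audio asset and return a blob object URL once it is ready.
const usePreloadAudio = (audioUrl) => {
  const [audioSrc, setAudioSrc] = useState(null);

  useEffect(() => {
    let objectUrl = null;
    let cancelled = false; // sketch-only guard against setting state after unmount

    const preloadAudio = async () => {
      try {
        const response = await fetch(audioUrl);
        const audioBlob = await response.blob();
        objectUrl = URL.createObjectURL(audioBlob);
        if (!cancelled) setAudioSrc(objectUrl);
      } catch (error) {
        console.error("Error preloading audio:", error);
      }
    };

    preloadAudio();

    // Revoke the object URL created by this effect run.
    return () => {
      cancelled = true;
      if (objectUrl) URL.revokeObjectURL(objectUrl);
    };
  }, [audioUrl]);

  return audioSrc;
};

export default usePreloadAudio;

Consumers then guard playback on the preloaded source, as AssesmentEnd and DiscoverEnd do: const src = usePreloadAudio(LevelCompleteAudio); if (src) new Audio(src).play();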