From d90385ec7d469b2d7c52a5d377ddcfc97ae1f921 Mon Sep 17 00:00:00 2001
From: optrader
Date: Thu, 16 Oct 2025 17:04:08 +0900
Subject: [PATCH] [251016] feat: WebSpeech API
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

🕐 Commit time: 2025. 10. 16. 17:04:05

📊 Change statistics:
  • Total files: 13
  • Added: +558 lines
  • Deleted: -91 lines

📁 Added files:
  + com.twin.app.shoptime/src/actions/webSpeechActions.js
  + com.twin.app.shoptime/src/hooks/useWebSpeech.js
  + com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js
  + com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx
  + com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less

📝 Modified files:
  ~ com.twin.app.shoptime/src/actions/actionTypes.js
  ~ com.twin.app.shoptime/src/actions/voiceActions.js
  ~ com.twin.app.shoptime/src/reducers/voiceReducer.js
  ~ com.twin.app.shoptime/src/views/SearchPanel/SearchPanel.new.jsx
  ~ com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx
  ~ com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx
  ~ com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.module.less
  ~ com.twin.app.shoptime/web-speech.md

🔧 Function changes:

📄 com.twin.app.shoptime/src/actions/voiceActions.js (javascript):
  🔄 Modified: addLog(), handleScrollIntent()

📄 com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx (javascript):
  ✅ Added: Spottable()

📄 com.twin.app.shoptime/web-speech.md (md file):
  ✅ Added: Framework()
  🔄 Modified: onresult()
  ❌ Deleted: Framework()

📄 com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js (javascript):
  ✅ Added: WebSpeechService()

📄 com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx (javascript):
  ✅ Added: handleTalkAgainClick()

📄 com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less (unknown):
  ✅ Added: translateY()

🔧 Key changes:
  • Strengthened type-system stability
  • Improved core business logic
  • Improved development docs and guides
  • Improved the API service layer
---
 .../src/actions/actionTypes.js                  |   8 +
 .../src/actions/voiceActions.js                 | 228 +++++++++++++--
 .../src/actions/webSpeechActions.js             | 129 +++++++++
 .../src/hooks/useWebSpeech.js                   |  76 +++++
 .../src/reducers/voiceReducer.js                |  70 +++++
 .../services/webSpeech/WebSpeechService.js      | 225 +++++++++++++++
 .../src/views/SearchPanel/SearchPanel.new.jsx   |  27 +-
 .../VoiceInputOverlay/VoiceInputOverlay.jsx     | 268 ++++++++++++++----
 .../VoiceInputOverlay/modes/VoiceResponse.jsx   |  62 ++++
 .../modes/VoiceResponse.module.less             |  99 +++++++
 .../src/views/VoicePanel/VoicePanel.jsx         |  74 +++++
 .../views/VoicePanel/VoicePanel.module.less     |   8 +-
 com.twin.app.shoptime/web-speech.md             |  16 +-
 13 files changed, 1200 insertions(+), 90 deletions(-)
 create mode 100644 com.twin.app.shoptime/src/actions/webSpeechActions.js
 create mode 100644 com.twin.app.shoptime/src/hooks/useWebSpeech.js
 create mode 100644 com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js
 create mode 100644 com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx
 create mode 100644 com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less

diff --git a/com.twin.app.shoptime/src/actions/actionTypes.js b/com.twin.app.shoptime/src/actions/actionTypes.js
index ecfa24b6..9e555e0c 100644
---
a/com.twin.app.shoptime/src/actions/actionTypes.js +++ b/com.twin.app.shoptime/src/actions/actionTypes.js @@ -294,4 +294,12 @@ export const types = { VOICE_ADD_LOG: 'VOICE_ADD_LOG', VOICE_CLEAR_LOGS: 'VOICE_CLEAR_LOGS', VOICE_STT_TEXT_RECEIVED: 'VOICE_STT_TEXT_RECEIVED', // STT ํ…์ŠคํŠธ ์ˆ˜์‹  + + // ๐Ÿ”ฝ Web Speech API ๊ด€๋ จ + WEB_SPEECH_INITIALIZED: 'WEB_SPEECH_INITIALIZED', + WEB_SPEECH_START: 'WEB_SPEECH_START', + WEB_SPEECH_INTERIM_RESULT: 'WEB_SPEECH_INTERIM_RESULT', + WEB_SPEECH_END: 'WEB_SPEECH_END', + WEB_SPEECH_ERROR: 'WEB_SPEECH_ERROR', + WEB_SPEECH_CLEANUP: 'WEB_SPEECH_CLEANUP', }; diff --git a/com.twin.app.shoptime/src/actions/voiceActions.js b/com.twin.app.shoptime/src/actions/voiceActions.js index c7d77fd0..77c1068c 100644 --- a/com.twin.app.shoptime/src/actions/voiceActions.js +++ b/com.twin.app.shoptime/src/actions/voiceActions.js @@ -65,14 +65,45 @@ export const registerVoiceFramework = () => (dispatch, getState) => { voiceHandler = lunaSend.registerVoiceConductor({ onSuccess: (res) => { - console.log('[Voice] Response from voice framework:', res); + console.log('[Voice] โญ Response from voice framework:', res); + console.log('[Voice] Response details:', { + subscribed: res.subscribed, + returnValue: res.returnValue, + command: res.command, + voiceTicket: res.voiceTicket, + action: res.action, + fullResponse: JSON.stringify(res), + }); - // Log all responses - dispatch(addLog('RESPONSE', 'Voice Framework Response', res, true)); + // โญ VoicePanel UI์— ํ‘œ์‹œ๋˜๋Š” ๋กœ๊ทธ ์ถ”๊ฐ€ + dispatch( + addLog( + 'RESPONSE', + '[VoiceConductor] Response Received', + { + ...res, + _diagnostics: { + subscribed: res.subscribed, + hasCommand: !!res.command, + hasVoiceTicket: !!res.voiceTicket, + hasAction: !!res.action, + timestamp: new Date().toISOString(), + }, + }, + true + ) + ); // Initial registration response if (res.subscribed && res.returnValue && !res.command) { console.log('[Voice] Registration successful'); + dispatch( + addLog('ACTION', '[Voice] โœ… Registration Successful', { + message: 'Successfully registered with voice framework', + subscribed: true, + waiting: 'Waiting for setContext command from voice framework...', + }) + ); dispatch({ type: types.VOICE_REGISTER_SUCCESS, payload: { handler: voiceHandler }, @@ -83,9 +114,11 @@ export const registerVoiceFramework = () => (dispatch, getState) => { if (res.command === 'setContext' && res.voiceTicket) { console.log('[Voice] setContext command received, ticket:', res.voiceTicket); dispatch( - addLog('COMMAND', 'setContext Command Received', { + addLog('COMMAND', '[VoiceConductor] setContext Command Received', { command: res.command, voiceTicket: res.voiceTicket, + message: 'Voice framework is requesting supported intents', + nextAction: 'Sending inAppIntents to voice framework...', }) ); dispatch({ @@ -103,12 +136,13 @@ export const registerVoiceFramework = () => (dispatch, getState) => { // โญ ์ค‘์š”: performAction ์ˆ˜์‹  ์„ฑ๊ณต ๋กœ๊ทธ (๋ช…ํ™•ํ•˜๊ฒŒ) dispatch( - addLog('COMMAND', 'โœ… performAction RECEIVED!', { + addLog('COMMAND', '[VoiceConductor] โœ…โœ…โœ… performAction RECEIVED!', { message: 'โœ… SUCCESS! 
Voice framework sent performAction event.', command: res.command, action: res.action, intent: res.action?.intent, value: res.action?.value || res.action?.itemId, + timestamp: new Date().toISOString(), }) ); @@ -128,7 +162,22 @@ export const registerVoiceFramework = () => (dispatch, getState) => { onFailure: (err) => { console.error('[Voice] Registration failed:', err); - dispatch(addLog('ERROR', 'Registration Failed', err, false)); + dispatch( + addLog( + 'ERROR', + '[VoiceConductor] โŒ Registration Failed', + { + ...err, + message: 'Failed to register with voice framework', + possibleReasons: [ + 'Voice framework service might not be available', + 'Permission denied for voice framework access', + 'webOS version might not support voice framework', + ], + }, + false + ) + ); dispatch({ type: types.VOICE_REGISTER_FAILURE, payload: err, @@ -152,6 +201,27 @@ export const sendVoiceIntents = (voiceTicket) => (dispatch, getState) => { // Define the intents that this app supports // This is a sample configuration - customize based on your app's features + + // โญ ๋””๋ฒ„๊น… ํŒ: UseIME์ด ์•ˆ๋˜๋ฉด ๋จผ์ € Select/Scroll ํ…Œ์ŠคํŠธ + console.log('[Voice] โš ๏ธ DEBUGGING TIP:'); + console.log(' 1. UseIME might not be supported on all webOS versions'); + console.log(' 2. Try saying "Search" or "Home" to test Select intent first'); + console.log(' 3. If Select works but UseIME does not, UseIME is not supported'); + console.log(' 4. Check webOS system logs: journalctl -u voiceconductor'); + + // VoicePanel UI์—๋„ ํ‘œ์‹œ + dispatch( + addLog('ACTION', '[Voice] ๐Ÿ”ง Preparing Intents', { + message: 'About to send supported intents to voice framework', + debuggingTips: [ + 'UseIME might not be supported on all webOS versions', + 'Try saying "Search" or "Home" to test Select intent first', + 'If Select works but UseIME does not, UseIME is not supported', + 'Check webOS system logs: ssh to TV, then: journalctl -u voiceconductor -f', + ], + }) + ); + const inAppIntents = [ // UseIME Intent - STT ํ…์ŠคํŠธ ๋ฐ›๊ธฐ { @@ -220,7 +290,18 @@ export const sendVoiceIntents = (voiceTicket) => (dispatch, getState) => { onSuccess: (res) => { console.log('[Voice] Voice context set successfully:', res); // Log successful context setting - dispatch(addLog('RESPONSE', 'Set Voice Context Success', res, true)); + dispatch( + addLog( + 'RESPONSE', + '[Voice] โœ… Set Voice Context Success', + { + ...res, + message: 'Successfully sent intents to voice framework', + intentsCount: inAppIntents.length, + }, + true + ) + ); dispatch({ type: types.VOICE_SET_CONTEXT_SUCCESS, payload: res, @@ -228,10 +309,21 @@ export const sendVoiceIntents = (voiceTicket) => (dispatch, getState) => { // โญ ์ค‘์š”: Voice input ๋Œ€๊ธฐ ์ƒํƒœ ๋กœ๊ทธ dispatch( - addLog('ACTION', '๐ŸŽค Ready for Voice Input', { + addLog('ACTION', '[Voice] ๐ŸŽค Ready for Voice Input', { message: 'Context set successfully. 
Press the MIC button on remote and speak.', nextStep: 'Waiting for performAction event...', voiceTicket: voiceTicket, + testInstructions: { + useIME: 'Say anything for STT (e.g., "iPhone 15 Pro")', + select: 'Say "Search", "Home", "Cart", or "My Page"', + scroll: 'Say "Scroll Up" or "Scroll Down"', + }, + debuggingTips: [ + 'If no performAction arrives, try Select intent first (say "Search")', + 'UseIME might not be supported on this webOS version', + 'Check if VoicePanel stays on top (isOnTop must be true)', + 'Check system logs: ssh root@TV_IP, then: journalctl -u voiceconductor -f', + ], }) ); @@ -241,26 +333,82 @@ export const sendVoiceIntents = (voiceTicket) => (dispatch, getState) => { // lastSTTText๊ฐ€ ์—…๋ฐ์ดํŠธ๋˜์ง€ ์•Š์•˜์œผ๋ฉด (performAction์ด ์•ˆ ์™”์œผ๋ฉด) if (!currentState.lastSTTText || currentState.sttTimestamp < Date.now() - 14000) { dispatch( - addLog('ERROR', 'โš ๏ธ No performAction received yet', { + addLog('ERROR', '[Voice] โš ๏ธ No performAction received yet', { message: 'performAction event was not received within 15 seconds after setContext.', possibleReasons: [ '1. Did you press the MIC button on the remote control?', '2. Did you speak after pressing the MIC button?', '3. UseIME intent might not be supported on this webOS version', '4. Voice framework might not be routing events correctly', + '5. Subscription might have been cancelled or disconnected', ], + diagnostics: { + subscriptionActive: currentState.isRegistered, + voiceTicket: currentState.voiceTicket, + handler: currentState.voiceHandler ? 'exists' : 'null', + }, suggestion: 'Try pressing the remote MIC button and speaking clearly. Check VoicePanel logs for performAction event.', }) ); } }, 15000); + + // โญ Subscription ํ—ฌ์Šค์ฒดํฌ: 3์ดˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ (์ด 30์ดˆ) + let healthCheckCount = 0; + const healthCheckInterval = setInterval(() => { + healthCheckCount++; + const currentState = getState().voice; + + console.log(`[Voice] ๐Ÿฅ Subscription Health Check #${healthCheckCount}:`, { + isRegistered: currentState.isRegistered, + hasVoiceTicket: !!currentState.voiceTicket, + voiceTicket: currentState.voiceTicket, + hasHandler: !!currentState.voiceHandler, + receivedSTT: !!currentState.lastSTTText, + timestamp: new Date().toISOString(), + }); + + dispatch( + addLog('ACTION', `[Voice] ๐Ÿฅ Health Check #${healthCheckCount}/10`, { + isRegistered: currentState.isRegistered, + hasVoiceTicket: !!currentState.voiceTicket, + voiceTicket: currentState.voiceTicket, + hasHandler: !!currentState.voiceHandler, + receivedSTT: !!currentState.lastSTTText, + message: `Subscription health check ${healthCheckCount}/10 - Still waiting for voice input...`, + status: currentState.isRegistered ? 
'โœ… Active' : 'โŒ Disconnected', + }) + ); + + // 10๋ฒˆ ์ฒดํฌํ•˜๋ฉด ์ค‘๋‹จ (30์ดˆ) + if (healthCheckCount >= 10 || currentState.lastSTTText) { + clearInterval(healthCheckInterval); + console.log('[Voice] Health check completed or STT received'); + } + }, 3000); }, onFailure: (err) => { console.error('[Voice] Failed to set voice context:', err); // Log failed context setting - dispatch(addLog('ERROR', 'Set Voice Context Failed', err, false)); + dispatch( + addLog( + 'ERROR', + '[Voice] โŒ Set Voice Context Failed', + { + ...err, + message: 'Failed to send intents to voice framework', + voiceTicket: voiceTicket, + possibleReasons: [ + 'Invalid voiceTicket', + 'Voice framework service disconnected', + 'Malformed intent data', + ], + }, + false + ) + ); dispatch({ type: types.VOICE_SET_CONTEXT_FAILURE, payload: err, @@ -282,10 +430,11 @@ export const handleVoiceAction = (voiceTicket, action) => (dispatch, getState) = // Log that we're processing the action dispatch( - addLog('ACTION', 'Processing Voice Action', { + addLog('ACTION', '[Voice] ๐Ÿ”„ Processing Voice Action', { intent: action.intent, itemId: action.itemId, fullAction: action, + message: `Processing ${action.intent} intent action`, }) ); @@ -299,12 +448,13 @@ export const handleVoiceAction = (voiceTicket, action) => (dispatch, getState) = // ๐Ÿ“ ๋กœ๊ทธ: STT ํ…์ŠคํŠธ ์ถ”์ถœ ๊ณผ์ • dispatch( - addLog('ACTION', '๐ŸŽค STT Text Extracted (Speech โ†’ Text)', { + addLog('ACTION', '[Voice] ๐ŸŽค STT Text Extracted (Speech โ†’ Text)', { intent: 'UseIME', extractedText: action.value, textLength: action.value.length, timestamp: new Date().toISOString(), description: 'User speech has been converted to text successfully', + message: `Extracted text: "${action.value}"`, }) ); @@ -316,10 +466,11 @@ export const handleVoiceAction = (voiceTicket, action) => (dispatch, getState) = // ๐Ÿ“ ๋กœ๊ทธ: Redux ์ €์žฅ ์™„๋ฃŒ dispatch( - addLog('ACTION', 'โœ… STT Text Saved to Redux', { + addLog('ACTION', '[Voice] โœ… STT Text Saved to Redux', { savedText: action.value, reduxAction: 'VOICE_STT_TEXT_RECEIVED', state: 'lastSTTText updated', + message: 'STT text has been saved to Redux state and is now available', }) ); @@ -356,10 +507,12 @@ export const handleVoiceAction = (voiceTicket, action) => (dispatch, getState) = // Log the processing result before reporting dispatch( - addLog('ACTION', 'Action Processing Complete', { + addLog('ACTION', `[Voice] โœ… Action Processing Complete`, { result, feedback, - action: action.itemId, + action: action.itemId || action.intent, + message: result ? 'Action processed successfully' : 'Action processing failed', + status: result ? 
'โœ… Success' : 'โŒ Failed', }) ); @@ -435,10 +588,11 @@ export const reportActionResult = // Log the report request dispatch( - addLog('ACTION', 'Reporting Action Result', { + addLog('ACTION', '[Voice] ๐Ÿ“ค Reporting Action Result', { voiceTicket, result, feedback, + message: 'Sending action result back to voice framework', }) ); @@ -446,7 +600,18 @@ export const reportActionResult = onSuccess: (res) => { console.log('[Voice] Action result reported successfully:', res); // Log successful report - dispatch(addLog('RESPONSE', 'Report Action Result Success', res, true)); + dispatch( + addLog( + 'RESPONSE', + '[Voice] โœ… Report Action Result Success', + { + ...res, + result: result, + message: 'Successfully reported action result to voice framework', + }, + true + ) + ); dispatch({ type: types.VOICE_REPORT_RESULT_SUCCESS, payload: { result, feedback }, @@ -456,7 +621,18 @@ export const reportActionResult = onFailure: (err) => { console.error('[Voice] Failed to report action result:', err); // Log failed report - dispatch(addLog('ERROR', 'Report Action Result Failed', err, false)); + dispatch( + addLog( + 'ERROR', + '[Voice] โŒ Report Action Result Failed', + { + ...err, + message: 'Failed to report action result to voice framework', + voiceTicket: voiceTicket, + }, + false + ) + ); dispatch({ type: types.VOICE_REPORT_RESULT_FAILURE, payload: err, @@ -477,9 +653,23 @@ export const unregisterVoiceFramework = () => (dispatch, getState) => { const { voiceHandler } = getState().voice; const isTV = typeof window === 'object' && window.PalmSystem; + console.log('[Voice] Unregistering from voice framework'); + + dispatch( + addLog('ACTION', '[Voice] ๐Ÿ”Œ Unregistering Voice Framework', { + message: 'Cancelling voice framework subscription', + hasHandler: !!voiceHandler, + isTV: isTV, + }) + ); + if (voiceHandler && isTV) { - console.log('[Voice] Unregistering from voice framework'); lunaSend.cancelVoiceRegistration(voiceHandler); + dispatch( + addLog('ACTION', '[Voice] โœ… Voice Framework Unregistered', { + message: 'Successfully cancelled voice framework subscription', + }) + ); } // Always clear state on unmount, regardless of platform diff --git a/com.twin.app.shoptime/src/actions/webSpeechActions.js b/com.twin.app.shoptime/src/actions/webSpeechActions.js new file mode 100644 index 00000000..536467c6 --- /dev/null +++ b/com.twin.app.shoptime/src/actions/webSpeechActions.js @@ -0,0 +1,129 @@ +// src/actions/webSpeechActions.js + +import { types } from './actionTypes'; +import webSpeechService from '../services/webSpeech/WebSpeechService'; + +/** + * Web Speech ์ดˆ๊ธฐํ™” ๋ฐ ์‹œ์ž‘ + * @param {Object} config - ์–ธ์–ด ๋ฐ ์„ค์ • + */ +export const initializeWebSpeech = + (config = {}) => + (dispatch) => { + console.log('[WebSpeechActions] Initializing Web Speech...'); + + // ์ง€์› ์—ฌ๋ถ€ ํ™•์ธ + if (!webSpeechService.isSupported) { + const error = 'Web Speech API is not supported in this browser'; + console.error('[WebSpeechActions]', error); + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: { error, message: error }, + }); + return false; + } + + // ์ดˆ๊ธฐํ™” + const initialized = webSpeechService.initialize({ + lang: config.lang || 'ko-KR', + continuous: config.continuous || false, + interimResults: config.interimResults !== false, + maxAlternatives: config.maxAlternatives || 1, + }); + + if (!initialized) { + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: { error: 'Failed to initialize', message: 'Failed to initialize Web Speech' }, + }); + return false; + } + + // ์ด๋ฒคํŠธ 
ํ•ธ๋“ค๋Ÿฌ ๋“ฑ๋ก + webSpeechService.on('start', () => { + dispatch({ + type: types.WEB_SPEECH_START, + }); + }); + + webSpeechService.on('result', (result) => { + console.log('[WebSpeechActions] Result:', result); + + // Interim ๊ฒฐ๊ณผ (์ค‘๊ฐ„ ๊ฒฐ๊ณผ) + if (!result.isFinal) { + dispatch({ + type: types.WEB_SPEECH_INTERIM_RESULT, + payload: result.transcript, + }); + } + // Final ๊ฒฐ๊ณผ (์ตœ์ข… ๊ฒฐ๊ณผ) + else { + dispatch({ + type: types.VOICE_STT_TEXT_RECEIVED, // ๊ธฐ์กด VUI์™€ ๋™์ผํ•œ ์•ก์…˜ ์‚ฌ์šฉ + payload: result.transcript, + }); + } + }); + + webSpeechService.on('error', (errorInfo) => { + console.error('[WebSpeechActions] Error:', errorInfo); + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: errorInfo, + }); + }); + + webSpeechService.on('end', () => { + dispatch({ + type: types.WEB_SPEECH_END, + }); + }); + + dispatch({ + type: types.WEB_SPEECH_INITIALIZED, + }); + + return true; + }; + +/** + * ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + */ +export const startWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Starting recognition...'); + const started = webSpeechService.start(); + + if (!started) { + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: { error: 'Failed to start', message: 'Failed to start recognition' }, + }); + } +}; + +/** + * ์Œ์„ฑ ์ธ์‹ ์ค‘์ง€ + */ +export const stopWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Stopping recognition...'); + webSpeechService.stop(); +}; + +/** + * ์Œ์„ฑ ์ธ์‹ ์ค‘๋‹จ + */ +export const abortWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Aborting recognition...'); + webSpeechService.abort(); +}; + +/** + * ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ + */ +export const cleanupWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Cleaning up...'); + webSpeechService.cleanup(); + dispatch({ + type: types.WEB_SPEECH_CLEANUP, + }); +}; diff --git a/com.twin.app.shoptime/src/hooks/useWebSpeech.js b/com.twin.app.shoptime/src/hooks/useWebSpeech.js new file mode 100644 index 00000000..fa75decb --- /dev/null +++ b/com.twin.app.shoptime/src/hooks/useWebSpeech.js @@ -0,0 +1,76 @@ +// src/hooks/useWebSpeech.js + +import { useEffect, useCallback } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { + initializeWebSpeech, + startWebSpeech, + stopWebSpeech, + cleanupWebSpeech, +} from '../actions/webSpeechActions'; + +/** + * Web Speech API Hook + * - SearchPanel์—์„œ ์‚ฌ์šฉํ•˜๋Š” ์Œ์„ฑ ์ž…๋ ฅ Hook + * - VoiceInputOverlay์™€ ํ†ตํ•ฉ + * + * @param {boolean} isActive - Hook ํ™œ์„ฑํ™” ์—ฌ๋ถ€ (์˜ˆ: SearchPanel์ด foreground์ธ์ง€) + * @param {function} onSTTText - STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์ฝœ๋ฐฑ + * @param {Object} config - Web Speech ์„ค์ • + */ +export const useWebSpeech = (isActive, onSTTText, config = {}) => { + const dispatch = useDispatch(); + const { lastSTTText, sttTimestamp, webSpeech } = useSelector((state) => state.voice); + + // Web Speech ์ดˆ๊ธฐํ™” + useEffect(() => { + if (isActive) { + console.log('[useWebSpeech] Initializing Web Speech API'); + dispatch( + initializeWebSpeech({ + lang: config.lang || 'ko-KR', + continuous: config.continuous || false, + interimResults: config.interimResults !== false, + }) + ); + } + + // Cleanup on unmount only + return () => { + if (isActive) { + console.log('[useWebSpeech] Cleaning up Web Speech API (unmount)'); + dispatch(cleanupWebSpeech()); + } + }; + }, [isActive, dispatch, config.lang, config.continuous, config.interimResults]); + + // STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์ฒ˜๋ฆฌ + useEffect(() => { + if (lastSTTText && sttTimestamp) { + 
console.log('[useWebSpeech] STT text received:', lastSTTText); + if (onSTTText) { + onSTTText(lastSTTText); + } + } + }, [lastSTTText, sttTimestamp, onSTTText]); + + // ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘/์ค‘์ง€ ํ•จ์ˆ˜ ๋ฐ˜ํ™˜ + const startListening = useCallback(() => { + dispatch(startWebSpeech()); + }, [dispatch]); + + const stopListening = useCallback(() => { + dispatch(stopWebSpeech()); + }, [dispatch]); + + return { + isInitialized: webSpeech.isInitialized, + isListening: webSpeech.isListening, + interimText: webSpeech.interimText, + error: webSpeech.error, + startListening, + stopListening, + }; +}; + +export default useWebSpeech; diff --git a/com.twin.app.shoptime/src/reducers/voiceReducer.js b/com.twin.app.shoptime/src/reducers/voiceReducer.js index b73d7339..325d6b30 100644 --- a/com.twin.app.shoptime/src/reducers/voiceReducer.js +++ b/com.twin.app.shoptime/src/reducers/voiceReducer.js @@ -30,6 +30,14 @@ const initialState = { // Logging for debugging logs: [], logIdCounter: 0, + + // Web Speech API state + webSpeech: { + isInitialized: false, + isListening: false, + interimText: null, + error: null, + }, }; export const voiceReducer = (state = initialState, action) => { @@ -133,6 +141,68 @@ export const voiceReducer = (state = initialState, action) => { sttTimestamp: new Date().toISOString(), }; + // Web Speech API cases + case types.WEB_SPEECH_INITIALIZED: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isInitialized: true, + error: null, + }, + }; + + case types.WEB_SPEECH_START: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: true, + interimText: null, + error: null, + }, + }; + + case types.WEB_SPEECH_INTERIM_RESULT: + return { + ...state, + webSpeech: { + ...state.webSpeech, + interimText: action.payload, + }, + }; + + case types.WEB_SPEECH_END: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: false, + interimText: null, + }, + }; + + case types.WEB_SPEECH_ERROR: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: false, + error: action.payload, + }, + }; + + case types.WEB_SPEECH_CLEANUP: + return { + ...state, + webSpeech: { + isInitialized: false, + isListening: false, + interimText: null, + error: null, + }, + }; + default: return state; } diff --git a/com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js b/com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js new file mode 100644 index 00000000..87688873 --- /dev/null +++ b/com.twin.app.shoptime/src/services/webSpeech/WebSpeechService.js @@ -0,0 +1,225 @@ +// src/services/webSpeech/WebSpeechService.js + +/** + * Web Speech API ๋ž˜ํผ ์„œ๋น„์Šค + * - SpeechRecognition ๊ฐ์ฒด ๊ด€๋ฆฌ + * - ์ด๋ฒคํŠธ ํ•ธ๋“ค๋ง + * - ์ƒํƒœ ๊ด€๋ฆฌ + */ +class WebSpeechService { + constructor() { + this.recognition = null; + this.isSupported = this.checkSupport(); + this.isListening = false; + this.callbacks = { + onResult: null, + onError: null, + onStart: null, + onEnd: null, + }; + } + + /** + * Web Speech API ์ง€์› ์—ฌ๋ถ€ ํ™•์ธ + */ + checkSupport() { + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + return !!SpeechRecognition; + } + + /** + * Speech Recognition ์ดˆ๊ธฐํ™” + * @param {Object} config - ์„ค์ • ์˜ต์…˜ + * @param {string} config.lang - ์–ธ์–ด ์ฝ”๋“œ (์˜ˆ: 'ko-KR', 'en-US') + * @param {boolean} config.continuous - ์—ฐ์† ์ธ์‹ ์—ฌ๋ถ€ + * @param {boolean} config.interimResults - ์ค‘๊ฐ„ ๊ฒฐ๊ณผ ํ‘œ์‹œ ์—ฌ๋ถ€ + * @param {number} config.maxAlternatives - ๋Œ€์ฒด ๊ฒฐ๊ณผ ์ตœ๋Œ€ ๊ฐœ์ˆ˜ + */ + 
initialize(config = {}) { + if (!this.isSupported) { + console.error('[WebSpeech] Speech Recognition not supported'); + return false; + } + + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + this.recognition = new SpeechRecognition(); + + // ์„ค์ • ์ ์šฉ + this.recognition.lang = config.lang || 'ko-KR'; + this.recognition.continuous = config.continuous || false; + this.recognition.interimResults = config.interimResults !== false; // default true + this.recognition.maxAlternatives = config.maxAlternatives || 1; + + // ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ๋“ฑ๋ก + this.setupEventHandlers(); + + console.log('[WebSpeech] Initialized with config:', { + lang: this.recognition.lang, + continuous: this.recognition.continuous, + interimResults: this.recognition.interimResults, + }); + + return true; + } + + /** + * ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ์„ค์ • + */ + setupEventHandlers() { + // ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + this.recognition.onstart = () => { + console.log('[WebSpeech] Recognition started'); + this.isListening = true; + if (this.callbacks.onStart) { + this.callbacks.onStart(); + } + }; + + // ์Œ์„ฑ ์ธ์‹ ๊ฒฐ๊ณผ + this.recognition.onresult = (event) => { + const results = event.results; + const lastResult = results[results.length - 1]; + const transcript = lastResult[0].transcript; + const isFinal = lastResult.isFinal; + const confidence = lastResult[0].confidence; + + console.log('[WebSpeech] Result:', { transcript, isFinal, confidence }); + + if (this.callbacks.onResult) { + this.callbacks.onResult({ + transcript, + isFinal, + confidence, + alternatives: Array.from(lastResult).map((alt) => ({ + transcript: alt.transcript, + confidence: alt.confidence, + })), + }); + } + }; + + // ์—๋Ÿฌ ์ฒ˜๋ฆฌ + this.recognition.onerror = (event) => { + console.error('[WebSpeech] Recognition error:', event.error); + this.isListening = false; + + if (this.callbacks.onError) { + this.callbacks.onError({ + error: event.error, + message: this.getErrorMessage(event.error), + }); + } + }; + + // ์Œ์„ฑ ์ธ์‹ ์ข…๋ฃŒ + this.recognition.onend = () => { + console.log('[WebSpeech] Recognition ended'); + this.isListening = false; + + if (this.callbacks.onEnd) { + this.callbacks.onEnd(); + } + }; + } + + /** + * ์—๋Ÿฌ ๋ฉ”์‹œ์ง€ ๋ฒˆ์—ญ + */ + getErrorMessage(error) { + const errorMessages = { + 'no-speech': '์Œ์„ฑ์ด ๊ฐ์ง€๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.', + 'audio-capture': '๋งˆ์ดํฌ์— ์ ‘๊ทผํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.', + 'not-allowed': '๋งˆ์ดํฌ ์‚ฌ์šฉ ๊ถŒํ•œ์ด ๊ฑฐ๋ถ€๋˜์—ˆ์Šต๋‹ˆ๋‹ค.', + network: '๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค.', + aborted: '์Œ์„ฑ ์ธ์‹์ด ์ค‘๋‹จ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.', + 'service-not-allowed': '์Œ์„ฑ ์ธ์‹ ์„œ๋น„์Šค๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.', + }; + + return errorMessages[error] || `์•Œ ์ˆ˜ ์—†๋Š” ์˜ค๋ฅ˜: ${error}`; + } + + /** + * ์ฝœ๋ฐฑ ๋“ฑ๋ก + * @param {string} event - ์ด๋ฒคํŠธ ์ด๋ฆ„ ('result', 'error', 'start', 'end') + * @param {Function} callback - ์ฝœ๋ฐฑ ํ•จ์ˆ˜ + */ + on(event, callback) { + const eventKey = `on${event.charAt(0).toUpperCase() + event.slice(1)}`; + if (Object.prototype.hasOwnProperty.call(this.callbacks, eventKey)) { + this.callbacks[eventKey] = callback; + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + */ + start() { + if (!this.recognition) { + console.error('[WebSpeech] Recognition not initialized. 
Call initialize() first.'); + return false; + } + + if (this.isListening) { + console.warn('[WebSpeech] Already listening'); + return false; + } + + try { + this.recognition.start(); + console.log('[WebSpeech] Starting recognition...'); + return true; + } catch (error) { + console.error('[WebSpeech] Failed to start:', error); + return false; + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์ค‘์ง€ + */ + stop() { + if (!this.recognition) { + return; + } + + if (!this.isListening) { + console.warn('[WebSpeech] Not listening'); + return; + } + + try { + this.recognition.stop(); + console.log('[WebSpeech] Stopping recognition...'); + } catch (error) { + console.error('[WebSpeech] Failed to stop:', error); + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์ค‘๋‹จ (์ฆ‰์‹œ ์ข…๋ฃŒ) + */ + abort() { + if (this.recognition) { + this.recognition.abort(); + this.isListening = false; + } + } + + /** + * ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ + */ + cleanup() { + this.abort(); + this.callbacks = { + onResult: null, + onError: null, + onStart: null, + onEnd: null, + }; + } +} + +// Singleton ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ +const webSpeechService = new WebSpeechService(); + +export default webSpeechService; diff --git a/com.twin.app.shoptime/src/views/SearchPanel/SearchPanel.new.jsx b/com.twin.app.shoptime/src/views/SearchPanel/SearchPanel.new.jsx index 345a3096..5f7506d6 100644 --- a/com.twin.app.shoptime/src/views/SearchPanel/SearchPanel.new.jsx +++ b/com.twin.app.shoptime/src/views/SearchPanel/SearchPanel.new.jsx @@ -398,6 +398,11 @@ export default function SearchPanel({ panelInfo, isOnTop, spotlightId, scrollOpt if (!isOnTopRef.current) { return; } + // VoiceInputOverlay๊ฐ€ ์—ด๋ ค์žˆ์œผ๋ฉด ๋จผ์ € ๋‹ซ๊ธฐ + if (isVoiceOverlayVisible) { + setIsVoiceOverlayVisible(false); + return; + } if (searchQuery === null || searchQuery === '') { dispatch(popPanel(panel_names.SEARCH_PANEL)); } else { @@ -406,7 +411,7 @@ export default function SearchPanel({ panelInfo, isOnTop, spotlightId, scrollOpt dispatch(resetSearch()); Spotlight.focus(SPOTLIGHT_IDS.SEARCH_INPUT_BOX); } - }, [searchQuery, dispatch]); + }, [searchQuery, dispatch, isVoiceOverlayVisible]); const onFocusedContainerId = useCallback( (containerId) => { @@ -759,15 +764,17 @@ export default function SearchPanel({ panelInfo, isOnTop, spotlightId, scrollOpt /> */} {/* Voice Input Overlay */} - setIsVoiceOverlayVisible(false)} - mode={voiceMode} - suggestions={voiceSuggestions} - searchQuery={searchQuery} - onSearchChange={handleSearchChange} - onSearchSubmit={handleSearchSubmit} - /> + {isVoiceOverlayVisible && ( + setIsVoiceOverlayVisible(false)} + mode={voiceMode} + suggestions={voiceSuggestions} + searchQuery={searchQuery} + onSearchChange={handleSearchChange} + onSearchSubmit={handleSearchSubmit} + /> + )} ); } diff --git a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx index 7d681dc0..9119eb29 100644 --- a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx +++ b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx @@ -13,10 +13,12 @@ import micIcon from '../../../../assets/images/searchpanel/image-mic.png'; import { getShopperHouseSearch } from '../../../actions/searchActions'; import TFullPopup from '../../../components/TFullPopup/TFullPopup'; import TInput, { ICONS, KINDS } from '../../../components/TInput/TInput'; +import { useWebSpeech } from '../../../hooks/useWebSpeech'; import VoiceListening from './modes/VoiceListening'; 
import VoiceNotRecognized from './modes/VoiceNotRecognized'; import VoiceNotRecognizedCircle from './modes/VoiceNotRecognizedCircle'; import VoicePromptScreen from './modes/VoicePromptScreen'; +import VoiceResponse from './modes/VoiceResponse'; import css from './VoiceInputOverlay.module.less'; const OverlayContainer = SpotlightContainerDecorator( @@ -33,6 +35,7 @@ const SpottableMicButton = Spottable('div'); export const VOICE_MODES = { PROMPT: 'prompt', // Try saying ํ™”๋ฉด LISTENING: 'listening', // ๋“ฃ๋Š” ์ค‘ ํ™”๋ฉด + RESPONSE: 'response', // STT ํ…์ŠคํŠธ ํ‘œ์‹œ ํ™”๋ฉด MODE_3: 'mode3', // ์ถ”ํ›„ ์ถ”๊ฐ€ MODE_4: 'mode4', // ์ถ”ํ›„ ์ถ”๊ฐ€ }; @@ -59,13 +62,44 @@ const VoiceInputOverlay = ({ }) => { const dispatch = useDispatch(); const lastFocusedElement = useRef(null); + const listeningTimerRef = useRef(null); const [inputFocus, setInputFocus] = useState(false); const [micFocused, setMicFocused] = useState(false); const [micWebSpeechFocused, setMicWebSpeechFocused] = useState(false); - // ๋‚ด๋ถ€ ๋ชจ๋“œ ์ƒํƒœ ๊ด€๋ฆฌ (prompt -> listening -> close) + // ๋‚ด๋ถ€ ๋ชจ๋“œ ์ƒํƒœ ๊ด€๋ฆฌ (prompt -> listening -> response -> close) const [currentMode, setCurrentMode] = useState(mode); // ์Œ์„ฑ์ธ์‹ ์ž…๋ ฅ ๋ชจ๋“œ (VUI vs WebSpeech) const [voiceInputMode, setVoiceInputMode] = useState(null); + // STT ์‘๋‹ต ํ…์ŠคํŠธ ์ €์žฅ + const [sttResponseText, setSttResponseText] = useState(''); + + // Web Speech API Hook (WebSpeech ๋ชจ๋“œ์ผ ๋•Œ๋งŒ ํ™œ์„ฑํ™”) + const handleWebSpeechSTT = useCallback((sttText) => { + console.log('๐ŸŽค [VoiceInputOverlay] WebSpeech STT text received:', sttText); + + // ํƒ€์ด๋จธ ์ค‘์ง€ + if (listeningTimerRef.current) { + clearTimeout(listeningTimerRef.current); + listeningTimerRef.current = null; + } + + // STT ํ…์ŠคํŠธ ์ €์žฅ + setSttResponseText(sttText); + + // RESPONSE ๋ชจ๋“œ๋กœ ์ „ํ™˜ + setCurrentMode(VOICE_MODES.RESPONSE); + console.log('๐Ÿ“บ [VoiceInputOverlay] Switching to RESPONSE mode with text:', sttText); + }, []); + + const { isListening, interimText, startListening, stopListening, error } = useWebSpeech( + isVisible, // Overlay๊ฐ€ ์—ด๋ ค์žˆ์„ ๋•Œ๋งŒ ํ™œ์„ฑํ™” (voiceInputMode์™€ ๋ฌด๊ด€ํ•˜๊ฒŒ ์ดˆ๊ธฐํ™”) + handleWebSpeechSTT, + { + lang: 'en-US', + continuous: false, + interimResults: true, + } + ); // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: VoicePanel ๋…๋ฆฝ ํ…Œ์ŠคํŠธ ์‹œ ์ถฉ๋Œ ๋ฐฉ์ง€ // Redux์—์„œ voice ์ƒํƒœ ๊ฐ€์ ธ์˜ค๊ธฐ @@ -110,6 +144,55 @@ const VoiceInputOverlay = ({ // } // }, [lastSTTText, sttTimestamp, isVisible, onSearchChange, onClose]); + // WebSpeech Interim ํ…์ŠคํŠธ ๋กœ๊ทธ ์ถœ๋ ฅ + useEffect(() => { + if (interimText && voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH) { + console.log('๐Ÿ’ฌ [VoiceInputOverlay] WebSpeech Interim text:', interimText); + } + }, [interimText, voiceInputMode]); + + // WebSpeech ์—๋Ÿฌ ์ฒ˜๋ฆฌ + useEffect(() => { + if (error && voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH) { + console.error('โŒ [VoiceInputOverlay] WebSpeech Error:', error); + } + }, [error, voiceInputMode]); + + // WebSpeech listening ์ƒํƒœ๊ฐ€ ์ข…๋ฃŒ๋˜์–ด๋„ 15์ดˆ ํƒ€์ด๋จธ๋Š” ๊ทธ๋Œ€๋กœ ์œ ์ง€ + // (์Œ์„ฑ ์ž…๋ ฅ์ด ๋๋‚˜๋„ listening ๋ชจ๋“œ๋Š” 15์ดˆ๊ฐ„ ์œ ์ง€) + useEffect(() => { + if (!isListening && voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH) { + console.log('๐ŸŽค [VoiceInputOverlay] WebSpeech stopped, but LISTENING mode continues...'); + // ํƒ€์ด๋จธ ์ •๋ฆฌํ•˜์ง€ ์•Š์Œ - 15์ดˆ๊ฐ€ ๋๋‚  ๋•Œ๊นŒ์ง€ listening ๋ชจ๋“œ ์œ ์ง€ + } + }, [isListening, voiceInputMode]); + + // โ›” ๋…๋ฆฝ ํ…Œ์ŠคํŠธ: WebSpeech API ํ˜ธ์ถœ ๋น„ํ™œ์„ฑํ™” + // WebSpeech ๋ชจ๋“œ๋กœ ์ 
„ํ™˜๋˜๋ฉด ์ž๋™์œผ๋กœ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + // useEffect(() => { + // if ( + // voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH && + // currentMode === VOICE_MODES.LISTENING && + // !isListening + // ) { + // console.log('๐ŸŽ™๏ธ [VoiceInputOverlay] Auto-starting Web Speech API after mode change...'); + // startListening(); + + // // 15์ดˆ ํƒ€์ด๋จธ ์„ค์ • + // if (listeningTimerRef.current) { + // clearTimeout(listeningTimerRef.current); + // } + // listeningTimerRef.current = setTimeout(() => { + // console.log('โฐ [VoiceInputOverlay] 15์ดˆ ํƒ€์ž„์•„์›ƒ - WebSpeech ์ž๋™ ์ข…๋ฃŒ'); + // stopListening(); + // setCurrentMode(VOICE_MODES.PROMPT); + // setVoiceInputMode(null); + // }, 15000); // 15์ดˆ + // } + // }, [voiceInputMode, currentMode, isListening, startListening, stopListening]); + + // ์ด useEffect ์ œ๊ฑฐ - renderModeContent()์—์„œ ์ง์ ‘ ํŒ๋‹จํ•˜๋ฏ€๋กœ ๋ถˆํ•„์š” + // Overlay๊ฐ€ ์—ด๋ฆด ๋•Œ ํฌ์ปค์Šค๋ฅผ overlay ๋‚ด๋ถ€๋กœ ์ด๋™ useEffect(() => { if (isVisible) { @@ -126,7 +209,23 @@ const VoiceInputOverlay = ({ }, 100); } else { // Overlay๊ฐ€ ๋‹ซํž ๋•Œ ์›๋ž˜ ํฌ์ปค์Šค ๋ณต์› ๋ฐ ์ƒํƒœ ์ดˆ๊ธฐํ™” + + // ํƒ€์ด๋จธ ์ •๋ฆฌ + if (listeningTimerRef.current) { + clearTimeout(listeningTimerRef.current); + listeningTimerRef.current = null; + } + + // โ›” ๋…๋ฆฝ ํ…Œ์ŠคํŠธ: WebSpeech API ํ˜ธ์ถœ ๋น„ํ™œ์„ฑํ™” + // WebSpeech ์ค‘์ง€ (๋น„๋™๊ธฐ๋กœ ์ฒ˜๋ฆฌ) + // if (isListening) { + // stopListening(); + // } + + // ์ƒํƒœ ์ดˆ๊ธฐํ™” setVoiceInputMode(null); + setCurrentMode(VOICE_MODES.PROMPT); + if (lastFocusedElement.current) { setTimeout(() => { Spotlight.focus(lastFocusedElement.current); @@ -182,15 +281,59 @@ const VoiceInputOverlay = ({ } }, []); + // TALK AGAIN ๋ฒ„ํŠผ ํ•ธ๋“ค๋Ÿฌ + const handleTalkAgain = useCallback(() => { + console.log('๐ŸŽค [VoiceInputOverlay] TALK AGAIN - Restarting LISTENING mode'); + + // ๊ธฐ์กด ํƒ€์ด๋จธ ์ •๋ฆฌ + if (listeningTimerRef.current) { + clearTimeout(listeningTimerRef.current); + listeningTimerRef.current = null; + } + + // STT ํ…์ŠคํŠธ ์ดˆ๊ธฐํ™” + setSttResponseText(''); + + // LISTENING ๋ชจ๋“œ๋กœ ์ „ํ™˜ + setVoiceInputMode(VOICE_INPUT_MODE.WEBSPEECH); + setCurrentMode(VOICE_MODES.LISTENING); + + // WebSpeech API ์‹œ์ž‘ + startListening(); + + // 15์ดˆ ํƒ€์ด๋จธ ์„ค์ • + listeningTimerRef.current = setTimeout(() => { + console.log('โฐ [VoiceInputOverlay] 15์ดˆ ํƒ€์ž„์•„์›ƒ - PROMPT ๋ชจ๋“œ๋กœ ๋ณต๊ท€'); + setCurrentMode(VOICE_MODES.PROMPT); + setVoiceInputMode(null); + listeningTimerRef.current = null; + stopListening(); + }, 15000); + }, [startListening, stopListening]); + // ๋ชจ๋“œ์— ๋”ฐ๋ฅธ ์ปจํ…์ธ  ๋ Œ๋”๋ง const renderModeContent = () => { + console.log( + '๐Ÿ“บ [VoiceInputOverlay] renderModeContent - currentMode:', + currentMode, + 'voiceInputMode:', + voiceInputMode, + 'isListening:', + isListening + ); + switch (currentMode) { case VOICE_MODES.PROMPT: + console.log('๐Ÿ“บ Rendering: VoicePromptScreen'); return ( ); case VOICE_MODES.LISTENING: + console.log('๐Ÿ“บ Rendering: VoiceListening (15์ดˆ ํƒ€์ด๋จธ ๊ธฐ๋ฐ˜)'); return ; + case VOICE_MODES.RESPONSE: + console.log('๐Ÿ“บ Rendering: VoiceResponse with text:', sttResponseText); + return ; case VOICE_MODES.MODE_3: // ์ถ”ํ›„ MODE_3 ์ปดํฌ๋„ŒํŠธ ์ถ”๊ฐ€ return ; @@ -198,6 +341,7 @@ const VoiceInputOverlay = ({ // ์ถ”ํ›„ MODE_4 ์ปดํฌ๋„ŒํŠธ ์ถ”๊ฐ€ return ; default: + console.log('๐Ÿ“บ Rendering: VoicePromptScreen (default)'); return ( ); @@ -267,10 +411,26 @@ const VoiceInputOverlay = ({ // [currentMode, voiceInputMode, onClose] // ); - // WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ (๋ชจ๋“œ ์ „ํ™˜: prompt -> 
listening -> close) + // Overlay ๋‹ซ๊ธฐ ํ•ธ๋“ค๋Ÿฌ (๋ชจ๋“  ๋‹ซ๊ธฐ ๋™์ž‘์„ ํ†ตํ•ฉ) + const handleClose = useCallback(() => { + console.log('[VoiceInputOverlay] Closing overlay'); + if (listeningTimerRef.current) { + clearTimeout(listeningTimerRef.current); + listeningTimerRef.current = null; + } + setVoiceInputMode(null); + setCurrentMode(VOICE_MODES.PROMPT); + setSttResponseText(''); + onClose(); + }, [onClose]); + + // WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ const handleWebSpeechMicClick = useCallback( (e) => { - console.log('[VoiceInputOverlay] handleWebSpeechMicClick called, currentMode:', currentMode); + console.log( + '๐ŸŽค [VoiceInputOverlay] handleWebSpeechMicClick called, currentMode:', + currentMode + ); // ์ด๋ฒคํŠธ ์ „ํŒŒ ๋ฐฉ์ง€ - dim ๋ ˆ์ด์–ด์˜ onClick ์‹คํ–‰ ๋ฐฉ์ง€ if (e && e.stopPropagation) { @@ -280,44 +440,51 @@ const VoiceInputOverlay = ({ e.nativeEvent.stopImmediatePropagation(); } - if (currentMode === VOICE_MODES.PROMPT) { - // prompt ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> WebSpeech listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ - console.log('[VoiceInputOverlay] Switching to WebSpeech LISTENING mode'); + if (currentMode === VOICE_MODES.PROMPT || currentMode === VOICE_MODES.RESPONSE) { + // prompt ๋ชจ๋“œ ๋˜๋Š” response ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ: + // 1. listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ (15์ดˆ ํƒ€์ด๋จธ) + // 2. WebSpeech API ์‹œ์ž‘ (๋…๋ฆฝ ๋™์ž‘) + console.log('๐ŸŽค [VoiceInputOverlay] Starting LISTENING mode (15s) + WebSpeech API'); + + // ๊ธฐ์กด ํƒ€์ด๋จธ ์ •๋ฆฌ + if (listeningTimerRef.current) { + clearTimeout(listeningTimerRef.current); + listeningTimerRef.current = null; + } + + // STT ํ…์ŠคํŠธ ์ดˆ๊ธฐํ™” (RESPONSE ๋ชจ๋“œ์—์„œ ์˜ฌ ๊ฒฝ์šฐ) + if (currentMode === VOICE_MODES.RESPONSE) { + setSttResponseText(''); + } + setVoiceInputMode(VOICE_INPUT_MODE.WEBSPEECH); setCurrentMode(VOICE_MODES.LISTENING); - // TODO: Web Speech API ์‹œ์ž‘ ๋กœ์ง ์ถ”๊ฐ€ - } else if ( - currentMode === VOICE_MODES.LISTENING && - voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH - ) { - // WebSpeech listening ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> ์ข…๋ฃŒ - console.log('[VoiceInputOverlay] Closing from WebSpeech LISTENING mode'); - setVoiceInputMode(null); - onClose(); + + // WebSpeech API ์‹œ์ž‘ + startListening(); + + // 15์ดˆ ํƒ€์ด๋จธ ์„ค์ • (WebSpeech ์ข…๋ฃŒ์™€ ๋ฌด๊ด€ํ•˜๊ฒŒ 15์ดˆ ํ›„ PROMPT ๋ณต๊ท€) + listeningTimerRef.current = setTimeout(() => { + console.log('โฐ [VoiceInputOverlay] 15์ดˆ ํƒ€์ž„์•„์›ƒ - PROMPT ๋ชจ๋“œ๋กœ ๋ณต๊ท€'); + setCurrentMode(VOICE_MODES.PROMPT); + setVoiceInputMode(null); + listeningTimerRef.current = null; + // WebSpeech๊ฐ€ ์•„์ง ๋™์ž‘ ์ค‘์ด๋ฉด ์ค‘์ง€ + stopListening(); + }, 15000); // 15์ดˆ } else { - // ๊ธฐํƒ€ ๋ชจ๋“œ์—์„œ๋Š” ๋ฐ”๋กœ ์ข…๋ฃŒ - console.log('[VoiceInputOverlay] Closing from other mode'); - setVoiceInputMode(null); - onClose(); + // listening ๋ชจ๋“œ ๋˜๋Š” ๊ธฐํƒ€ ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> overlay ๋‹ซ๊ธฐ + console.log('๐ŸŽค [VoiceInputOverlay] Closing overlay'); + handleClose(); } }, - [currentMode, voiceInputMode, onClose] - ); - - // dim ๋ ˆ์ด์–ด ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ (๋งˆ์ดํฌ ๋ฒ„ํŠผ๊ณผ ๋ถ„๋ฆฌ) - const handleDimClick = useCallback( - (e) => { - console.log('[VoiceInputOverlay] dimBackground clicked'); - setVoiceInputMode(null); - onClose(); - }, - [onClose] + [currentMode, handleClose, startListening, stopListening] ); return (
       {/* Background dim layer - clicking it closes the overlay */}
-      <div className={css.dimBackground} onClick={handleDimClick} />
+      <div className={css.dimBackground} onClick={handleClose} />

-        {/* ⛔ VUI test disabled: Voice registration status display (for debugging) */}
-        {/* {process.env.NODE_ENV === 'development' && (
-          Voice: {isRegistered ? '✓ Ready' : '✗ Not Ready'}
-          Mode: {voiceInputMode || 'None'}
-        )} */}
+        {/* For debugging: WebSpeech status display */}
+          Input Mode: {voiceInputMode || 'None'}
+          Current Mode: {currentMode}
+          isListening: {isListening ? '🎤 YES' : '❌ NO'}
+          Interim: {interimText || 'N/A'}

         {/* Mode-specific content area - Spotlight Container (self-only) */}
         {/* Mode-specific content */}
-        {renderModeContent()}
+        {renderModeContent()}
diff --git a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx
new file mode 100644
index 00000000..98e7c103
--- /dev/null
+++ b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx
@@ -0,0 +1,62 @@
+// src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.jsx
+import React from 'react';
+import PropTypes from 'prop-types';
+import Spottable from '@enact/spotlight/Spottable';
+import SpotlightContainerDecorator from '@enact/spotlight/SpotlightContainerDecorator';
+import css from './VoiceResponse.module.less';
+
+const SpottableBubble = Spottable('div');
+const SpottableButton = Spottable('button');
+
+const ResponseContainer = SpotlightContainerDecorator(
+  {
+    enterTo: 'default-element',
+    restrict: 'self-only',
+  },
+  'div'
+);
+
+const VoiceResponse = ({ responseText = '', onTalkAgain }) => {
+  const handleTalkAgainClick = () => {
+    console.log('[VoiceResponse] TALK AGAIN clicked');
+    if (onTalkAgain) {
+      onTalkAgain();
+    }
+  };
+
+  return (
+    <ResponseContainer className={css.container}>
+      <div className={css.responseContainer}>
+        <SpottableButton
+          className={css.talkAgainButton}
+          onClick={handleTalkAgainClick}
+          onKeyDown={(e) => {
+            if (e.key === 'Enter' || e.keyCode === 13) {
+              e.preventDefault();
+              handleTalkAgainClick();
+            }
+          }}
+          spotlightId="voice-talk-again-button"
+        >
+          TALK AGAIN
+        </SpottableButton>
+
+        <SpottableBubble className={css.bubbleMessage}>
+          <div className={css.bubbleText}>{responseText}</div>
+        </SpottableBubble>
+      </div>
+    </ResponseContainer>
+ ); +}; + +VoiceResponse.propTypes = { + responseText: PropTypes.string, + onTalkAgain: PropTypes.func, +}; + +VoiceResponse.defaultProps = { + responseText: '', + onTalkAgain: null, +}; + +export default VoiceResponse; diff --git a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less new file mode 100644 index 00000000..d86dadc1 --- /dev/null +++ b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less @@ -0,0 +1,99 @@ +// src/views/SearchPanel/VoiceInputOverlay/modes/VoiceResponse.module.less +@import "../../../../style/CommonStyle.module.less"; + +.container { + width: 100%; + height: 437px; + position: relative; + border-radius: 12px; + pointer-events: all; + margin-top: 100px; + display: flex; + justify-content: center; + align-items: center; +} + +.responseContainer { + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + width: 100%; + padding: 0 40px; +} + +.talkAgainButton { + margin-bottom: 100px; + padding: 20px 60px; + background: rgba(68, 68, 68, 0.5); + border: none; + border-radius: 1000px; + outline: 2px rgba(251, 251, 251, 0.2) solid; + outline-offset: -2px; + color: #eaeaea; + font-size: 28px; + font-family: "LG Smart UI"; + font-weight: 700; + cursor: pointer; + transition: all 0.3s ease; + box-shadow: 0px 10px 30px rgba(0, 0, 0, 0.35); + + &:hover { + background: rgba(88, 88, 88, 0.6); + outline: 2px rgba(251, 251, 251, 0.3) solid; + transform: translateY(-2px); + box-shadow: 0px 12px 35px rgba(0, 0, 0, 0.45); + } + + &:focus { + background: @PRIMARY_COLOR_RED; + color: @COLOR_WHITE; + outline: 3px @PRIMARY_COLOR_RED solid; + outline-offset: -3px; + box-shadow: 0px 15px 40px rgba(199, 8, 80, 0.55); + transform: translateY(-3px); + } +} + +.bubbleMessage { + padding: 20px 40px; + background: rgba(68, 68, 68, 0.5); + box-shadow: 0px 10px 30px rgba(0, 0, 0, 0.35); + border-radius: 1000px; + outline: 2px rgba(251, 251, 251, 0.2) solid; + outline-offset: -2px; + display: flex; + justify-content: center; + align-items: center; + max-width: none; /* ์ขŒ์šฐ ํญ ์ œํ•œ ์—†์Œ */ + min-width: 300px; + cursor: pointer; + transition: all 0.3s ease; + + &:hover { + background: rgba(88, 88, 88, 0.6); + outline: 2px rgba(251, 251, 251, 0.3) solid; + transform: translateY(-2px); + box-shadow: 0px 12px 35px rgba(0, 0, 0, 0.45); + } + + &:focus { + background: rgba(100, 100, 100, 0.7); + outline: 3px rgba(251, 251, 251, 0.5) solid; + outline-offset: -3px; + box-shadow: 0px 15px 40px rgba(0, 0, 0, 0.55); + transform: translateY(-3px); + } +} + +.bubbleText { + text-align: center; + color: #eaeaea; + font-size: 28px; + font-family: "LG Smart UI"; + font-weight: 700; + line-height: 32px; + word-wrap: break-word; + letter-spacing: -1px; + white-space: nowrap; /* ํ•œ ์ค„๋กœ ๊ธธ๊ฒŒ ํ‘œ์‹œ */ +} diff --git a/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx b/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx index bc2bc5d9..1899eb5a 100644 --- a/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx +++ b/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx @@ -73,6 +73,73 @@ export default function VoicePanel({ panelInfo, isOnTop, spotlightId }) { dispatch({ type: types.VOICE_CLEAR_LOGS }); }, [dispatch]); + const handleTestLog = useCallback(() => { + const now = new Date(); + const timestamp = now.toISOString(); + const formattedTime = 
now.toLocaleString('ko-KR', { + year: 'numeric', + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hour12: false, + }); + + console.log( + '[VoicePanel] Test Log Button Clicked - Sending test log with timestamp:', + timestamp + ); + + // Test log dispatch - ์—ฌ๋Ÿฌ ํƒ€์ž…์˜ ๋กœ๊ทธ๋ฅผ ์ˆœ์ฐจ์ ์œผ๋กœ ์ถ”๊ฐ€ + dispatch({ + type: types.VOICE_ADD_LOG, + payload: { + timestamp: timestamp, + type: 'ACTION', + title: '[TEST] ๐Ÿงช Test Log Button Clicked', + data: { + message: 'This is a test log to verify addLog dispatch works correctly', + currentTime: formattedTime, + timestamp: timestamp, + testNumber: Math.floor(Math.random() * 1000), + }, + }, + }); + + // 100ms ํ›„ ๋‘ ๋ฒˆ์งธ ๋กœ๊ทธ + setTimeout(() => { + dispatch({ + type: types.VOICE_ADD_LOG, + payload: { + timestamp: new Date().toISOString(), + type: 'RESPONSE', + title: '[TEST] โœ… Second Test Log', + data: { + message: 'If you see this, dispatch and Redux are working correctly', + delay: '100ms after first log', + }, + }, + }); + }, 100); + + // 200ms ํ›„ ์„ธ ๋ฒˆ์งธ ๋กœ๊ทธ + setTimeout(() => { + dispatch({ + type: types.VOICE_ADD_LOG, + payload: { + timestamp: new Date().toISOString(), + type: 'ERROR', + title: '[TEST] โš ๏ธ Error Type Test Log', + data: { + message: 'This is a test ERROR log to verify different log types display correctly', + logType: 'ERROR', + }, + }, + }); + }, 200); + }, [dispatch]); + const handleLoadMockData = useCallback(() => { console.log('[VoicePanel] Loading mock data: STT text + logs'); @@ -202,6 +269,13 @@ export default function VoicePanel({ panelInfo, isOnTop, spotlightId }) { > Clear + + Test Log + **ํ”„๋กœ์ ํŠธ**: ShopTime webOS TV Application -> **๋ชฉ์ **: webOS VUI Framework ๋Œ€์•ˆ์œผ๋กœ Web Speech API ๊ตฌํ˜„ -> **ํ™˜๊ฒฝ**: Chrome 68, React 16.7, Enact ํ”„๋ ˆ์ž„์›Œํฌ, Redux 3.7.2 -> **์ž‘์„ฑ์ผ**: 2025-10-16 +> **ํ”„๋กœ์ ํŠธ**: ShopTime webOS TV Application +> **๋ชฉ์ **: webOS VUI Framework ๋Œ€์•ˆ์œผ๋กœ Web Speech API ๊ตฌํ˜„ +> **ํ™˜๊ฒฝ**: Chrome 68, React 16.7, Enact ํ”„๋ ˆ์ž„์›Œํฌ, Redux 3.7.2 +> **์ž‘์„ฑ์ผ**: 2025-10-16 --- ## ๐Ÿ“‹ ๊ฐœ์š” -์ด ๋ฌธ์„œ๋Š” ํ˜„์žฌ ๊ตฌํ˜„ ์ค‘์ธ **webOS VUI Framework(Plan A)**์˜ ๋Œ€์•ˆ์œผ๋กœ **Web Speech API(Plan B)**๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ตฌํ˜„ ๋ฐฉ๋ฒ•์„ ์„ค๋ช…ํ•ฉ๋‹ˆ๋‹ค. +์ด ๋ฌธ์„œ๋Š” ํ˜„์žฌ ๊ตฌํ˜„ ์ค‘์ธ **webOS VUI Framework (Plan A)**์˜ ๋Œ€์•ˆ์œผ๋กœ **Web Speech API (Plan B)**๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ตฌํ˜„ ๋ฐฉ๋ฒ•์„ ์„ค๋ช…ํ•ฉ๋‹ˆ๋‹ค. ### Plan A vs Plan B ๋น„๊ต | ๊ตฌ๋ถ„ | Plan A (VUI Framework) | Plan B (Web Speech API) | -|-----|----------------------|------------------------| +|------|------------------------|--------------------------| | **API** | webOS Voice Conductor Service | ๋ธŒ๋ผ์šฐ์ € ๋„ค์ดํ‹ฐ๋ธŒ Web Speech API | | **์˜์กด์„ฑ** | webOS ํ”Œ๋žซํผ ์ „์šฉ | ๋ฒ”์šฉ ์›น ๋ธŒ๋ผ์šฐ์ € | | **๋„คํŠธ์›Œํฌ** | webOS ์Œ์„ฑ ์„œ๋ฒ„ | Google ์Œ์„ฑ ์„œ๋ฒ„ | | **ํ˜ธํ™˜์„ฑ** | webOS TV๋งŒ | Chrome 68+ ๋ชจ๋“  ํ”Œ๋žซํผ | -| **๊ถŒํ•œ** | PalmSystem API | navigator.mediaDevices | +| **๊ถŒํ•œ** | PalmSystem API | `navigator.mediaDevices` | | **์žฅ์ ** | TV ํ™˜๊ฒฝ ์ตœ์ ํ™”, ๋ฆฌ๋ชจ์ปจ ํ†ตํ•ฉ | ํฌ๋กœ์Šค ํ”Œ๋žซํผ, ๊ฐœ๋ฐœ ํŽธ์˜์„ฑ | | **๋‹จ์ ** | webOS ์ „์šฉ, ๋ณต์žกํ•œ ๊ตฌ์กฐ | ๋„คํŠธ์›Œํฌ ์˜์กด, TV ํ™˜๊ฒฝ ์ตœ์ ํ™” ํ•„์š” | @@ -1414,4 +1414,4 @@ recognition.start(); --- -์ด ๋ฌธ์„œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ๋‹จ๊ณ„๋ณ„๋กœ ๊ตฌํ˜„ํ•˜์‹œ๋ฉด ๋ฉ๋‹ˆ๋‹ค. ์งˆ๋ฌธ์ด ์žˆ์œผ์‹œ๋ฉด ์–ธ์ œ๋“ ์ง€ ๋ง์”€ํ•ด์ฃผ์„ธ์š”! ๐Ÿš€ +์ด ๋ฌธ์„œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ๋‹จ๊ณ„๋ณ„๋กœ ๊ตฌํ˜„ํ•˜์‹œ๋ฉด ๋ฉ๋‹ˆ๋‹ค. ์งˆ๋ฌธ์ด ์žˆ์œผ์‹œ๋ฉด ์–ธ์ œ๋“ ์ง€ ๋ง์”€ํ•ด์ฃผ์„ธ์š”! 
🚀
\ No newline at end of file
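
---

For quick reference, the Plan B flow that web-speech.md describes can be exercised with a minimal standalone sketch: feature detection, configuration, and a single recognition pass. This assumes a Chromium-based engine (Chrome 68+) that exposes `webkitSpeechRecognition` and an already granted microphone permission; the `WebSpeechService.js` added in this patch wraps these same calls behind a singleton with callback registration, so the snippet below illustrates the underlying API rather than the app's actual integration point.

```js
// Minimal Web Speech API (Plan B) sketch.
// Assumptions: Chromium engine with webkitSpeechRecognition, mic permission granted.
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;

if (SpeechRecognition) {
  const recognition = new SpeechRecognition();
  recognition.lang = 'ko-KR';        // language code, e.g. 'ko-KR' or 'en-US'
  recognition.continuous = false;    // end after a single utterance
  recognition.interimResults = true; // stream partial transcripts while speaking
  recognition.maxAlternatives = 1;

  recognition.onresult = (event) => {
    // Only the most recent result is inspected, matching WebSpeechService.onresult.
    const last = event.results[event.results.length - 1];
    const { transcript, confidence } = last[0];
    if (last.isFinal) {
      console.log('[WebSpeech] final:', transcript, 'confidence:', confidence);
    } else {
      console.log('[WebSpeech] interim:', transcript);
    }
  };
  recognition.onerror = (event) => console.error('[WebSpeech] error:', event.error);
  recognition.onend = () => console.log('[WebSpeech] recognition ended');

  // In practice, call start() from a user interaction so the permission prompt can appear.
  recognition.start();
} else {
  console.warn('[WebSpeech] Not supported; fall back to Plan A (VUI Framework)');
}
```

In the patch itself this logic lives in `WebSpeechService.initialize()` and `start()`, with interim and final results forwarded to Redux through `webSpeechActions` (`WEB_SPEECH_INTERIM_RESULT` and the existing `VOICE_STT_TEXT_RECEIVED` action).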