From 297ca5791ffad60e288ae5fa0f4fc84b33be7d5e Mon Sep 17 00:00:00 2001 From: optrader Date: Thu, 16 Oct 2025 15:01:11 +0900 Subject: [PATCH] [251016] fix: VUI Test-2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ๐Ÿ• ์ปค๋ฐ‹ ์‹œ๊ฐ„: 2025. 10. 16. 15:01:09 ๐Ÿ“Š ๋ณ€๊ฒฝ ํ†ต๊ณ„: โ€ข ์ด ํŒŒ์ผ: 5๊ฐœ โ€ข ์ถ”๊ฐ€: +240์ค„ โ€ข ์‚ญ์ œ: -48์ค„ ๐Ÿ“ ์ถ”๊ฐ€๋œ ํŒŒ์ผ: + com.twin.app.shoptime/vui-test.1.md + com.twin.app.shoptime/web-speech.md ๐Ÿ“ ์ˆ˜์ •๋œ ํŒŒ์ผ: ~ com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx ~ com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.module.less ~ com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx ๐Ÿ”ง ํ•จ์ˆ˜ ๋ณ€๊ฒฝ ๋‚ด์šฉ: ๐Ÿ“„ com.twin.app.shoptime/web-speech.md (mdํŒŒ์ผ): โœ… Added: Framework(), Hook(), constructor(), checkSupport(), initialize(), SpeechRecognition(), setupEventHandlers(), onStart(), onResult(), onError(), getErrorMessage(), onEnd(), start(), abort(), cleanup(), WebSpeechService(), dispatch(), Date(), useDispatch(), useSelector(), useEffect(), initializeWebSpeech(), onSTTText(), useCallback(), SearchPanel(), useState(), setSearchQuery(), setTimeout(), setIsVoiceOverlayVisible(), useWebSpeech(), setVoiceMode(), stopListening(), handleSearchSubmit(), onSearchChange(), setCurrentMode(), onClose(), stopPropagation(), classNames(), renderModeContent(), async(), getUserMedia(), getTracks(), preventDefault(), startListening(), useSearchVoice() ๐Ÿ”ง ์ฃผ์š” ๋ณ€๊ฒฝ ๋‚ด์šฉ: โ€ข ํ…Œ์ŠคํŠธ ์ปค๋ฒ„๋ฆฌ์ง€ ๋ฐ ์•ˆ์ •์„ฑ ํ–ฅ์ƒ โ€ข ๊ฐœ๋ฐœ ๋ฌธ์„œ ๋ฐ ๊ฐ€์ด๋“œ ๊ฐœ์„  --- .../VoiceInputOverlay/VoiceInputOverlay.jsx | 213 ++- .../VoiceInputOverlay.module.less | 73 + .../src/views/VoicePanel/VoicePanel.jsx | 5 + com.twin.app.shoptime/vui-test.1.md | 309 ++++ com.twin.app.shoptime/web-speech.md | 1417 +++++++++++++++++ 5 files changed, 1969 insertions(+), 48 deletions(-) create mode 100644 com.twin.app.shoptime/vui-test.1.md create mode 100644 com.twin.app.shoptime/web-speech.md diff --git a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx index 8e272264..7d681dc0 100644 --- a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx +++ b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx @@ -37,9 +37,16 @@ export const VOICE_MODES = { MODE_4: 'mode4', // ์ถ”ํ›„ ์ถ”๊ฐ€ }; +// ์Œ์„ฑ์ธ์‹ ์ž…๋ ฅ ๋ชจ๋“œ (VUI vs WebSpeech) +export const VOICE_INPUT_MODE = { + VUI: 'vui', // VUI (Voice UI Framework) + WEBSPEECH: 'webspeech', // Web Speech API +}; + const OVERLAY_SPOTLIGHT_ID = 'voice-input-overlay-container'; const INPUT_SPOTLIGHT_ID = 'voice-overlay-input-box'; const MIC_SPOTLIGHT_ID = 'voice-overlay-mic-button'; +const MIC_WEBSPEECH_SPOTLIGHT_ID = 'voice-overlay-mic-webspeech-button'; const VoiceInputOverlay = ({ isVisible, @@ -54,11 +61,15 @@ const VoiceInputOverlay = ({ const lastFocusedElement = useRef(null); const [inputFocus, setInputFocus] = useState(false); const [micFocused, setMicFocused] = useState(false); + const [micWebSpeechFocused, setMicWebSpeechFocused] = useState(false); // ๋‚ด๋ถ€ ๋ชจ๋“œ ์ƒํƒœ ๊ด€๋ฆฌ (prompt -> listening -> close) const [currentMode, setCurrentMode] = useState(mode); + // ์Œ์„ฑ์ธ์‹ ์ž…๋ ฅ ๋ชจ๋“œ (VUI vs WebSpeech) + const [voiceInputMode, setVoiceInputMode] = useState(null); + // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: VoicePanel ๋…๋ฆฝ ํ…Œ์ŠคํŠธ ์‹œ 
์ถฉ๋Œ ๋ฐฉ์ง€ // Redux์—์„œ voice ์ƒํƒœ ๊ฐ€์ ธ์˜ค๊ธฐ - const { isRegistered, lastSTTText, sttTimestamp } = useSelector((state) => state.voice); + // const { isRegistered, lastSTTText, sttTimestamp } = useSelector((state) => state.voice); // Redux์—์„œ shopperHouse ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ๊ฐ€์ ธ์˜ค๊ธฐ const shopperHouseData = useSelector((state) => state.search.shopperHouseData); @@ -78,25 +89,26 @@ const VoiceInputOverlay = ({ } }, [shopperHouseData, isVisible, onClose]); + // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์ฒ˜๋ฆฌ // STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์‹œ ์ฒ˜๋ฆฌ - useEffect(() => { - if (lastSTTText && sttTimestamp && isVisible) { - console.log('[VoiceInputOverlay] STT text received in overlay:', lastSTTText); + // useEffect(() => { + // if (lastSTTText && sttTimestamp && isVisible) { + // console.log('[VoiceInputOverlay] STT text received in overlay:', lastSTTText); - // ์ž…๋ ฅ์ฐฝ์— ํ…์ŠคํŠธ ํ‘œ์‹œ (๋ถ€๋ชจ ์ปดํฌ๋„ŒํŠธ๋กœ ์ „๋‹ฌ) - if (onSearchChange) { - onSearchChange({ value: lastSTTText }); - } + // // ์ž…๋ ฅ์ฐฝ์— ํ…์ŠคํŠธ ํ‘œ์‹œ (๋ถ€๋ชจ ์ปดํฌ๋„ŒํŠธ๋กœ ์ „๋‹ฌ) + // if (onSearchChange) { + // onSearchChange({ value: lastSTTText }); + // } - // listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ (์‹œ๊ฐ์  ํ”ผ๋“œ๋ฐฑ) - setCurrentMode(VOICE_MODES.LISTENING); + // // listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ (์‹œ๊ฐ์  ํ”ผ๋“œ๋ฐฑ) + // setCurrentMode(VOICE_MODES.LISTENING); - // 1์ดˆ ํ›„ ์ž๋™ ๋‹ซ๊ธฐ (์„ ํƒ์‚ฌํ•ญ) - setTimeout(() => { - onClose(); - }, 1000); - } - }, [lastSTTText, sttTimestamp, isVisible, onSearchChange, onClose]); + // // 1์ดˆ ํ›„ ์ž๋™ ๋‹ซ๊ธฐ (์„ ํƒ์‚ฌํ•ญ) + // setTimeout(() => { + // onClose(); + // }, 1000); + // } + // }, [lastSTTText, sttTimestamp, isVisible, onSearchChange, onClose]); // Overlay๊ฐ€ ์—ด๋ฆด ๋•Œ ํฌ์ปค์Šค๋ฅผ overlay ๋‚ด๋ถ€๋กœ ์ด๋™ useEffect(() => { @@ -106,13 +118,15 @@ const VoiceInputOverlay = ({ // ๋ชจ๋“œ ์ดˆ๊ธฐํ™” (ํ•ญ์ƒ prompt ๋ชจ๋“œ๋กœ ์‹œ์ž‘) setCurrentMode(mode); + setVoiceInputMode(null); // Overlay ๋‚ด๋ถ€๋กœ ํฌ์ปค์Šค ์ด๋™ setTimeout(() => { Spotlight.focus(OVERLAY_SPOTLIGHT_ID); }, 100); } else { - // Overlay๊ฐ€ ๋‹ซํž ๋•Œ ์›๋ž˜ ํฌ์ปค์Šค ๋ณต์› + // Overlay๊ฐ€ ๋‹ซํž ๋•Œ ์›๋ž˜ ํฌ์ปค์Šค ๋ณต์› ๋ฐ ์ƒํƒœ ์ดˆ๊ธฐํ™” + setVoiceInputMode(null); if (lastFocusedElement.current) { setTimeout(() => { Spotlight.focus(lastFocusedElement.current); @@ -199,7 +213,7 @@ const VoiceInputOverlay = ({ setInputFocus(false); }, []); - // ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํฌ์ปค์Šค ํ•ธ๋“ค๋Ÿฌ + // ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํฌ์ปค์Šค ํ•ธ๋“ค๋Ÿฌ (VUI) const handleMicFocus = useCallback(() => { setMicFocused(true); }, []); @@ -208,10 +222,55 @@ const VoiceInputOverlay = ({ setMicFocused(false); }, []); - // ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ (๋ชจ๋“œ ์ „ํ™˜: prompt -> listening -> close) - const handleMicClick = useCallback( + // WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํฌ์ปค์Šค ํ•ธ๋“ค๋Ÿฌ + const handleMicWebSpeechFocus = useCallback(() => { + setMicWebSpeechFocused(true); + }, []); + + const handleMicWebSpeechBlur = useCallback(() => { + setMicWebSpeechFocused(false); + }, []); + + // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: VUI ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ + // VUI ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ (๋ชจ๋“œ ์ „ํ™˜: prompt -> listening -> close) + // const handleVUIMicClick = useCallback( + // (e) => { + // console.log('[VoiceInputOverlay] handleVUIMicClick called, currentMode:', currentMode); + + // // ์ด๋ฒคํŠธ ์ „ํŒŒ ๋ฐฉ์ง€ - dim ๋ ˆ์ด์–ด์˜ onClick ์‹คํ–‰ ๋ฐฉ์ง€ + // if (e && e.stopPropagation) { + // e.stopPropagation(); + // } + // if (e && e.nativeEvent && e.nativeEvent.stopImmediatePropagation) { + // 
e.nativeEvent.stopImmediatePropagation(); + // } + + // if (currentMode === VOICE_MODES.PROMPT) { + // // prompt ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> VUI listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ + // console.log('[VoiceInputOverlay] Switching to VUI LISTENING mode'); + // setVoiceInputMode(VOICE_INPUT_MODE.VUI); + // setCurrentMode(VOICE_MODES.LISTENING); + // // ์ด ์‹œ์ ์—์„œ webOS Voice Framework๊ฐ€ ์ž๋™์œผ๋กœ ์Œ์„ฑ์ธ์‹ ์‹œ์ž‘ + // // (์ด๋ฏธ registerVoiceFramework()๋กœ ๋“ฑ๋ก๋˜์–ด ์žˆ์œผ๋ฏ€๋กœ) + // } else if (currentMode === VOICE_MODES.LISTENING && voiceInputMode === VOICE_INPUT_MODE.VUI) { + // // VUI listening ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> ์ข…๋ฃŒ + // console.log('[VoiceInputOverlay] Closing from VUI LISTENING mode'); + // setVoiceInputMode(null); + // onClose(); + // } else { + // // ๊ธฐํƒ€ ๋ชจ๋“œ์—์„œ๋Š” ๋ฐ”๋กœ ์ข…๋ฃŒ + // console.log('[VoiceInputOverlay] Closing from other mode'); + // setVoiceInputMode(null); + // onClose(); + // } + // }, + // [currentMode, voiceInputMode, onClose] + // ); + + // WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ (๋ชจ๋“œ ์ „ํ™˜: prompt -> listening -> close) + const handleWebSpeechMicClick = useCallback( (e) => { - console.log('[VoiceInputOverlay] handleMicClick called, currentMode:', currentMode); + console.log('[VoiceInputOverlay] handleWebSpeechMicClick called, currentMode:', currentMode); // ์ด๋ฒคํŠธ ์ „ํŒŒ ๋ฐฉ์ง€ - dim ๋ ˆ์ด์–ด์˜ onClick ์‹คํ–‰ ๋ฐฉ์ง€ if (e && e.stopPropagation) { @@ -222,28 +281,34 @@ const VoiceInputOverlay = ({ } if (currentMode === VOICE_MODES.PROMPT) { - // prompt ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ - console.log('[VoiceInputOverlay] Switching to LISTENING mode'); + // prompt ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> WebSpeech listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ + console.log('[VoiceInputOverlay] Switching to WebSpeech LISTENING mode'); + setVoiceInputMode(VOICE_INPUT_MODE.WEBSPEECH); setCurrentMode(VOICE_MODES.LISTENING); - // ์ด ์‹œ์ ์—์„œ webOS Voice Framework๊ฐ€ ์ž๋™์œผ๋กœ ์Œ์„ฑ์ธ์‹ ์‹œ์ž‘ - // (์ด๋ฏธ registerVoiceFramework()๋กœ ๋“ฑ๋ก๋˜์–ด ์žˆ์œผ๋ฏ€๋กœ) - } else if (currentMode === VOICE_MODES.LISTENING) { - // listening ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> ์ข…๋ฃŒ - console.log('[VoiceInputOverlay] Closing from LISTENING mode'); + // TODO: Web Speech API ์‹œ์ž‘ ๋กœ์ง ์ถ”๊ฐ€ + } else if ( + currentMode === VOICE_MODES.LISTENING && + voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH + ) { + // WebSpeech listening ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> ์ข…๋ฃŒ + console.log('[VoiceInputOverlay] Closing from WebSpeech LISTENING mode'); + setVoiceInputMode(null); onClose(); } else { // ๊ธฐํƒ€ ๋ชจ๋“œ์—์„œ๋Š” ๋ฐ”๋กœ ์ข…๋ฃŒ console.log('[VoiceInputOverlay] Closing from other mode'); + setVoiceInputMode(null); onClose(); } }, - [currentMode, onClose] + [currentMode, voiceInputMode, onClose] ); // dim ๋ ˆ์ด์–ด ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ (๋งˆ์ดํฌ ๋ฒ„ํŠผ๊ณผ ๋ถ„๋ฆฌ) const handleDimClick = useCallback( (e) => { console.log('[VoiceInputOverlay] dimBackground clicked'); + setVoiceInputMode(null); onClose(); }, [onClose] @@ -264,8 +329,8 @@ const VoiceInputOverlay = ({ {/* ๋ฐฐ๊ฒฝ dim ๋ ˆ์ด์–ด - ํด๋ฆญํ•˜๋ฉด ๋‹ซํž˜ */}
- {/* Voice ๋“ฑ๋ก ์ƒํƒœ ํ‘œ์‹œ (๋””๋ฒ„๊น…์šฉ) */} - {process.env.NODE_ENV === 'development' && ( + {/* โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: Voice ๋“ฑ๋ก ์ƒํƒœ ํ‘œ์‹œ (๋””๋ฒ„๊น…์šฉ) */} + {/* {process.env.NODE_ENV === 'development' && (
Voice: {isRegistered ? '✓ Ready' : '✗ Not Ready'} +
+ Mode: {voiceInputMode || 'None'}
- )} + )} */} {/* ๋ชจ๋“œ๋ณ„ ์ปจํ…์ธ  ์˜์—ญ - Spotlight Container (self-only) */} + {/* VUI ๋งˆ์ดํฌ ๋ฒ„ํŠผ (โ›” ๊ธฐ๋Šฅ ๋น„ํ™œ์„ฑํ™”: ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ๋งŒ ๋ฌดํšจํ™”) */} { + // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: handleVUIMicClick ํ˜ธ์ถœ ์•ˆ ํ•จ + e.stopPropagation(); + console.log('[VoiceInputOverlay] VUI mic clicked (disabled for testing)'); + }} onKeyDown={(e) => { if (e.key === 'Enter' || e.keyCode === 13) { e.preventDefault(); e.stopPropagation(); - handleMicClick(e); + // โ›” VUI ํ…Œ์ŠคํŠธ ๋น„ํ™œ์„ฑํ™”: handleVUIMicClick ํ˜ธ์ถœ ์•ˆ ํ•จ + console.log('[VoiceInputOverlay] VUI mic Enter key (disabled for testing)'); } }} onFocus={handleMicFocus} @@ -320,21 +395,63 @@ const VoiceInputOverlay = ({ spotlightId={MIC_SPOTLIGHT_ID} >
- Microphone + Voice AI
- {currentMode === VOICE_MODES.LISTENING && ( - - - + {currentMode === VOICE_MODES.LISTENING && + voiceInputMode === VOICE_INPUT_MODE.VUI && ( + + + + )} +
+ + {/* WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ */} + { + if (e.key === 'Enter' || e.keyCode === 13) { + e.preventDefault(); + e.stopPropagation(); + handleWebSpeechMicClick(e); + } + }} + onFocus={handleMicWebSpeechFocus} + onBlur={handleMicWebSpeechBlur} + spotlightId={MIC_WEBSPEECH_SPOTLIGHT_ID} + > +
+ Voice Input +
+ {currentMode === VOICE_MODES.LISTENING && + voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH && ( + + + + )}
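Note on the TODO left in `handleWebSpeechMicClick` above (adding the Web Speech API start logic): the PROMPT → LISTENING branch currently only switches UI state and never starts recognition. Below is a minimal sketch of how that branch could be wired, assuming `startListening`/`stopListening` are passed down as props from the `useWebSpeech` hook described in `web-speech.md` — these prop names are illustrative and are not part of this patch.

```javascript
// Sketch only — startListening/stopListening are assumed props from useWebSpeech.
if (currentMode === VOICE_MODES.PROMPT) {
  setVoiceInputMode(VOICE_INPUT_MODE.WEBSPEECH);
  setCurrentMode(VOICE_MODES.LISTENING);
  if (typeof startListening === 'function') {
    startListening(); // eventually calls SpeechRecognition.start() inside WebSpeechService
  }
} else if (
  currentMode === VOICE_MODES.LISTENING &&
  voiceInputMode === VOICE_INPUT_MODE.WEBSPEECH
) {
  if (typeof stopListening === 'function') {
    stopListening();
  }
  setVoiceInputMode(null);
  onClose();
}
```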
diff --git a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.module.less b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.module.less index fd122833..e1e3df25 100644 --- a/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.module.less +++ b/com.twin.app.shoptime/src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.module.less @@ -197,6 +197,79 @@ } } +// WebSpeech ๋งˆ์ดํฌ ๋ฒ„ํŠผ (๋ธ”๋ฃจ ๊ณ„์—ด) +.microphoneButtonWebSpeech { + width: 100px; + height: 100px; + position: relative; + cursor: pointer; + display: flex; + justify-content: center; + align-items: center; + padding: 0; + z-index: 1003; + + .microphoneCircle { + width: 100%; + height: 100%; + position: relative; + background: white; + overflow: hidden; + border-radius: 1000px; + border: 5px solid #ccc; + display: flex; + justify-content: center; + align-items: center; + transition: all 0.3s ease; + + .microphoneIcon { + height: 50px; + box-sizing: border-box; + transition: filter 0.3s ease; + } + } + + &:hover { + .microphoneCircle { + border-color: white; + } + } + + // active ์ƒํƒœ (์Œ์„ฑ ์ž…๋ ฅ ๋ชจ๋“œ - ๋ธ”๋ฃจ ์ƒ‰์ƒ) + &.active { + .microphoneCircle { + background-color: #4A90E2; + border-color: #4A90E2; + box-shadow: 0 0 22px 0 rgba(74, 144, 226, 0.5); + + .microphoneIcon { + filter: brightness(0) invert(1); // ์•„์ด์ฝ˜์„ ํฐ์ƒ‰์œผ๋กœ ๋ณ€๊ฒฝ + } + } + } + + &.active.focused { + .microphoneCircle { + background-color: #4A90E2; + border-color: white; + box-shadow: 0 0 22px 0 rgba(0, 0, 0, 0.5); + } + } + + // listening ์ƒํƒœ (๋ฐฐ๊ฒฝ ํˆฌ๋ช…, ํ…Œ๋‘๋ฆฌ ripple ์• ๋‹ˆ๋ฉ”์ด์…˜) + &.listening { + .microphoneCircle { + background-color: transparent; + border-color: transparent; // ํ…Œ๋‘๋ฆฌ ํˆฌ๋ช… + box-shadow: none; + + .microphoneIcon { + filter: brightness(0) invert(1); // ์•„์ด์ฝ˜์€ ํฐ์ƒ‰ ์œ ์ง€ + } + } + } +} + // Ripple ์• ๋‹ˆ๋ฉ”์ด์…˜ (์›ํ˜• ํ…Œ๋‘๋ฆฌ๊ฐ€ ์ ์—์„œ ์‹œ์ž‘ํ•ด์„œ ๊ทธ๋ ค์ง) .rippleSvg { position: absolute; diff --git a/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx b/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx index 5b9e98e4..bc2bc5d9 100644 --- a/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx +++ b/com.twin.app.shoptime/src/views/VoicePanel/VoicePanel.jsx @@ -29,6 +29,11 @@ export default function VoicePanel({ panelInfo, isOnTop, spotlightId }) { useEffect(() => { if (isOnTop) { dispatch(sendLogGNB(LOG_MENU.SEARCH_SEARCH)); + } else { + // โญ VoicePanel์ด ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ๊ฐ€๋ฉด voice framework ์ž๋™ ํ•ด์ œ + // (SearchPanel ๋“ฑ ๋‹ค๋ฅธ ํŒจ๋„๊ณผ ์ถฉ๋Œ ๋ฐฉ์ง€) + console.log('[VoicePanel] Going to background, unregistering voice framework'); + dispatch(unregisterVoiceFramework()); } }, [isOnTop, dispatch]); diff --git a/com.twin.app.shoptime/vui-test.1.md b/com.twin.app.shoptime/vui-test.1.md new file mode 100644 index 00000000..1cec909b --- /dev/null +++ b/com.twin.app.shoptime/vui-test.1.md @@ -0,0 +1,309 @@ +1-1 Can you recommend a 4K TV with Dolby Atmos support under $1,500? + + +์˜ค๋ฒ„๋ ˆ์ด ์ธ์‹ : ํ™•์ธ + +STT Text : Can you recommend a 4K TV with Dolby Atmos support under $1,500? 
+ +Event Logs : + +RESPONSE : + +{ + "subscribed": true, + "command": "setContext", + "returnValue": true, + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-0036" +} + +COMMAND : + +{ + "command": "setContext", + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-0036" +} + +REQUEST : +{ + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-0036", + "intentCount": 3, + "intents": [ + { + "intent": "UseIME", + "supportAsrOnly": true + }, + { + "intent": "Select", + "supportOrdinal": true, + "items": [ + { + "itemId": "voice-search-button", + "value": [ + "Search", + "Search Products", + "Find Items" + ], + "title": "Search" + }, + { + "itemId": "voice-cart-button", + "value": [ + "Cart", + "Shopping Cart", + "My Cart" + ], + "title": "Cart" + }, + { + "itemId": "voice-home-button", + "value": [ + "Home", + "Go Home", + "Main Page" + ], + "title": "Home" + }, + { + "itemId": "voice-mypage-button", + "value": [ + "My Page", + "Account", + "Profile" + ], + "title": "My Page" + } + ] + }, + { + "intent": "Scroll", + "supportOrdinal": false, + "items": [ + { + "itemId": "voice-scroll-up", + "value": [ + "Scroll Up", + "Page Up" + ] + }, + { + "itemId": "voice-scroll-down", + "value": [ + "Scroll Down", + "Page Down" + ] + } + ] + } + ] +} + +RESPONSE : + +{ + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-0036", + "intentCount": 3, + "intents": [ + { + "intent": "UseIME", + "supportAsrOnly": true + }, + { + "intent": "Select", + "supportOrdinal": true, + "items": [ + { + "itemId": "voice-search-button", + "value": [ + "Search", + "Search Products", + "Find Items" + ], + "title": "Search" + }, + { + "itemId": "voice-cart-button", + "value": [ + "Cart", + "Shopping Cart", + "My Cart" + ], + "title": "Cart" + }, + { + "itemId": "voice-home-button", + "value": [ + "Home", + "Go Home", + "Main Page" + ], + "title": "Home" + }, + { + "itemId": "voice-mypage-button", + "value": [ + "My Page", + "Account", + "Profile" + ], + "title": "My Page" + } + ] + }, + { + "intent": "Scroll", + "supportOrdinal": false, + "items": [ + { + "itemId": "voice-scroll-up", + "value": [ + "Scroll Up", + "Page Up" + ] + }, + { + "itemId": "voice-scroll-down", + "value": [ + "Scroll Down", + "Page Down" + ] + } + ] + } + ] +} + + +ACTION : + +{ + "message": "Context set successfully. Press the MIC button on remote and speak.", + "nextStep": "Waiting for performAction event...", + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-0036" +} + + + + + + + + +๊ทธ๋Ÿฌ๋‚˜ ๋‘๋ฒˆ์งธ ์‹œ๋„์—์„œ + + + +STT Text : No STT text received yet. Speak after registering to see the result. 
+ +RESPONSE : + +{ + "subscribed": true, + "command": "setContext", + "returnValue": true, + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-003b" +} + +COMMAND : +{ + "command": "setContext", + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-003b" +} + + +REQUEST : +{ + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-003b", + "intentCount": 3, + "intents": [ + { + "intent": "UseIME", + "supportAsrOnly": true + }, + { + "intent": "Select", + "supportOrdinal": true, + "items": [ + { + "itemId": "voice-search-button", + "value": [ + "Search", + "Search Products", + "Find Items" + ], + "title": "Search" + }, + { + "itemId": "voice-cart-button", + "value": [ + "Cart", + "Shopping Cart", + "My Cart" + ], + "title": "Cart" + }, + { + "itemId": "voice-home-button", + "value": [ + "Home", + "Go Home", + "Main Page" + ], + "title": "Home" + }, + { + "itemId": "voice-mypage-button", + "value": [ + "My Page", + "Account", + "Profile" + ], + "title": "My Page" + } + ] + }, + { + "intent": "Scroll", + "supportOrdinal": false, + "items": [ + { + "itemId": "voice-scroll-up", + "value": [ + "Scroll Up", + "Page Up" + ] + }, + { + "itemId": "voice-scroll-down", + "value": [ + "Scroll Down", + "Page Down" + ] + } + ] + } + ] +} + +RESPONSE : +{ + "returnValue": true +} + +ACTION : +{ + "message": "Context set successfully. Press the MIC button on remote and speak.", + "nextStep": "Waiting for performAction event...", + "voiceTicket": "13799ec6-fd1f-4cdd-b02e-34d58fe5f34b-68ef-003b" +} + +ERROR : +{ + "message": "performAction event was not received within 15 seconds after setContext.", + "possibleReasons": [ + "1. Did you press the MIC button on the remote control?", + "2. Did you speak after pressing the MIC button?", + "3. UseIME intent might not be supported on this webOS version", + "4. Voice framework might not be routing events correctly" + ], + "suggestion": "Try pressing the remote MIC button and speaking clearly. Check VoicePanel logs for performAction event." +} \ No newline at end of file diff --git a/com.twin.app.shoptime/web-speech.md b/com.twin.app.shoptime/web-speech.md new file mode 100644 index 00000000..a46291d9 --- /dev/null +++ b/com.twin.app.shoptime/web-speech.md @@ -0,0 +1,1417 @@ +# Web Speech API ๊ตฌํ˜„ ๊ฐ€์ด๋“œ (Plan B) + +> **ํ”„๋กœ์ ํŠธ**: ShopTime webOS TV Application +> **๋ชฉ์ **: webOS VUI Framework ๋Œ€์•ˆ์œผ๋กœ Web Speech API ๊ตฌํ˜„ +> **ํ™˜๊ฒฝ**: Chrome 68, React 16.7, Enact ํ”„๋ ˆ์ž„์›Œํฌ, Redux 3.7.2 +> **์ž‘์„ฑ์ผ**: 2025-10-16 + +--- + +## ๐Ÿ“‹ ๊ฐœ์š” + +์ด ๋ฌธ์„œ๋Š” ํ˜„์žฌ ๊ตฌํ˜„ ์ค‘์ธ **webOS VUI Framework(Plan A)**์˜ ๋Œ€์•ˆ์œผ๋กœ **Web Speech API(Plan B)**๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ตฌํ˜„ ๋ฐฉ๋ฒ•์„ ์„ค๋ช…ํ•ฉ๋‹ˆ๋‹ค. + +### Plan A vs Plan B ๋น„๊ต + +| ๊ตฌ๋ถ„ | Plan A (VUI Framework) | Plan B (Web Speech API) | +|-----|----------------------|------------------------| +| **API** | webOS Voice Conductor Service | ๋ธŒ๋ผ์šฐ์ € ๋„ค์ดํ‹ฐ๋ธŒ Web Speech API | +| **์˜์กด์„ฑ** | webOS ํ”Œ๋žซํผ ์ „์šฉ | ๋ฒ”์šฉ ์›น ๋ธŒ๋ผ์šฐ์ € | +| **๋„คํŠธ์›Œํฌ** | webOS ์Œ์„ฑ ์„œ๋ฒ„ | Google ์Œ์„ฑ ์„œ๋ฒ„ | +| **ํ˜ธํ™˜์„ฑ** | webOS TV๋งŒ | Chrome 68+ ๋ชจ๋“  ํ”Œ๋žซํผ | +| **๊ถŒํ•œ** | PalmSystem API | navigator.mediaDevices | +| **์žฅ์ ** | TV ํ™˜๊ฒฝ ์ตœ์ ํ™”, ๋ฆฌ๋ชจ์ปจ ํ†ตํ•ฉ | ํฌ๋กœ์Šค ํ”Œ๋žซํผ, ๊ฐœ๋ฐœ ํŽธ์˜์„ฑ | +| **๋‹จ์ ** | webOS ์ „์šฉ, ๋ณต์žกํ•œ ๊ตฌ์กฐ | ๋„คํŠธ์›Œํฌ ์˜์กด, TV ํ™˜๊ฒฝ ์ตœ์ ํ™” ํ•„์š” | + +--- + +## ๐Ÿ” ํ™˜๊ฒฝ ๋ถ„์„ + +### 1. 
ํ”„๋กœ์ ํŠธ ํ™˜๊ฒฝ + +```json +{ + "platform": "webOS TV", + "browser": "Chrome 68", + "react": "16.7.0", + "redux": "3.7.2", + "enact": "3.3.0" +} +``` + +### 2. Chrome 68์˜ Web Speech API ์ง€์› + +Chrome 68(2018๋…„ 7์›” ์ถœ์‹œ)์€ **Web Speech API๋ฅผ ์™„๋ฒฝํ•˜๊ฒŒ ์ง€์›**ํ•ฉ๋‹ˆ๋‹ค: + +- โœ… `SpeechRecognition` / `webkitSpeechRecognition` +- โœ… `SpeechRecognitionEvent` +- โœ… `SpeechRecognitionResult` +- โœ… ํ•œ๊ตญ์–ด ๋ฐ ๋‹ค๊ตญ์–ด ์ง€์› +- โœ… Continuous / Interim Results +- โœ… MaxAlternatives ์ง€์› + +**ํ˜ธํ™˜์„ฑ ํ™•์ธ:** + +```javascript +// Chrome 68์—์„œ์˜ Speech Recognition API ์ง€์› ํ™•์ธ +const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; +const isSupported = !!SpeechRecognition; +console.log('Web Speech API ์ง€์›:', isSupported); // true +``` + +### 3. ๊ธฐ์กด ๊ตฌ์กฐ ๋ถ„์„ + +**ํ˜„์žฌ VUI Framework ๊ตฌ์กฐ:** +``` +SearchPanel.new.jsx + โ†“ (uses) +useSearchVoice Hook + โ†“ (dispatches) +voiceActions.js (registerVoiceFramework, sendVoiceIntents) + โ†“ (updates) +voiceReducer.js (lastSTTText, sttTimestamp) + โ†“ (displays) +VoiceInputOverlay.jsx (PROMPT/LISTENING modes) +``` + +**Web Speech API ๊ตฌ์กฐ (Plan B):** +``` +SearchPanel.new.jsx + โ†“ (uses) +useWebSpeech Hook (NEW) + โ†“ (uses) +WebSpeechService.js (NEW) + โ†“ (updates) +voiceReducer.js (๊ธฐ์กด Redux ํ™œ์šฉ) + โ†“ (displays) +VoiceInputOverlay.jsx (๊ธฐ์กด ์ปดํฌ๋„ŒํŠธ ์žฌ์‚ฌ์šฉ) +``` + +--- + +## ๐Ÿ—๏ธ ๊ตฌํ˜„ ์•„ํ‚คํ…์ฒ˜ + +### ํŒŒ์ผ ๊ตฌ์กฐ + +``` +src/ +โ”œโ”€โ”€ services/ +โ”‚ โ””โ”€โ”€ webSpeech/ +โ”‚ โ”œโ”€โ”€ WebSpeechService.js # Web Speech API ๋ž˜ํผ ์„œ๋น„์Šค (NEW) +โ”‚ โ””โ”€โ”€ webSpeechConfig.js # ์–ธ์–ด ๋ฐ ์„ค์ • (NEW) +โ”œโ”€โ”€ actions/ +โ”‚ โ””โ”€โ”€ webSpeechActions.js # Web Speech Redux ์•ก์…˜ (NEW) +โ”œโ”€โ”€ hooks/ +โ”‚ โ””โ”€โ”€ useWebSpeech.js # Web Speech Hook (NEW) +โ”œโ”€โ”€ reducers/ +โ”‚ โ””โ”€โ”€ voiceReducer.js # ๊ธฐ์กด ํ™œ์šฉ (์•ฝ๊ฐ„ ์ˆ˜์ •) +โ””โ”€โ”€ views/ + โ””โ”€โ”€ SearchPanel/ + โ”œโ”€โ”€ SearchPanel.new.jsx # ๊ธฐ์กด ํ™œ์šฉ (useWebSpeech ํ†ตํ•ฉ) + โ””โ”€โ”€ VoiceInputOverlay/ + โ””โ”€โ”€ VoiceInputOverlay.jsx # ๊ธฐ์กด ์žฌ์‚ฌ์šฉ +``` + +--- + +## ๐Ÿ“ ๋‹จ๊ณ„๋ณ„ ๊ตฌํ˜„ + +### Step 1: WebSpeechService ๊ตฌํ˜„ + +**ํŒŒ์ผ**: `src/services/webSpeech/WebSpeechService.js` + +```javascript +// src/services/webSpeech/WebSpeechService.js + +/** + * Web Speech API ๋ž˜ํผ ์„œ๋น„์Šค + * - SpeechRecognition ๊ฐ์ฒด ๊ด€๋ฆฌ + * - ์ด๋ฒคํŠธ ํ•ธ๋“ค๋ง + * - ์ƒํƒœ ๊ด€๋ฆฌ + */ +class WebSpeechService { + constructor() { + this.recognition = null; + this.isSupported = this.checkSupport(); + this.isListening = false; + this.callbacks = { + onResult: null, + onError: null, + onStart: null, + onEnd: null, + }; + } + + /** + * Web Speech API ์ง€์› ์—ฌ๋ถ€ ํ™•์ธ + */ + checkSupport() { + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + return !!SpeechRecognition; + } + + /** + * Speech Recognition ์ดˆ๊ธฐํ™” + * @param {Object} config - ์„ค์ • ์˜ต์…˜ + * @param {string} config.lang - ์–ธ์–ด ์ฝ”๋“œ (์˜ˆ: 'ko-KR', 'en-US') + * @param {boolean} config.continuous - ์—ฐ์† ์ธ์‹ ์—ฌ๋ถ€ + * @param {boolean} config.interimResults - ์ค‘๊ฐ„ ๊ฒฐ๊ณผ ํ‘œ์‹œ ์—ฌ๋ถ€ + * @param {number} config.maxAlternatives - ๋Œ€์ฒด ๊ฒฐ๊ณผ ์ตœ๋Œ€ ๊ฐœ์ˆ˜ + */ + initialize(config = {}) { + if (!this.isSupported) { + console.error('[WebSpeech] Speech Recognition not supported'); + return false; + } + + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + this.recognition = new SpeechRecognition(); + + // ์„ค์ • ์ ์šฉ 
+ this.recognition.lang = config.lang || 'ko-KR'; + this.recognition.continuous = config.continuous || false; + this.recognition.interimResults = config.interimResults !== false; // default true + this.recognition.maxAlternatives = config.maxAlternatives || 1; + + // ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ๋“ฑ๋ก + this.setupEventHandlers(); + + console.log('[WebSpeech] Initialized with config:', { + lang: this.recognition.lang, + continuous: this.recognition.continuous, + interimResults: this.recognition.interimResults, + }); + + return true; + } + + /** + * ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ์„ค์ • + */ + setupEventHandlers() { + // ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + this.recognition.onstart = () => { + console.log('[WebSpeech] Recognition started'); + this.isListening = true; + if (this.callbacks.onStart) { + this.callbacks.onStart(); + } + }; + + // ์Œ์„ฑ ์ธ์‹ ๊ฒฐ๊ณผ + this.recognition.onresult = (event) => { + const results = event.results; + const lastResult = results[results.length - 1]; + const transcript = lastResult[0].transcript; + const isFinal = lastResult.isFinal; + const confidence = lastResult[0].confidence; + + console.log('[WebSpeech] Result:', { transcript, isFinal, confidence }); + + if (this.callbacks.onResult) { + this.callbacks.onResult({ + transcript, + isFinal, + confidence, + alternatives: Array.from(lastResult).map((alt) => ({ + transcript: alt.transcript, + confidence: alt.confidence, + })), + }); + } + }; + + // ์—๋Ÿฌ ์ฒ˜๋ฆฌ + this.recognition.onerror = (event) => { + console.error('[WebSpeech] Recognition error:', event.error); + this.isListening = false; + + if (this.callbacks.onError) { + this.callbacks.onError({ + error: event.error, + message: this.getErrorMessage(event.error), + }); + } + }; + + // ์Œ์„ฑ ์ธ์‹ ์ข…๋ฃŒ + this.recognition.onend = () => { + console.log('[WebSpeech] Recognition ended'); + this.isListening = false; + + if (this.callbacks.onEnd) { + this.callbacks.onEnd(); + } + }; + } + + /** + * ์—๋Ÿฌ ๋ฉ”์‹œ์ง€ ๋ฒˆ์—ญ + */ + getErrorMessage(error) { + const errorMessages = { + 'no-speech': '์Œ์„ฑ์ด ๊ฐ์ง€๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.', + 'audio-capture': '๋งˆ์ดํฌ์— ์ ‘๊ทผํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.', + 'not-allowed': '๋งˆ์ดํฌ ์‚ฌ์šฉ ๊ถŒํ•œ์ด ๊ฑฐ๋ถ€๋˜์—ˆ์Šต๋‹ˆ๋‹ค.', + 'network': '๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค.', + 'aborted': '์Œ์„ฑ ์ธ์‹์ด ์ค‘๋‹จ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.', + 'service-not-allowed': '์Œ์„ฑ ์ธ์‹ ์„œ๋น„์Šค๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.', + }; + + return errorMessages[error] || `์•Œ ์ˆ˜ ์—†๋Š” ์˜ค๋ฅ˜: ${error}`; + } + + /** + * ์ฝœ๋ฐฑ ๋“ฑ๋ก + * @param {string} event - ์ด๋ฒคํŠธ ์ด๋ฆ„ ('result', 'error', 'start', 'end') + * @param {Function} callback - ์ฝœ๋ฐฑ ํ•จ์ˆ˜ + */ + on(event, callback) { + const eventKey = `on${event.charAt(0).toUpperCase() + event.slice(1)}`; + if (this.callbacks.hasOwnProperty(eventKey)) { + this.callbacks[eventKey] = callback; + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + */ + start() { + if (!this.recognition) { + console.error('[WebSpeech] Recognition not initialized. 
Call initialize() first.'); + return false; + } + + if (this.isListening) { + console.warn('[WebSpeech] Already listening'); + return false; + } + + try { + this.recognition.start(); + console.log('[WebSpeech] Starting recognition...'); + return true; + } catch (error) { + console.error('[WebSpeech] Failed to start:', error); + return false; + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์ค‘์ง€ + */ + stop() { + if (!this.recognition) { + return; + } + + if (!this.isListening) { + console.warn('[WebSpeech] Not listening'); + return; + } + + try { + this.recognition.stop(); + console.log('[WebSpeech] Stopping recognition...'); + } catch (error) { + console.error('[WebSpeech] Failed to stop:', error); + } + } + + /** + * ์Œ์„ฑ ์ธ์‹ ์ค‘๋‹จ (์ฆ‰์‹œ ์ข…๋ฃŒ) + */ + abort() { + if (this.recognition) { + this.recognition.abort(); + this.isListening = false; + } + } + + /** + * ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ + */ + cleanup() { + this.abort(); + this.callbacks = { + onResult: null, + onError: null, + onStart: null, + onEnd: null, + }; + } +} + +// Singleton ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ +const webSpeechService = new WebSpeechService(); + +export default webSpeechService; +``` + +--- + +### Step 2: Redux ์•ก์…˜ ์ถ”๊ฐ€ + +**ํŒŒ์ผ**: `src/actions/webSpeechActions.js` + +```javascript +// src/actions/webSpeechActions.js + +import { types } from './actionTypes'; +import webSpeechService from '../services/webSpeech/WebSpeechService'; + +/** + * Web Speech ์ดˆ๊ธฐํ™” ๋ฐ ์‹œ์ž‘ + * @param {Object} config - ์–ธ์–ด ๋ฐ ์„ค์ • + */ +export const initializeWebSpeech = (config = {}) => (dispatch) => { + console.log('[WebSpeechActions] Initializing Web Speech...'); + + // ์ง€์› ์—ฌ๋ถ€ ํ™•์ธ + if (!webSpeechService.isSupported) { + const error = 'Web Speech API is not supported in this browser'; + console.error('[WebSpeechActions]', error); + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: { error, message: error }, + }); + return false; + } + + // ์ดˆ๊ธฐํ™” + const initialized = webSpeechService.initialize({ + lang: config.lang || 'ko-KR', + continuous: config.continuous || false, + interimResults: config.interimResults !== false, + maxAlternatives: config.maxAlternatives || 1, + }); + + if (!initialized) { + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: { error: 'Failed to initialize', message: 'Failed to initialize Web Speech' }, + }); + return false; + } + + // ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ๋“ฑ๋ก + webSpeechService.on('start', () => { + dispatch({ + type: types.WEB_SPEECH_START, + }); + }); + + webSpeechService.on('result', (result) => { + console.log('[WebSpeechActions] Result:', result); + + // Interim ๊ฒฐ๊ณผ (์ค‘๊ฐ„ ๊ฒฐ๊ณผ) + if (!result.isFinal) { + dispatch({ + type: types.WEB_SPEECH_INTERIM_RESULT, + payload: result.transcript, + }); + } + // Final ๊ฒฐ๊ณผ (์ตœ์ข… ๊ฒฐ๊ณผ) + else { + dispatch({ + type: types.VOICE_STT_TEXT_RECEIVED, // ๊ธฐ์กด VUI์™€ ๋™์ผํ•œ ์•ก์…˜ ์‚ฌ์šฉ + payload: result.transcript, + }); + } + }); + + webSpeechService.on('error', (errorInfo) => { + console.error('[WebSpeechActions] Error:', errorInfo); + dispatch({ + type: types.WEB_SPEECH_ERROR, + payload: errorInfo, + }); + }); + + webSpeechService.on('end', () => { + dispatch({ + type: types.WEB_SPEECH_END, + }); + }); + + dispatch({ + type: types.WEB_SPEECH_INITIALIZED, + }); + + return true; +}; + +/** + * ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + */ +export const startWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Starting recognition...'); + const started = webSpeechService.start(); + + if (!started) { + dispatch({ + type: types.WEB_SPEECH_ERROR, + 
payload: { error: 'Failed to start', message: 'Failed to start recognition' }, + }); + } +}; + +/** + * ์Œ์„ฑ ์ธ์‹ ์ค‘์ง€ + */ +export const stopWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Stopping recognition...'); + webSpeechService.stop(); +}; + +/** + * ์Œ์„ฑ ์ธ์‹ ์ค‘๋‹จ + */ +export const abortWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Aborting recognition...'); + webSpeechService.abort(); +}; + +/** + * ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ + */ +export const cleanupWebSpeech = () => (dispatch) => { + console.log('[WebSpeechActions] Cleaning up...'); + webSpeechService.cleanup(); + dispatch({ + type: types.WEB_SPEECH_CLEANUP, + }); +}; +``` + +--- + +### Step 3: actionTypes ์—…๋ฐ์ดํŠธ + +**ํŒŒ์ผ**: `src/actions/actionTypes.js` + +๊ธฐ์กด ํŒŒ์ผ์— ๋‹ค์Œ ํƒ€์ž…๋“ค์„ ์ถ”๊ฐ€: + +```javascript +// src/actions/actionTypes.js + +export const types = { + // ... ๊ธฐ์กด types + + // Web Speech API ๊ด€๋ จ + WEB_SPEECH_INITIALIZED: 'WEB_SPEECH_INITIALIZED', + WEB_SPEECH_START: 'WEB_SPEECH_START', + WEB_SPEECH_INTERIM_RESULT: 'WEB_SPEECH_INTERIM_RESULT', + WEB_SPEECH_END: 'WEB_SPEECH_END', + WEB_SPEECH_ERROR: 'WEB_SPEECH_ERROR', + WEB_SPEECH_CLEANUP: 'WEB_SPEECH_CLEANUP', + + // ๊ธฐ์กด VOICE_STT_TEXT_RECEIVED๋„ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉ (ํ˜ธํ™˜์„ฑ) +}; +``` + +--- + +### Step 4: voiceReducer ์—…๋ฐ์ดํŠธ + +**ํŒŒ์ผ**: `src/reducers/voiceReducer.js` + +๊ธฐ์กด voiceReducer์— Web Speech ์ƒํƒœ ์ถ”๊ฐ€: + +```javascript +// src/reducers/voiceReducer.js (์ˆ˜์ •) + +import { types } from '../actions/actionTypes'; + +const initialState = { + // ... ๊ธฐ์กด VUI Framework state + + // Web Speech API state (NEW) + webSpeech: { + isInitialized: false, + isListening: false, + interimText: null, + error: null, + }, + + // STT text state (๊ธฐ์กด - ๋‘ ๋ฐฉ์‹ ๋ชจ๋‘ ๊ณต์œ ) + lastSTTText: null, + sttTimestamp: null, +}; + +export const voiceReducer = (state = initialState, action) => { + switch (action.type) { + // ... 
๊ธฐ์กด VUI Framework cases + + // Web Speech API cases (NEW) + case types.WEB_SPEECH_INITIALIZED: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isInitialized: true, + error: null, + }, + }; + + case types.WEB_SPEECH_START: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: true, + interimText: null, + error: null, + }, + }; + + case types.WEB_SPEECH_INTERIM_RESULT: + return { + ...state, + webSpeech: { + ...state.webSpeech, + interimText: action.payload, + }, + }; + + case types.WEB_SPEECH_END: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: false, + interimText: null, + }, + }; + + case types.WEB_SPEECH_ERROR: + return { + ...state, + webSpeech: { + ...state.webSpeech, + isListening: false, + error: action.payload, + }, + }; + + case types.WEB_SPEECH_CLEANUP: + return { + ...state, + webSpeech: { + isInitialized: false, + isListening: false, + interimText: null, + error: null, + }, + }; + + // VOICE_STT_TEXT_RECEIVED๋Š” ๊ธฐ์กด ๊ทธ๋Œ€๋กœ ์œ ์ง€ (VUI์™€ Web Speech ๋ชจ๋‘ ์‚ฌ์šฉ) + case types.VOICE_STT_TEXT_RECEIVED: + return { + ...state, + lastSTTText: action.payload, + sttTimestamp: new Date().toISOString(), + }; + + default: + return state; + } +}; + +export default voiceReducer; +``` + +--- + +### Step 5: useWebSpeech Hook ๊ตฌํ˜„ + +**ํŒŒ์ผ**: `src/hooks/useWebSpeech.js` + +```javascript +// src/hooks/useWebSpeech.js + +import { useEffect, useCallback } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { + initializeWebSpeech, + startWebSpeech, + stopWebSpeech, + cleanupWebSpeech, +} from '../actions/webSpeechActions'; + +/** + * Web Speech API Hook + * - SearchPanel์—์„œ ์‚ฌ์šฉํ•˜๋Š” ์Œ์„ฑ ์ž…๋ ฅ Hook + * - VoiceInputOverlay์™€ ํ†ตํ•ฉ + * + * @param {boolean} isActive - Hook ํ™œ์„ฑํ™” ์—ฌ๋ถ€ (์˜ˆ: SearchPanel์ด foreground์ธ์ง€) + * @param {function} onSTTText - STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์ฝœ๋ฐฑ + * @param {Object} config - Web Speech ์„ค์ • + */ +export const useWebSpeech = (isActive, onSTTText, config = {}) => { + const dispatch = useDispatch(); + const { lastSTTText, sttTimestamp, webSpeech } = useSelector((state) => state.voice); + + // Web Speech ์ดˆ๊ธฐํ™” + useEffect(() => { + if (isActive) { + console.log('[useWebSpeech] Initializing Web Speech API'); + dispatch( + initializeWebSpeech({ + lang: config.lang || 'ko-KR', + continuous: config.continuous || false, + interimResults: config.interimResults !== false, + }) + ); + } else { + console.log('[useWebSpeech] Cleaning up Web Speech API'); + dispatch(cleanupWebSpeech()); + } + + // Cleanup on unmount + return () => { + dispatch(cleanupWebSpeech()); + }; + }, [isActive, dispatch]); + + // STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์ฒ˜๋ฆฌ + useEffect(() => { + if (lastSTTText && sttTimestamp) { + console.log('[useWebSpeech] STT text received:', lastSTTText); + if (onSTTText) { + onSTTText(lastSTTText); + } + } + }, [lastSTTText, sttTimestamp, onSTTText]); + + // ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘/์ค‘์ง€ ํ•จ์ˆ˜ ๋ฐ˜ํ™˜ + const startListening = useCallback(() => { + dispatch(startWebSpeech()); + }, [dispatch]); + + const stopListening = useCallback(() => { + dispatch(stopWebSpeech()); + }, [dispatch]); + + return { + isInitialized: webSpeech.isInitialized, + isListening: webSpeech.isListening, + interimText: webSpeech.interimText, + error: webSpeech.error, + startListening, + stopListening, + }; +}; + +export default useWebSpeech; +``` + +--- + +### Step 6: SearchPanel ํ†ตํ•ฉ + +**ํŒŒ์ผ**: `src/views/SearchPanel/SearchPanel.new.jsx` + +๊ธฐ์กด SearchPanel์—์„œ `useSearchVoice` 
๋Œ€์‹  `useWebSpeech` ์‚ฌ์šฉ: + +```javascript +// src/views/SearchPanel/SearchPanel.new.jsx (์ˆ˜์ •) + +import React, { useCallback, useState } from 'react'; +import { useDispatch } from 'react-redux'; +// import { useSearchVoice } from '../../hooks/useSearchVoice'; // VUI Framework (Plan A) +import { useWebSpeech } from '../../hooks/useWebSpeech'; // Web Speech API (Plan B) +import VoiceInputOverlay, { VOICE_MODES } from './VoiceInputOverlay/VoiceInputOverlay'; + +export default function SearchPanel({ panelInfo, isOnTop, spotlightId }) { + const dispatch = useDispatch(); + const [searchQuery, setSearchQuery] = useState(panelInfo.searchVal || ''); + const [isVoiceOverlayVisible, setIsVoiceOverlayVisible] = useState(false); + const [voiceMode, setVoiceMode] = useState(VOICE_MODES.PROMPT); + + // STT ํ…์ŠคํŠธ ์ˆ˜์‹  ํ•ธ๋“ค๋Ÿฌ + const handleSTTText = useCallback( + (sttText) => { + console.log('[SearchPanel] STT text received:', sttText); + + // 1. searchQuery ์—…๋ฐ์ดํŠธ + setSearchQuery(sttText); + + // 2. ShopperHouse ๊ฒ€์ƒ‰ ์‹คํ–‰ + if (sttText && sttText.trim()) { + dispatch(getShopperHouseSearch(sttText.trim())); + } + + // 3. Voice Overlay ๋‹ซ๊ธฐ + setTimeout(() => { + setIsVoiceOverlayVisible(false); + }, 500); + }, + [dispatch] + ); + + // โญ Web Speech Hook ํ™œ์„ฑํ™” (Plan B) + const { isListening, interimText, startListening, stopListening } = useWebSpeech( + isOnTop, + handleSTTText, + { + lang: 'ko-KR', // ํ•œ๊ตญ์–ด + continuous: false, // ํ•œ ๋ฒˆ๋งŒ ์ธ์‹ + interimResults: true, // ์ค‘๊ฐ„ ๊ฒฐ๊ณผ ํ‘œ์‹œ + } + ); + + // ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ + const onClickMic = useCallback(() => { + if (!isOnTop) return; + + // Voice Overlay ์—ด๊ธฐ + setVoiceMode(VOICE_MODES.PROMPT); + setIsVoiceOverlayVisible(true); + }, [isOnTop]); + + // Voice Overlay ๋‹ซ๊ธฐ + const handleVoiceOverlayClose = useCallback(() => { + setIsVoiceOverlayVisible(false); + // ์Œ์„ฑ ์ธ์‹ ์ค‘๋‹จ + if (isListening) { + stopListening(); + } + }, [isListening, stopListening]); + + return ( + + + {/* ๊ธฐ์กด SearchPanel UI */} + handleSearchSubmit(searchQuery)} + // ... 
+ /> + + {/* ๋งˆ์ดํฌ ๋ฒ„ํŠผ */} + + Microphone + + + {/* Voice Overlay */} + setSearchQuery(e.value)} + suggestions={voiceSuggestions} + // Web Speech ์ „์šฉ props + onStartListening={startListening} + isListening={isListening} + interimText={interimText} + /> + + + ); +} +``` + +--- + +### Step 7: VoiceInputOverlay ์ˆ˜์ • + +**ํŒŒ์ผ**: `src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx` + +๊ธฐ์กด VoiceInputOverlay์— Web Speech ์ง€์› ์ถ”๊ฐ€: + +```javascript +// src/views/SearchPanel/VoiceInputOverlay/VoiceInputOverlay.jsx (์ˆ˜์ •) + +const VoiceInputOverlay = ({ + isVisible, + onClose, + mode, + suggestions, + searchQuery, + onSearchChange, + onSearchSubmit, + // โญ Web Speech ์ „์šฉ props (NEW) + onStartListening, + isListening, + interimText, +}) => { + const dispatch = useDispatch(); + const [currentMode, setCurrentMode] = useState(mode); + + // Redux์—์„œ voice ์ƒํƒœ ๊ฐ€์ ธ์˜ค๊ธฐ (๊ธฐ์กด VUI Framework ๋˜๋Š” Web Speech) + const { lastSTTText, sttTimestamp } = useSelector((state) => state.voice); + + // STT ํ…์ŠคํŠธ ์ˆ˜์‹  ์‹œ ์ฒ˜๋ฆฌ (๊ธฐ์กด๊ณผ ๋™์ผ) + useEffect(() => { + if (lastSTTText && sttTimestamp && isVisible) { + console.log('[VoiceInputOverlay] STT text received:', lastSTTText); + + // ์ž…๋ ฅ์ฐฝ์— ํ…์ŠคํŠธ ํ‘œ์‹œ + if (onSearchChange) { + onSearchChange({ value: lastSTTText }); + } + + // listening ๋ชจ๋“œ๋กœ ์ „ํ™˜ + setCurrentMode(VOICE_MODES.LISTENING); + + // 1์ดˆ ํ›„ ์ž๋™ ๋‹ซ๊ธฐ + setTimeout(() => { + onClose(); + }, 1000); + } + }, [lastSTTText, sttTimestamp, isVisible, onSearchChange, onClose]); + + // โญ Interim ํ…์ŠคํŠธ ํ‘œ์‹œ (Web Speech ์ „์šฉ) + useEffect(() => { + if (interimText && isVisible) { + console.log('[VoiceInputOverlay] Interim text:', interimText); + // ์ž…๋ ฅ์ฐฝ์— ์ค‘๊ฐ„ ๊ฒฐ๊ณผ ํ‘œ์‹œ (ํšŒ์ƒ‰ ํ…์ŠคํŠธ ๋“ฑ์œผ๋กœ ํ‘œ์‹œ ๊ฐ€๋Šฅ) + if (onSearchChange) { + onSearchChange({ value: interimText }); + } + } + }, [interimText, isVisible, onSearchChange]); + + // ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ ํ•ธ๋“ค๋Ÿฌ + const handleMicClick = useCallback( + (e) => { + e?.stopPropagation(); + + if (currentMode === VOICE_MODES.PROMPT) { + // โญ Web Speech API ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + console.log('[VoiceInputOverlay] Starting Web Speech recognition'); + setCurrentMode(VOICE_MODES.LISTENING); + + if (onStartListening) { + onStartListening(); // Web Speech startListening ํ˜ธ์ถœ + } + } else if (currentMode === VOICE_MODES.LISTENING) { + // listening ๋ชจ๋“œ์—์„œ ํด๋ฆญ ์‹œ -> ์ข…๋ฃŒ + console.log('[VoiceInputOverlay] Closing from LISTENING mode'); + onClose(); + } + }, + [currentMode, onClose, onStartListening] + ); + + return ( + + {/* ... ๊ธฐ์กด UI ... */} + + {/* Web Speech ์ƒํƒœ ํ‘œ์‹œ (๋””๋ฒ„๊น…์šฉ) */} + {process.env.NODE_ENV === 'development' && ( +
+ Web Speech: {isListening ? '🎤 Listening' : '⏸ Ready'} + {interimText &&
Interim: {interimText}
} +
+ )} + + {/* ๋งˆ์ดํฌ ๋ฒ„ํŠผ */} + +
+ Microphone +
+ {isListening && ( + + + + )} +
+ + {/* ๋ชจ๋“œ๋ณ„ ์ปจํ…์ธ  */} + {renderModeContent()} +
+ ); +}; +``` + +--- + +## ๐Ÿงช ํ…Œ์ŠคํŠธ ๊ฐ€์ด๋“œ + +### 1. ๊ฐœ๋ฐœ ํ™˜๊ฒฝ ํ…Œ์ŠคํŠธ (Chrome ๋ธŒ๋ผ์šฐ์ €) + +**โš ๏ธ ์ฃผ์˜**: ๊ฐœ๋ฐœ ํ™˜๊ฒฝ์—์„œ๋Š” ๋งˆ์ดํฌ ๊ถŒํ•œ ํŒ์—…์ด ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค (webOS TV์™€ ๋‹ค๋ฆ„) + +```bash +# 1. ํ”„๋กœ์ ํŠธ ์‹คํ–‰ +npm run serve + +# 2. Chrome ๋ธŒ๋ผ์šฐ์ €์—์„œ ์ ‘์† +# http://localhost:8080 + +# 3. ์ฝ˜์†”์—์„œ Web Speech API ์ง€์› ํ™•์ธ +console.log('Web Speech API ์ง€์›:', !!(window.SpeechRecognition || window.webkitSpeechRecognition)); + +# 4. SearchPanel ์—ด๊ธฐ +# 5. ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ +# 6. โš ๏ธ ๋ธŒ๋ผ์šฐ์ €์—์„œ ๋งˆ์ดํฌ ๊ถŒํ•œ ํ—ˆ์šฉ ํŒ์—… ํ‘œ์‹œ๋จ (์ตœ์ดˆ 1ํšŒ) +# - "Allow" ๋ฒ„ํŠผ ํด๋ฆญ +# 7. ์Œ์„ฑ ๋ฐœํ™” ("์•„์ดํฐ" ๋“ฑ) +# 8. ์ฝ˜์†”์—์„œ STT ๊ฒฐ๊ณผ ํ™•์ธ: +# - [WebSpeech] Result: { transcript: "์•„์ดํฐ", isFinal: true, confidence: 0.9 } +# - [WebSpeechActions] Result: ... +# - [useWebSpeech] STT text received: ์•„์ดํฐ +``` + +**๊ฐœ๋ฐœ ํ™˜๊ฒฝ ๋งˆ์ดํฌ ๊ถŒํ•œ ๋ฌธ์ œ ํ•ด๊ฒฐ:** + +```javascript +// Chrome ๋ธŒ๋ผ์šฐ์ €์—์„œ ๋งˆ์ดํฌ ๊ถŒํ•œ์ด ์ฐจ๋‹จ๋˜์—ˆ์„ ๊ฒฝ์šฐ: +// 1. ์ฃผ์†Œ์ฐฝ ์™ผ์ชฝ์˜ ์ž๋ฌผ์‡  ์•„์ด์ฝ˜ ํด๋ฆญ +// 2. "๋งˆ์ดํฌ" ๊ถŒํ•œ ์„ค์ • +// 3. "ํ—ˆ์šฉ"์œผ๋กœ ๋ณ€๊ฒฝ +// 4. ํŽ˜์ด์ง€ ์ƒˆ๋กœ๊ณ ์นจ +``` + +### 2. webOS TV ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ ํ…Œ์ŠคํŠธ + +**โœ… ์žฅ์ **: webOS TV ํ™˜๊ฒฝ์—์„œ๋Š” ๋งˆ์ดํฌ ๊ถŒํ•œ ํŒ์—…์ด ์—†์Œ! + +```bash +# 1. appinfo.json ๊ถŒํ•œ ํ™•์ธ (ํ•„์ˆ˜!) +# webos-meta/appinfo.json์— "audio.capture" ๊ถŒํ•œ์ด ์žˆ๋Š”์ง€ ํ™•์ธ + +# 2. ๋นŒ๋“œ +npm run build + +# 3. ํŒจํ‚ค์ง• +npm run package + +# 4. ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ์— ์„ค์น˜ +npm run install-tv + +# 5. ์‹คํ–‰ +npm run launch-tv + +# 6. ์Œ์„ฑ ์ž…๋ ฅ ํ…Œ์ŠคํŠธ +# - SearchPanel ์—ด๊ธฐ +# - ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ +# - โœ… ๊ถŒํ•œ ํŒ์—… ์—†์ด ๋ฐ”๋กœ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘๋จ +# - ์Œ์„ฑ ๋ฐœํ™” ("์•„์ดํฐ" ๋“ฑ) +# - ๊ฒฐ๊ณผ ํ™•์ธ (SearchPanel์— ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ํ‘œ์‹œ) + +# 7. ๋กœ๊ทธ ํ™•์ธ (Remote Inspector) +# - Chrome์—์„œ chrome://webos-devtools ์ ‘์† +# - ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ ์•ฑ ์„ ํƒ โ†’ Inspect +# - ์ฝ˜์†”์—์„œ [WebSpeech] ๋กœ๊ทธ ํ™•์ธ +``` + +**์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ ๋งˆ์ดํฌ ํ…Œ์ŠคํŠธ:** + +```bash +# ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ์—์„œ๋Š” PC์˜ ๋งˆ์ดํฌ๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค +# - macOS/Windows: ๊ธฐ๋ณธ ๋งˆ์ดํฌ ์ž๋™ ์‚ฌ์šฉ +# - ๋งˆ์ดํฌ๊ฐ€ ์—†์œผ๋ฉด ์Œ์„ฑ ์ธ์‹ ๋ถˆ๊ฐ€ ("no-speech" ์—๋Ÿฌ) +``` + +### 3. ์‹ค์ œ TV ํ…Œ์ŠคํŠธ + +**โœ… ์ตœ์ข… ํ…Œ์ŠคํŠธ**: ์‹ค์ œ webOS TV์—์„œ ๋ฆฌ๋ชจ์ปจ ๋งˆ์ดํฌ ์‚ฌ์šฉ + +```bash +# 1. TV๋ฅผ ๊ฐœ๋ฐœ์ž ๋ชจ๋“œ๋กœ ์„ค์ • +# - TV ์„ค์ • โ†’ ์ผ๋ฐ˜ โ†’ ์ •๋ณด โ†’ TV ์ •๋ณด +# - "๊ฐœ๋ฐœ์ž ๋ชจ๋“œ ์•ฑ" ๋‹ค์šด๋กœ๋“œ +# - Dev Mode ํ™œ์„ฑํ™” + +# 2. ares-setup-device๋กœ TV ๋“ฑ๋ก +ares-setup-device --add tv --info "{'host': '192.168.x.x', 'port': '9922', 'username': 'prisoner'}" + +# 3. appinfo.json ๊ถŒํ•œ ํ™•์ธ (ํ•„์ˆ˜!) +# webos-meta/appinfo.json์— "audio.capture" ๊ถŒํ•œ ์ถ”๊ฐ€๋˜์–ด ์žˆ๋Š”์ง€ ํ™•์ธ + +# 4. ์„ค์น˜ ๋ฐ ์‹คํ–‰ +npm run build-ipk +npm run install-tv +npm run launch-tv +``` + +**์‹ค์ œ TV ํ…Œ์ŠคํŠธ ์‹œ๋‚˜๋ฆฌ์˜ค:** + +1. **์•ฑ ์„ค์น˜ ์‹œ**: + - โœ… ๋งˆ์ดํฌ ๊ถŒํ•œ ํŒ์—… ์—†์Œ (appinfo.json ๊ถŒํ•œ์œผ๋กœ ์ž๋™ ํ—ˆ์šฉ) + +2. **๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ ํ…Œ์ŠคํŠธ**: + ``` + SearchPanel โ†’ ๋งˆ์ดํฌ ๋ฒ„ํŠผ ํด๋ฆญ + โ†’ VoiceInputOverlay ํ‘œ์‹œ + โ†’ ๋งˆ์ดํฌ ๋ฒ„ํŠผ ๋‹ค์‹œ ํด๋ฆญ + โ†’ ์ฆ‰์‹œ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ (๊ถŒํ•œ ํŒ์—… ์—†์Œ) + โ†’ "์•„์ดํฐ" ๋ฐœํ™” + โ†’ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ํ‘œ์‹œ + ``` + +3. 
**๋ฆฌ๋ชจ์ปจ ์Œ์„ฑ ๋ฒ„ํŠผ ํ…Œ์ŠคํŠธ** (์„ ํƒ์‚ฌํ•ญ): + ``` + SearchPanel ํ™”๋ฉด์—์„œ + โ†’ ๋ฆฌ๋ชจ์ปจ ์Œ์„ฑ ๋ฒ„ํŠผ(๐ŸŽค) ๋ˆ„๋ฆ„ + โ†’ VoiceInputOverlay ์ž๋™ ํ‘œ์‹œ + ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + โ†’ "๊ฐค๋Ÿญ์‹œ" ๋ฐœํ™” + โ†’ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ํ‘œ์‹œ + ``` + +4. **๋กœ๊ทธ ํ™•์ธ**: + ```bash + # Remote Inspector๋กœ ์‹ค์ œ TV ๋””๋ฒ„๊น… + ares-inspect com.lgshop.app --device tv --open + + # ์ฝ˜์†” ๋กœ๊ทธ ํ™•์ธ: + # [WebSpeech] Initialized with config: { lang: 'ko-KR', ... } + # [WebSpeech] Starting recognition... + # [WebSpeech] Recognition started + # [WebSpeech] Result: { transcript: "์•„์ดํฐ", isFinal: true, ... } + # [useWebSpeech] STT text received: ์•„์ดํฐ + ``` + +--- + +## ๐Ÿ”ง webOS TV ํ™˜๊ฒฝ ์ตœ์ ํ™” + +### 1. ๋งˆ์ดํฌ ๊ถŒํ•œ ์ฒ˜๋ฆฌ (์ค‘์š”!) + +**webOS TV ํ™˜๊ฒฝ์˜ ํŠน๋ณ„ํ•œ ์ :** + +webOS TV์—์„œ๋Š” ์ผ๋ฐ˜ ์›น ๋ธŒ๋ผ์šฐ์ €์™€ ๋‹ฌ๋ฆฌ **๋ณ„๋„์˜ ๋Ÿฐํƒ€์ž„ ๋งˆ์ดํฌ ๊ถŒํ•œ ์š”์ฒญ ํŒ์—…์ด ์—†์Šต๋‹ˆ๋‹ค** (LG ๋‹ด๋‹น์ž ํ™•์ธ). + +**๊ถŒํ•œ ์„ค์ • ๋ฐฉ๋ฒ•:** + +1. `webos-meta/appinfo.json`์— `audio.capture` ๊ถŒํ•œ ์ถ”๊ฐ€ +2. ์•ฑ ์„ค์น˜ ์‹œ ์ž๋™์œผ๋กœ ๊ถŒํ•œ ๋ถ€์—ฌ +3. ์‚ฌ์šฉ์ž์—๊ฒŒ ๋ณ„๋„ ํŒ์—… ํ‘œ์‹œ ์—†์Œ + +**ํŒŒ์ผ**: `webos-meta/appinfo.json` + +```json +{ + "id": "com.lgshop.app", + "version": "2.0.0", + "vendor": "LG", + "type": "web", + "main": "index.html", + "title": "ShopTime", + "icon": "icon.png", + "requiredPermissions": [ + "audio.capture" + ] +} +``` + +**์ค‘์š” ์‚ฌํ•ญ:** + +- โœ… **webOS TV**: ๊ถŒํ•œ ํŒ์—… ์—†์Œ, appinfo.json๋งŒ ์„ค์ • +- โš ๏ธ **Chrome ๋ธŒ๋ผ์šฐ์ € (๊ฐœ๋ฐœ ํ™˜๊ฒฝ)**: ์ตœ์ดˆ 1ํšŒ ๊ถŒํ•œ ์š”์ฒญ ํŒ์—… ํ‘œ์‹œ๋จ +- ๐Ÿ’ก **๊ฒฐ๋ก **: TV ํ™˜๊ฒฝ์—์„œ๋Š” ์‚ฌ์šฉ์ž ๊ฒฝํ—˜ ๋‹จ์ ˆ ์—†์ด ๋ฐ”๋กœ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ ๊ฐ€๋Šฅ + +**๊ถŒํ•œ ์ฒดํฌ ์ฝ”๋“œ (๋ถˆํ•„์š”):** + +webOS TV์—์„œ๋Š” ์•„๋ž˜ ๊ถŒํ•œ ์ฒดํฌ ์ฝ”๋“œ๊ฐ€ ๋ถˆํ•„์š”ํ•ฉ๋‹ˆ๋‹ค: + +```javascript +// โŒ webOS TV์—์„œ๋Š” ๋ถˆํ•„์š”ํ•œ ์ฝ”๋“œ +// (Chrome ๋ธŒ๋ผ์šฐ์ € ๊ฐœ๋ฐœ ํ™˜๊ฒฝ์—์„œ๋งŒ ํ•„์š”) +const checkMicrophonePermission = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + stream.getTracks().forEach(track => track.stop()); + return true; + } catch (error) { + console.error('Microphone permission denied:', error); + return false; + } +}; +``` + +**์‹ค์ œ ํ•„์š”ํ•œ ๊ฒƒ:** + +```javascript +// โœ… webOS TV์—์„œ ํ•„์š”ํ•œ ๊ฒƒ: appinfo.json ์„ค์ •๋งŒ +// WebSpeechService.start()๋ฅผ ๋ฐ”๋กœ ํ˜ธ์ถœํ•˜๋ฉด ๋จ +``` + +### 2. ๋ฆฌ๋ชจ์ปจ ๋ฒ„ํŠผ ํ†ตํ•ฉ + +webOS TV ๋ฆฌ๋ชจ์ปจ์˜ ์Œ์„ฑ ๋ฒ„ํŠผ๊ณผ ํ†ตํ•ฉ: + +```javascript +// src/views/SearchPanel/SearchPanel.new.jsx + +useEffect(() => { + if (!isOnTop) return; + + // ๋ฆฌ๋ชจ์ปจ ์Œ์„ฑ ๋ฒ„ํŠผ (KeyCode 461) ๊ฐ์ง€ + const handleKeyDown = (event) => { + if (event.keyCode === 461) { + // ์Œ์„ฑ ๋ฒ„ํŠผ + event.preventDefault(); + setIsVoiceOverlayVisible(true); + // ์ž๋™์œผ๋กœ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘ + setTimeout(() => { + startListening(); + }, 300); + } + }; + + document.addEventListener('keydown', handleKeyDown); + + return () => { + document.removeEventListener('keydown', handleKeyDown); + }; +}, [isOnTop, startListening]); +``` + +### 3. ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ + +Web Speech API๋Š” Google ์„œ๋ฒ„๋ฅผ ์‚ฌ์šฉํ•˜๋ฏ€๋กœ ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ ํ•„์š”: + +```javascript +// src/hooks/useWebSpeech.js (์ˆ˜์ •) + +useEffect(() => { + if (webSpeech.error) { + console.error('[useWebSpeech] Error:', webSpeech.error); + + // ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜ ์‹œ Toast ํ‘œ์‹œ + if (webSpeech.error.error === 'network') { + dispatch( + showErrorToast('๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค. 
์ธํ„ฐ๋„ท ์—ฐ๊ฒฐ์„ ํ™•์ธํ•ด์ฃผ์„ธ์š”.', { + duration: 3000, + }) + ); + } + // ๋งˆ์ดํฌ ๊ถŒํ•œ ์˜ค๋ฅ˜ + else if (webSpeech.error.error === 'not-allowed') { + dispatch( + showErrorToast('๋งˆ์ดํฌ ์‚ฌ์šฉ ๊ถŒํ•œ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค.', { + duration: 3000, + }) + ); + } + // ์Œ์„ฑ ๊ฐ์ง€ ์‹คํŒจ + else if (webSpeech.error.error === 'no-speech') { + dispatch( + showWarningToast('์Œ์„ฑ์ด ๊ฐ์ง€๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.', { + duration: 3000, + }) + ); + } + } +}, [webSpeech.error, dispatch]); +``` + +--- + +## ๐Ÿ“Š Plan A vs Plan B ์ „ํ™˜ + +### Plan A (VUI Framework) ์‚ฌ์šฉ ์‹œ + +```javascript +// src/views/SearchPanel/SearchPanel.new.jsx + +import { useSearchVoice } from '../../hooks/useSearchVoice'; + +// ... + +useSearchVoice(isOnTop, handleSTTText); +``` + +### Plan B (Web Speech API) ์‚ฌ์šฉ ์‹œ + +```javascript +// src/views/SearchPanel/SearchPanel.new.jsx + +import { useWebSpeech } from '../../hooks/useWebSpeech'; + +// ... + +const { isListening, startListening, stopListening } = useWebSpeech( + isOnTop, + handleSTTText, + { lang: 'ko-KR' } +); +``` + +### ๋‘ ๊ฐ€์ง€ ๋ฐฉ์‹ ๋ณ‘ํ–‰ ์‚ฌ์šฉ (๊ถŒ์žฅ) + +```javascript +// src/views/SearchPanel/SearchPanel.new.jsx + +import { useSearchVoice } from '../../hooks/useSearchVoice'; // Plan A +import { useWebSpeech } from '../../hooks/useWebSpeech'; // Plan B + +// ... + +// ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ ์ œ์–ด +const USE_WEB_SPEECH_API = process.env.USE_WEB_SPEECH_API === 'true'; + +if (USE_WEB_SPEECH_API) { + // Plan B: Web Speech API + const { isListening, startListening, stopListening } = useWebSpeech( + isOnTop, + handleSTTText, + { lang: 'ko-KR' } + ); +} else { + // Plan A: VUI Framework + useSearchVoice(isOnTop, handleSTTText); +} +``` + +--- + +## ๐Ÿš€ ๊ตฌํ˜„ ์ฒดํฌ๋ฆฌ์ŠคํŠธ + +### ํ•„์ˆ˜ ๊ตฌํ˜„ + +- [ ] `WebSpeechService.js` ์ƒ์„ฑ +- [ ] `webSpeechActions.js` ์ƒ์„ฑ +- [ ] `actionTypes.js`์— Web Speech ํƒ€์ž… ์ถ”๊ฐ€ +- [ ] `voiceReducer.js`์— Web Speech ์ƒํƒœ ์ถ”๊ฐ€ +- [ ] `useWebSpeech.js` Hook ์ƒ์„ฑ +- [ ] `SearchPanel.new.jsx`์— useWebSpeech ํ†ตํ•ฉ +- [ ] `VoiceInputOverlay.jsx`์— Web Speech ์ง€์› ์ถ”๊ฐ€ + +### ์„ ํƒ์  ์ตœ์ ํ™” + +- [ ] ๋ฆฌ๋ชจ์ปจ ์Œ์„ฑ ๋ฒ„ํŠผ ํ†ตํ•ฉ +- [ ] ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜ Toast ํ‘œ์‹œ +- [ ] Interim ๊ฒฐ๊ณผ ์‹œ๊ฐ์  ํ‘œ์‹œ +- [ ] ๋‹ค๊ตญ์–ด ์ง€์› (en-US, ja-JP ๋“ฑ) +- [ ] webos-meta/appinfo.json์— ๊ถŒํ•œ ์ถ”๊ฐ€ +- [ ] ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ Plan A/B ์ „ํ™˜ ๊ฐ€๋Šฅํ•˜๋„๋ก + +### ํ…Œ์ŠคํŠธ + +- [ ] Chrome ๋ธŒ๋ผ์šฐ์ €์—์„œ ํ…Œ์ŠคํŠธ +- [ ] webOS ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ์—์„œ ํ…Œ์ŠคํŠธ +- [ ] ์‹ค์ œ TV์—์„œ ํ…Œ์ŠคํŠธ +- [ ] ํ•œ๊ตญ์–ด/์˜์–ด ์Œ์„ฑ ์ธ์‹ ํ…Œ์ŠคํŠธ +- [ ] ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜ ์‹œ๋‚˜๋ฆฌ์˜ค ํ…Œ์ŠคํŠธ + +--- + +## ๐Ÿ“š ์ฐธ๊ณ  ์ž๋ฃŒ + +### Web Speech API ๋ฌธ์„œ + +- [MDN Web Speech API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Speech_API) +- [SpeechRecognition Interface](https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition) +- [Chrome Platform Status - Web Speech](https://chromestatus.com/feature/4782875580825600) + +### webOS ๊ด€๋ จ + +- [webOS TV Developer Guide](https://webostv.developer.lge.com/) +- [webOS TV API Reference](https://webostv.developer.lge.com/api/) + +### ํ”„๋กœ์ ํŠธ ๋ฌธ์„œ + +- `vui-implement.md` - VUI Framework ๊ตฌํ˜„ (Plan A) +- `web-speech.md` - Web Speech API ๊ตฌํ˜„ (Plan B, ์ด ๋ฌธ์„œ) + +--- + +## ๐Ÿ’ก ์ฃผ์˜์‚ฌํ•ญ + +1. **๋„คํŠธ์›Œํฌ ์˜์กด์„ฑ**: Web Speech API๋Š” Google ์„œ๋ฒ„๋ฅผ ์‚ฌ์šฉํ•˜๋ฏ€๋กœ **์ธํ„ฐ๋„ท ์—ฐ๊ฒฐ ํ•„์ˆ˜** +2. **HTTPS ํ•„์š”**: ๋กœ์ปฌํ˜ธ์ŠคํŠธ ์™ธ ํ™˜๊ฒฝ์—์„œ๋Š” HTTPS ํ•„์ˆ˜ (webOS TV๋Š” ๋‚ด๋ถ€์ ์œผ๋กœ ์ฒ˜๋ฆฌ) +3. 
**๋งˆ์ดํฌ ๊ถŒํ•œ (์ค‘์š”!)**: + - **webOS TV**: `appinfo.json`์— `audio.capture` ๊ถŒํ•œ๋งŒ ์ถ”๊ฐ€ํ•˜๋ฉด ๋จ. **๋ณ„๋„ ํŒ์—… ์—†์Œ** + - **Chrome ๋ธŒ๋ผ์šฐ์ € (๊ฐœ๋ฐœ ํ™˜๊ฒฝ)**: ์ตœ์ดˆ 1ํšŒ ๊ถŒํ•œ ์š”์ฒญ ํŒ์—… ํ‘œ์‹œ + - LG ๋‹ด๋‹น์ž ํ™•์ธ: webOS TV์—์„œ๋Š” ๋Ÿฐํƒ€์ž„ ๊ถŒํ•œ ์š”์ฒญ ์—†์Œ +4. **๋ธŒ๋ผ์šฐ์ € ํ˜ธํ™˜์„ฑ**: Chrome/Chromium ๊ธฐ๋ฐ˜ ๋ธŒ๋ผ์šฐ์ €๋งŒ ์ง€์› (Chrome 68 ์™„๋ฒฝ ์ง€์›) +5. **์–ธ์–ด ์ง€์›**: ํ•œ๊ตญ์–ด('ko-KR'), ์˜์–ด('en-US'), ์ผ๋ณธ์–ด('ja-JP') ๋“ฑ ์ฃผ์š” ์–ธ์–ด ์ง€์› +6. **์—ฐ์† ์ธ์‹**: `continuous: false` ๊ถŒ์žฅ (ํ•œ ๋ฒˆ ๋ฐœํ™” โ†’ ํ•œ ๋ฒˆ ์ธ์‹์ด TV UX์— ์ ํ•ฉ) + +--- + +## ๐ŸŽฏ ๋‹ค์Œ ๋‹จ๊ณ„ + +1. **WebSpeechService ๊ตฌํ˜„** - ๊ฐ€์žฅ ๋จผ์ € ๊ตฌํ˜„ +2. **Redux ํ†ตํ•ฉ** - ์•ก์…˜ ๋ฐ ๋ฆฌ๋“€์„œ ์ถ”๊ฐ€ +3. **useWebSpeech Hook** - SearchPanel์—์„œ ์‚ฌ์šฉํ•  Hook +4. **SearchPanel ํ†ตํ•ฉ** - ๊ธฐ์กด ์ฝ”๋“œ ์ตœ์†Œ ์ˆ˜์ • +5. **ํ…Œ์ŠคํŠธ** - Chrome โ†’ ์‹œ๋ฎฌ๋ ˆ์ดํ„ฐ โ†’ ์‹ค์ œ TV ์ˆœ์„œ๋กœ +6. **์ตœ์ ํ™”** - ๋ฆฌ๋ชจ์ปจ ๋ฒ„ํŠผ, ์—๋Ÿฌ ์ฒ˜๋ฆฌ ๋“ฑ + +--- + +## ๐Ÿ“Œ ํ•ต์‹ฌ ์š”์•ฝ (TL;DR) + +### webOS TV ํ™˜๊ฒฝ์˜ ํŠน๋ณ„ํ•œ ์  + +| ํ•ญ๋ชฉ | Chrome ๋ธŒ๋ผ์šฐ์ € (๊ฐœ๋ฐœ) | webOS TV (์‹ค์ œ ํ™˜๊ฒฝ) | +|------|---------------------|-------------------| +| **๋งˆ์ดํฌ ๊ถŒํ•œ** | ๋Ÿฐํƒ€์ž„ ํŒ์—… ํ‘œ์‹œ (์ตœ์ดˆ 1ํšŒ) | **ํŒ์—… ์—†์Œ** (appinfo.json๋งŒ ์„ค์ •) | +| **๊ถŒํ•œ ์„ค์ •** | ๋ธŒ๋ผ์šฐ์ € ์„ค์ •์—์„œ ์ˆ˜๋™ ํ—ˆ์šฉ | ์•ฑ ์„ค์น˜ ์‹œ ์ž๋™ ํ—ˆ์šฉ | +| **์‚ฌ์šฉ์ž ๊ฒฝํ—˜** | ๊ถŒํ•œ ํ—ˆ์šฉ ๋‹จ๊ณ„ ํ•„์š” | **์ฆ‰์‹œ ์Œ์„ฑ ์ธ์‹ ์‹œ์ž‘** | + +### Plan A vs Plan B ์ตœ์ข… ๋น„๊ต + +| ๊ตฌ๋ถ„ | Plan A (VUI Framework) | Plan B (Web Speech API) | +|------|----------------------|------------------------| +| **API** | webOS Voice Conductor | Web Speech API | +| **๊ตฌํ˜„ ๋ณต์žก๋„** | โญโญโญโญ ๋†’์Œ | โญโญ ์ค‘๊ฐ„ | +| **ํ”Œ๋žซํผ ์˜์กด์„ฑ** | webOS ์ „์šฉ | ๋ฒ”์šฉ (Chrome 68+) | +| **๋งˆ์ดํฌ ๊ถŒํ•œ** | appinfo.json | appinfo.json (๋™์ผ) | +| **๊ฐœ๋ฐœ ํŽธ์˜์„ฑ** | ๋ณต์žกํ•œ Luna ์„œ๋น„์Šค | ๊ฐ„๋‹จํ•œ ๋ธŒ๋ผ์šฐ์ € API | +| **๋””๋ฒ„๊น…** | VoicePanel ํ•„์š” | Chrome DevTools | +| **๋ฆฌ๋ชจ์ปจ ํ†ตํ•ฉ** | ์ž๋™ ํ†ตํ•ฉ | ์ˆ˜๋™ ๊ตฌํ˜„ ํ•„์š” | +| **๋„คํŠธ์›Œํฌ ์˜์กด** | webOS ์„œ๋ฒ„ | Google ์„œ๋ฒ„ | + +### ๊ฒฐ๋ก  ๋ฐ ๊ถŒ์žฅ์‚ฌํ•ญ + +**Plan B (Web Speech API) ์ถ”์ฒœ ์ƒํ™ฉ:** +- โœ… ๋น ๋ฅธ ํ”„๋กœํ† ํƒ€์ดํ•‘์ด ํ•„์š”ํ•  ๋•Œ +- โœ… ํฌ๋กœ์Šค ํ”Œ๋žซํผ ๊ฐœ๋ฐœ์„ ๊ณ ๋ คํ•  ๋•Œ +- โœ… Chrome ๋ธŒ๋ผ์šฐ์ €์—์„œ๋„ ํ…Œ์ŠคํŠธํ•˜๊ณ  ์‹ถ์„ ๋•Œ +- โœ… ๊ฐ„๋‹จํ•˜๊ณ  ์ง๊ด€์ ์ธ API๋ฅผ ์„ ํ˜ธํ•  ๋•Œ + +**Plan A (VUI Framework) ์ถ”์ฒœ ์ƒํ™ฉ:** +- โœ… webOS TV ์ „์šฉ ์•ฑ์ผ ๋•Œ +- โœ… ๋ฆฌ๋ชจ์ปจ ํ†ตํ•ฉ์ด ํ•„์ˆ˜์ผ ๋•Œ +- โœ… LG์˜ ๊ณต์‹ ์Œ์„ฑ ์„œ๋น„์Šค ์‚ฌ์šฉ์ด ํ•„์š”ํ•  ๋•Œ +- โœ… ์˜คํ”„๋ผ์ธ ํ™˜๊ฒฝ์—์„œ๋„ ๋™์ž‘ํ•ด์•ผ ํ•  ๋•Œ (์ผ๋ถ€ ๊ธฐ๋Šฅ) + +**ํ˜„์žฌ ์ƒํ™ฉ:** +- ํ˜„์žฌ Plan A (VUI Framework)๋ฅผ ํ…Œ์ŠคํŠธ ์ค‘ +- Plan B (Web Speech API)๋Š” ๋Œ€์•ˆ (fallback)์œผ๋กœ ์ค€๋น„ +- ๋‘ ๊ฐ€์ง€ ๋ชจ๋‘ Redux ์ƒํƒœ๋ฅผ ๊ณต์œ ํ•˜๋ฏ€๋กœ ์ „ํ™˜ ์šฉ์ด + +--- + +## ๐Ÿš€ ์ฆ‰์‹œ ์‹œ์ž‘ํ•˜๊ธฐ + +**1๋ถ„ ๋งŒ์— Web Speech API ํ…Œ์ŠคํŠธ:** + +```javascript +// Chrome ์ฝ˜์†”์—์„œ ๋ฐ”๋กœ ํ…Œ์ŠคํŠธ ๊ฐ€๋Šฅ! +const recognition = new (window.SpeechRecognition || window.webkitSpeechRecognition)(); +recognition.lang = 'ko-KR'; +recognition.onresult = (event) => { + console.log('์ธ์‹ ๊ฒฐ๊ณผ:', event.results[0][0].transcript); +}; +recognition.start(); +// ๋ฐœํ™”: "์•ˆ๋…•ํ•˜์„ธ์š”" +// ์ฝ˜์†” ์ถœ๋ ฅ: ์ธ์‹ ๊ฒฐ๊ณผ: ์•ˆ๋…•ํ•˜์„ธ์š” +``` + +**webOS TV์—์„œ ๋ฐ”๋กœ ์‚ฌ์šฉํ•˜๋ ค๋ฉด:** + +1. `webos-meta/appinfo.json`์— `"audio.capture"` ๊ถŒํ•œ ์ถ”๊ฐ€ +2. `WebSpeechService.js` ๋ณต์‚ฌ +3. 
Apply the `useWebSpeech` hook
+4. Done! 🎉
+
+---
+
+Work through this document step by step. If you have any questions, feel free to ask anytime! 🚀
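One caveat on the Plan A/B parallel-use example in web-speech.md above: React hooks cannot be called inside `if`/`else` branches, so the environment-flag switch is safer to express by calling both hooks unconditionally and letting their activity flag decide which path is live. A minimal sketch under that assumption (both hooks already take an active flag as their first argument in the examples above):

```javascript
// Sketch: keep the hook call order stable; gate behavior via the isActive flag.
const USE_WEB_SPEECH_API = process.env.USE_WEB_SPEECH_API === 'true';

// Plan A (VUI Framework) — active only when the flag is off
useSearchVoice(isOnTop && !USE_WEB_SPEECH_API, handleSTTText);

// Plan B (Web Speech API) — active only when the flag is on
const { isListening, startListening, stopListening } = useWebSpeech(
  isOnTop && USE_WEB_SPEECH_API,
  handleSTTText,
  { lang: 'ko-KR' }
);
```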