From 0e988cc85487f61aa3b61131c22bed135ddfd76d Mon Sep 17 00:00:00 2001
From: Eunjae Lee
Date: Thu, 16 May 2019 16:05:06 +0200
Subject: [PATCH] fix(voiceSearch): remove event listeners on dispose (#3779)

* fix(voiceSearch): remove event listeners on dispose
* test(voiceSearch): remove unnecessary wrapper method
* chore(voiceSearch): add comment
* test(voiceSearch): clean up duplicated code
* fix(voiceSearch): stop listening on disposal
* type(voiceSearch): add return type
* chore(voiceSearch): add comments
* fix(voiceSearch): remove resetState on disposal
* chore(voiceSearch): rename voiceSearchHelper to createVoiceSearchHelper
* chore(voiceSearch): rename titles of tests
* test(voiceSearch): rename listeners and remove redundant comments
---
 .../__tests__/connectVoiceSearch-test.js      |  10 +-
 .../voice-search/connectVoiceSearch.ts        |   5 +-
 .../voiceSearchHelper/__tests__/index-test.ts | 128 ++++++++++--------
 src/lib/voiceSearchHelper/index.ts            |  85 +++++++-----
 4 files changed, 139 insertions(+), 89 deletions(-)

diff --git a/src/connectors/voice-search/__tests__/connectVoiceSearch-test.js b/src/connectors/voice-search/__tests__/connectVoiceSearch-test.js
index 336e7d73a7..c0561b9034 100644
--- a/src/connectors/voice-search/__tests__/connectVoiceSearch-test.js
+++ b/src/connectors/voice-search/__tests__/connectVoiceSearch-test.js
@@ -8,6 +8,7 @@ jest.mock('../../../lib/voiceSearchHelper', () => {
     isBrowserSupported: () => true,
     isListening: () => false,
     toggleListening: () => {},
+    dispose: jest.fn(),
     // ⬇️ for test
     changeState: () => onStateChange(),
     changeQuery: query => onQueryChange(query),
@@ -77,12 +78,19 @@ See documentation: https://www.algolia.com/doc/api-reference/widgets/voice-searc
     );
   });
 
-  it('calls unmount on dispose', () => {
+  it('calls unmount on `dispose`', () => {
     const { unmountFn, widget, helper } = getDefaultSetup();
     widget.init({ helper });
     widget.dispose({ helper, state: helper.state });
     expect(unmountFn).toHaveBeenCalledTimes(1);
   });
+
+  it('removes event listeners on `dispose`', () => {
+    const { widget, helper } = getDefaultSetup();
+    widget.init({ helper });
+    widget.dispose({ helper, state: helper.state });
+    expect(widget._voiceSearchHelper.dispose).toHaveBeenCalledTimes(1);
+  });
 });
 
 it('triggers render when state changes', () => {
diff --git a/src/connectors/voice-search/connectVoiceSearch.ts b/src/connectors/voice-search/connectVoiceSearch.ts
index fa1f78b507..3f2bf21617 100644
--- a/src/connectors/voice-search/connectVoiceSearch.ts
+++ b/src/connectors/voice-search/connectVoiceSearch.ts
@@ -4,7 +4,7 @@ import {
   noop,
 } from '../../lib/utils';
 import { Renderer, RenderOptions, WidgetFactory } from '../../types';
-import voiceSearchHelper, {
+import createVoiceSearchHelper, {
   VoiceListeningState,
   ToggleListening,
 } from '../../lib/voiceSearchHelper';
@@ -91,7 +91,7 @@ const connectVoiceSearch: VoiceSearchConnector = (
         };
         return setQueryAndSearch;
       })();
-      (this as any)._voiceSearchHelper = voiceSearchHelper({
+      (this as any)._voiceSearchHelper = createVoiceSearchHelper({
        searchAsYouSpeak,
        onQueryChange: query => (this as any)._refine(query),
        onStateChange: () => {
@@ -116,6 +116,7 @@ const connectVoiceSearch: VoiceSearchConnector = (
       });
     },
     dispose({ state }) {
+      (this as any)._voiceSearchHelper.dispose();
       unmountFn();
       return state.setQuery('');
     },
diff --git a/src/lib/voiceSearchHelper/__tests__/index-test.ts b/src/lib/voiceSearchHelper/__tests__/index-test.ts
index 2ae8a05c03..6aea11d65d 100644
--- a/src/lib/voiceSearchHelper/__tests__/index-test.ts
+++ b/src/lib/voiceSearchHelper/__tests__/index-test.ts
@@ -1,18 +1,4 @@
-import createVoiceSearchHelper, {
-  VoiceSearchHelper,
-  VoiceSearchHelperParams,
-} from '..';
-
-const getVoiceSearchHelper = (
-  opts?: VoiceSearchHelperParams
-): VoiceSearchHelper =>
-  createVoiceSearchHelper(
-    opts || {
-      searchAsYouSpeak: false,
-      onQueryChange: () => {},
-      onStateChange: () => {},
-    }
-  );
+import createVoiceSearchHelper from '..';
 
 type DummySpeechRecognition = () => void;
 declare global {
@@ -22,14 +8,35 @@ declare global {
   }
 }
 
+const start = jest.fn();
+const stop = jest.fn();
+
+const createFakeSpeechRecognition = (): jest.Mock => {
+  const simulateListener: any = {};
+  const mock = jest.fn().mockImplementation(() => ({
+    start,
+    stop,
+    addEventListener(eventName: string, callback: () => void) {
+      simulateListener[eventName] = callback;
+    },
+    removeEventListener() {},
+  }));
+  (mock as any).simulateListener = simulateListener;
+  return mock;
+};
+
 describe('VoiceSearchHelper', () => {
-  beforeEach(() => {
+  afterEach(() => {
     delete window.webkitSpeechRecognition;
     delete window.SpeechRecognition;
   });
 
   it('has initial state correctly', () => {
-    const voiceSearchHelper = getVoiceSearchHelper();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
     expect(voiceSearchHelper.getState()).toEqual({
       errorCode: undefined,
       isSpeechFinal: false,
@@ -39,38 +46,49 @@ describe('VoiceSearchHelper', () => {
   });
 
   it('is not supported', () => {
-    const voiceSearchHelper = getVoiceSearchHelper();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
     expect(voiceSearchHelper.isBrowserSupported()).toBe(false);
   });
 
   it('is not listening', () => {
-    const voiceSearchHelper = getVoiceSearchHelper();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
     expect(voiceSearchHelper.isListening()).toBe(false);
   });
 
   it('is supported with webkitSpeechRecognition', () => {
     window.webkitSpeechRecognition = () => {};
-    const voiceSearchHelper = getVoiceSearchHelper();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
     expect(voiceSearchHelper.isBrowserSupported()).toBe(true);
   });
 
   it('is supported with SpeechRecognition', () => {
-    window.SpeechRecognition = () => {};
-    const voiceSearchHelper = getVoiceSearchHelper();
+    window.SpeechRecognition = createFakeSpeechRecognition();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
     expect(voiceSearchHelper.isBrowserSupported()).toBe(true);
   });
 
   it('works with mock SpeechRecognition (searchAsYouSpeak:false)', () => {
-    let recognition;
-    window.SpeechRecognition = jest.fn().mockImplementation(() => ({
-      start() {
-        /* eslint-disable-next-line consistent-this */
-        recognition = this;
-      },
-    }));
+    window.SpeechRecognition = createFakeSpeechRecognition();
+    const { simulateListener } = window.SpeechRecognition as any;
     const onQueryChange = jest.fn();
     const onStateChange = jest.fn();
-    const voiceSearchHelper = getVoiceSearchHelper({
+    const voiceSearchHelper = createVoiceSearchHelper({
       searchAsYouSpeak: false,
       onQueryChange,
       onStateChange,
@@ -79,9 +97,9 @@ describe('VoiceSearchHelper', () => {
     voiceSearchHelper.toggleListening();
     expect(onStateChange).toHaveBeenCalledTimes(1);
     expect(voiceSearchHelper.getState().status).toEqual('askingPermission');
-    recognition.onstart();
+    simulateListener.start();
     expect(voiceSearchHelper.getState().status).toEqual('waiting');
-    recognition.onresult({
+    simulateListener.result({
       results: [
         (() => {
           const obj = [
             { transcript: 'Hello World' },
           ];
           (obj as any).isFinal = true;
           return obj;
         })(),
       ],
     });
     expect(voiceSearchHelper.getState().transcript).toEqual('Hello World');
     expect(voiceSearchHelper.getState().isSpeechFinal).toBe(true);
     expect(onQueryChange).toHaveBeenCalledTimes(0);
-    recognition.onend();
+    simulateListener.end();
     expect(onQueryChange).toHaveBeenCalledWith('Hello World');
     expect(voiceSearchHelper.getState().status).toEqual('finished');
   });
 
   it('works with mock SpeechRecognition (searchAsYouSpeak:true)', () => {
-    let recognition;
-    window.SpeechRecognition = jest.fn().mockImplementation(() => ({
-      start() {
-        /* eslint-disable-next-line consistent-this */
-        recognition = this;
-      },
-    }));
+    window.SpeechRecognition = createFakeSpeechRecognition();
+    const { simulateListener } = window.SpeechRecognition as any;
     const onQueryChange = jest.fn();
     const onStateChange = jest.fn();
-    const voiceSearchHelper = getVoiceSearchHelper({
+    const voiceSearchHelper = createVoiceSearchHelper({
       searchAsYouSpeak: true,
       onQueryChange,
       onStateChange,
     });
 
     voiceSearchHelper.toggleListening();
     expect(onStateChange).toHaveBeenCalledTimes(1);
     expect(voiceSearchHelper.getState().status).toEqual('askingPermission');
-    recognition.onstart();
+    simulateListener.start();
     expect(voiceSearchHelper.getState().status).toEqual('waiting');
-    recognition.onresult({
+    simulateListener.result({
       results: [
         (() => {
           const obj = [
             { transcript: 'Hello World' },
           ];
           (obj as any).isFinal = true;
           return obj;
         })(),
       ],
     });
     expect(voiceSearchHelper.getState().transcript).toEqual('Hello World');
     expect(voiceSearchHelper.getState().isSpeechFinal).toBe(true);
     expect(onQueryChange).toHaveBeenCalledWith('Hello World');
-    recognition.onend();
+    simulateListener.end();
     expect(onQueryChange).toHaveBeenCalledTimes(1);
     expect(voiceSearchHelper.getState().status).toEqual('finished');
   });
 
   it('works with onerror', () => {
-    let recognition;
-    window.SpeechRecognition = jest.fn().mockImplementation(() => ({
-      start() {
-        /* eslint-disable-next-line consistent-this */
-        recognition = this;
-      },
-    }));
+    window.SpeechRecognition = createFakeSpeechRecognition();
+    const { simulateListener } = window.SpeechRecognition as any;
     const onQueryChange = jest.fn();
     const onStateChange = jest.fn();
-    const voiceSearchHelper = getVoiceSearchHelper({
+    const voiceSearchHelper = createVoiceSearchHelper({
       searchAsYouSpeak: true,
       onQueryChange,
       onStateChange,
     });
     voiceSearchHelper.toggleListening();
     expect(voiceSearchHelper.getState().status).toEqual('askingPermission');
-    recognition.onerror({
+    simulateListener.error({
       error: 'not-allowed',
     });
     expect(voiceSearchHelper.getState().status).toEqual('error');
     expect(voiceSearchHelper.getState().errorCode).toEqual('not-allowed');
-    recognition.onend();
+    simulateListener.end();
     expect(onQueryChange).toHaveBeenCalledTimes(0);
   });
+
+  it('stops listening on `dispose`', () => {
+    window.SpeechRecognition = createFakeSpeechRecognition();
+    const voiceSearchHelper = createVoiceSearchHelper({
+      searchAsYouSpeak: false,
+      onQueryChange: () => {},
+      onStateChange: () => {},
+    });
+    voiceSearchHelper.toggleListening();
+    voiceSearchHelper.dispose();
+    expect(stop).toHaveBeenCalledTimes(1);
+  });
 });
diff --git a/src/lib/voiceSearchHelper/index.ts b/src/lib/voiceSearchHelper/index.ts
index 77ddc9765f..56911d6a5e 100644
--- a/src/lib/voiceSearchHelper/index.ts
+++ b/src/lib/voiceSearchHelper/index.ts
@@ -23,11 +23,12 @@ export type VoiceSearchHelper = {
   isBrowserSupported: () => boolean;
   isListening: () => boolean;
   toggleListening: () => void;
+  dispose: () => void;
 };
 
 export type ToggleListening = () => void;
 
-export default function voiceSearchHelper({
+export default function createVoiceSearchHelper({
   searchAsYouSpeak,
   onQueryChange,
   onStateChange,
@@ -62,6 +63,40 @@ export default function voiceSearchHelper({
     setState(getDefaultState(status));
   };
 
+  const onStart = (): void => {
+    setState({
+      status: STATUS_WAITING,
+    });
+  };
+
+  const onError = (event: SpeechRecognitionError): void => {
+    setState({ status: STATUS_ERROR, errorCode: event.error });
+  };
+
+  const onResult = (event: SpeechRecognitionEvent): void => {
+    setState({
+      status: STATUS_RECOGNIZING,
+      transcript:
+        (event.results[0] &&
+          event.results[0][0] &&
+          event.results[0][0].transcript) ||
+        '',
+      isSpeechFinal: event.results[0] && event.results[0].isFinal,
+    });
+    if (searchAsYouSpeak && state.transcript) {
+      onQueryChange(state.transcript);
+    }
+  };
+
+  const onEnd = (): void => {
+    if (!state.errorCode && state.transcript && !searchAsYouSpeak) {
+      onQueryChange(state.transcript);
+    }
+    if (state.status !== STATUS_ERROR) {
+      setState({ status: STATUS_FINISHED });
+    }
+  };
+
   const stop = (): void => {
     if (recognition) {
       recognition.stop();
@@ -77,40 +112,25 @@ export default function voiceSearchHelper({
     }
     resetState(STATUS_ASKING_PERMISSION);
     recognition.interimResults = true;
-    recognition.onstart = () => {
-      setState({
-        status: STATUS_WAITING,
-      });
-    };
-    recognition.onerror = (event: SpeechRecognitionError) => {
-      setState({ status: STATUS_ERROR, errorCode: event.error });
-    };
-    recognition.onresult = (event: SpeechRecognitionEvent) => {
-      setState({
-        status: STATUS_RECOGNIZING,
-        transcript:
-          (event.results[0] &&
-            event.results[0][0] &&
-            event.results[0][0].transcript) ||
-          '',
-        isSpeechFinal: event.results[0] && event.results[0].isFinal,
-      });
-      if (searchAsYouSpeak && state.transcript) {
-        onQueryChange(state.transcript);
-      }
-    };
-    recognition.onend = () => {
-      if (!state.errorCode && state.transcript && !searchAsYouSpeak) {
-        onQueryChange(state.transcript);
-      }
-      if (state.status !== STATUS_ERROR) {
-        setState({ status: STATUS_FINISHED });
-      }
-    };
-
+    recognition.addEventListener('start', onStart);
+    recognition.addEventListener('error', onError);
+    recognition.addEventListener('result', onResult);
+    recognition.addEventListener('end', onEnd);
     recognition.start();
   };
 
+  const dispose = (): void => {
+    if (!recognition) {
+      return;
+    }
+    recognition.stop();
+    recognition.removeEventListener('start', onStart);
+    recognition.removeEventListener('error', onError);
+    recognition.removeEventListener('result', onResult);
+    recognition.removeEventListener('end', onEnd);
+    recognition = undefined;
+  };
+
   const toggleListening = (): void => {
     if (!isBrowserSupported()) {
       return;
@@ -127,5 +147,6 @@ export default function voiceSearchHelper({
     isBrowserSupported,
     isListening,
     toggleListening,
+    dispose,
   };
 }
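
Editorial note (not part of the patch): a minimal usage sketch of the dispose contract introduced above, assuming direct access to the internal helper factory. The import path is hypothetical — createVoiceSearchHelper lives in src/lib/voiceSearchHelper and is normally reached through connectVoiceSearch rather than imported by application code — and the console.log callbacks are placeholders.

// Illustrative sketch only; import path and callbacks are assumptions.
import createVoiceSearchHelper from './lib/voiceSearchHelper';

const voiceSearchHelper = createVoiceSearchHelper({
  searchAsYouSpeak: false,
  onQueryChange: query => {
    // e.g. forward the final transcript to the search client
    console.log('query:', query);
  },
  onStateChange: () => {
    console.log('state:', voiceSearchHelper.getState());
  },
});

if (voiceSearchHelper.isBrowserSupported()) {
  // Creates a SpeechRecognition instance and registers the
  // start/error/result/end listeners added by this patch.
  voiceSearchHelper.toggleListening();
}

// On widget disposal (what connectVoiceSearch's dispose() now does):
// stops recognition, removes the four listeners, and drops the instance,
// so no callbacks fire after the widget has been unmounted.
voiceSearchHelper.dispose();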