From 9b26b6fe635c7ba527e414fcfe39351f386d86bb Mon Sep 17 00:00:00 2001 From: huyue57 Date: Mon, 28 Aug 2023 20:19:53 +0800 Subject: [PATCH 1/4] add sync api ut Signed-off-by: huyue57 Change-Id: Ic66560325f656697ac881f6db48262dc333198e1 --- .../AudioCapturerJsUnitTest.js | 199 ++ .../unittest/audio_capturer_test/BUILD.gn | 28 + .../unittest/audio_capturer_test/config.json | 62 + .../signature/openharmony_sx.p7b | Bin 0 -> 3504 bytes .../AudioManagerJsUnitTest.js | 166 ++ .../test/unittest/audio_manager_test/BUILD.gn | 28 + .../unittest/audio_manager_test/config.json | 62 + .../signature/openharmony_sx.p7b | Bin 0 -> 3504 bytes .../AudioGroupManagerJsUnitTest.js | 1229 ++++++++++++ .../test/unittest/group_manager_test/BUILD.gn | 2 +- .../AudioRoutingManagerJsTest.js | 316 ++++ .../AudioStreamManagerJsTest.js | 961 ++++++++++ .../AudioVolumeManagerJsUnitTest.js | 86 + .../unittest/volume_manager_test/BUILD.gn | 28 + .../unittest/volume_manager_test/config.json | 62 + .../signature/openharmony_sx.p7b | Bin 0 -> 3504 bytes ...RendererInterruptSyncCommonTypeUnitTest.js | 1655 +++++++++++++++++ ...ioRendererInterruptSyncRareTypeUnitTest.js | 912 +++++++++ .../AudioRendererJsUnitTest.js | 352 ++++ .../unittest/audio_renderer_test/BUILD.gn | 28 + .../unittest/audio_renderer_test/config.json | 62 + .../signature/openharmony_sx.p7b | Bin 0 -> 3504 bytes .../test/unittest/tone_player_test/BUILD.gn | 28 + .../tone_player_test/TonePlayerJsUnitTest.js | 69 + .../unittest/tone_player_test/config.json | 62 + .../signature/openharmony_sx.p7b | Bin 0 -> 3504 bytes 26 files changed, 6396 insertions(+), 1 deletion(-) create mode 100644 frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/AudioCapturerJsUnitTest.js create mode 100644 frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/BUILD.gn create mode 100644 frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json create mode 100644 
frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/signature/openharmony_sx.p7b create mode 100644 frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/AudioManagerJsUnitTest.js create mode 100644 frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/BUILD.gn create mode 100644 frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json create mode 100644 frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/signature/openharmony_sx.p7b create mode 100644 frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/AudioVolumeManagerJsUnitTest.js create mode 100644 frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/BUILD.gn create mode 100644 frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json create mode 100644 frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/signature/openharmony_sx.p7b create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncCommonTypeUnitTest.js create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/AudioRendererJsUnitTest.js create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/BUILD.gn create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/signature/openharmony_sx.p7b create mode 100644 frameworks/js/napi/toneplayer/test/unittest/tone_player_test/BUILD.gn create mode 100644 frameworks/js/napi/toneplayer/test/unittest/tone_player_test/TonePlayerJsUnitTest.js create mode 100644 frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json create mode 100644 
frameworks/js/napi/toneplayer/test/unittest/tone_player_test/signature/openharmony_sx.p7b diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/AudioCapturerJsUnitTest.js b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/AudioCapturerJsUnitTest.js new file mode 100644 index 0000000000..ea7ffe0099 --- /dev/null +++ b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/AudioCapturerJsUnitTest.js @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import audio from '@ohos.multimedia.audio'; +import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from 'deccjsunit/index' + +const TAG = "[AudioCapturerJsUnitTest]"; + +describe("AudioCapturerJsUnitTest", function() { + let audioStreamInfo = { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, + channels: audio.AudioChannel.CHANNEL_1, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + } + let audioCapturerInfo = { + source: audio.SourceType.SOURCE_TYPE_MIC, + capturerFlags: 0 + } + let audioCapturerOptions = { + streamInfo: audioStreamInfo, + rendererInfo: audioCapturerInfo + } + + let audioCapturer; + + beforeAll(async function () { + // input testsuit setup step, setup invoked before all testcases + try { + audioCapturer = audio.createAudioCapturerSync(audioCapturerOptions); + console.info(`${TAG}: AudioCapturer created SUCCESS, state: ${audioCapturer.state}`); + } catch (err) { + console.error(`${TAG}: AudioCapturer created ERROR: ${err.message}`); + } + console.info(TAG + 'beforeAll called') + }) + + afterAll(function () { + + // input testsuit teardown step, teardown invoked after all testcases + audioCapturer.release().then(() => { + console.info(`${TAG}: AudioCapturer release : SUCCESS`); + }).catch((err) => { + console.info(`${TAG}: AudioCapturer release :ERROR : ${err.message}`); + }); + console.info(TAG + 'afterAll called') + }) + + beforeEach(function () { + + // input testcase setup step, setup invoked before each testcases + console.info(TAG + 'beforeEach called') + }) + + afterEach(function () { + + // input testcase teardown step, teardown invoked after each testcases + console.info(TAG + 'afterEach called') + }) + + /* + * @tc.name:SUB_AUDIO_CREATE_AUDIO_CAPUTURER_SYNC_001 + * @tc.desc:createAudioCapturerSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_CREATE_AUDIO_CAPUTURER_SYNC_001", 0, async function (done) { + 
try { + let value = audio.createAudioCapturerSync(audioCapturerOptions); + console.info(`SUB_AUDIO_CREATE_AUDIO_CAPUTURER_SYNC_001 SUCCESS: ${value}.`); + expect(typeof value).assertEqual('object'); + done(); + } catch (err) { + console.error(`SUB_AUDIO_CREATE_AUDIO_CAPUTURER_SYNC_001 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_CAPTURER_GET_STREAM_INFO_SYNC_TEST_001 + * @tc.desc:getStreamInfoSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_CAPTURER_GET_STREAM_INFO_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioCapturer.getStreamInfoSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_STREAM_INFO_SYNC_TEST_001 SUCCESS: ${data}`); + expect(data.samplingRate).assertEqual(audio.AudioSamplingRate.SAMPLE_RATE_48000); + expect(data.channels).assertEqual(audio.AudioChannel.CHANNEL_1); + expect(data.sampleFormat).assertEqual(audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE); + expect(data.encodingType).assertEqual(audio.AudioEncodingType.ENCODING_TYPE_RAW); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_STREAM_INFO_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_CAPTURER_GET_CAPTURER_INFO_SYNC_TEST_001 + * @tc.desc:getCapturerInfoSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_CAPTURER_GET_CAPTURER_INFO_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioCapturer.getCapturerInfoSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_CAPTURER_INFO_SYNC_TEST_001 SUCCESS: ${data}`); + expect(data.source).assertEqual(audio.SourceType.SOURCE_TYPE_MIC); + expect(data.capturerFlags).assertEqual(0); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_CAPTURER_INFO_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 
+ * @tc.desc:getAudioStreamIdSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioCapturer.getAudioStreamIdSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_CAPTURER_GET_BUFFER_SIZE_SYNC_TEST_001 + * @tc.desc:getBufferSizeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_CAPTURER_GET_BUFFER_SIZE_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioCapturer.getBufferSizeSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_BUFFER_SIZE_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_BUFFER_SIZE_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_CAPTURER_GET_AUDIO_TIME_SYNC_TEST_001 + * @tc.desc:getAudioTimeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_CAPTURER_GET_AUDIO_TIME_SYNC_TEST_001', 0, async function (done) { + try { + let audioCapturer = audio.createAudioCapturerSync(audioCapturerOptions); + let data = audioCapturer.getAudioTimeSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_AUDIO_TIME_SYNC_TEST_001 SUCCESS, before start: ${data}`); + expect(data).assertEqual(0); + + await audioCapturer.start(); + data = audioCapturer.getAudioTimeSync(); + console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_AUDIO_TIME_SYNC_TEST_001 SUCCESS, after start: ${data}`); + expect(data).assertLarger(0); + + await audioCapturer.stop(); + await audioCapturer.release(); + done(); + } catch (err) { + 
console.info(`${TAG}: SUB_AUDIO_CAPTURER_GET_AUDIO_TIME_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) +}) diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/BUILD.gn b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/BUILD.gn new file mode 100644 index 0000000000..8deb54c6bc --- /dev/null +++ b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/BUILD.gn @@ -0,0 +1,28 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") + +module_output_path = "multimedia_audio_framework/audio_capturer_js" + +ohos_js_unittest("AudioCapturerJsUnitTest") { + module_out_path = module_output_path + + hap_profile = "./config.json" + certificate_profile = "./signature/openharmony_sx.p7b" +} + +group("jsunittest") { + testonly = true + deps = [ ":AudioCapturerJsUnitTest" ] +} diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json new file mode 100644 index 0000000000..f9ecd195bb --- /dev/null +++ b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json @@ -0,0 +1,62 @@ +{ + "app": { + "bundleName": "com.example.myapplication", + "vendor": "example", + "version": { + "code": 1, + "name": "1.0" + }, + "apiVersion": { + "compatible": 8, + "target": 9 + } + }, + "deviceConfig": {}, + "module": { + "package": "com.example.myapplication", + "name": ".MyApplication", + "deviceType": [ + "phone", + "tablet", + "2in1" + ], + "distro": { + "deliveryWithInstall": true, + "moduleName": "entry", + "moduleType": "entry" + }, + "abilities": [ + { + "skills": [ + { + "entities": [ + "entity.system.home" + ], + "actions": [ + "action.system.home" + ] + } + ], + "name": "com.example.myapplication.MainAbility", + "icon": "$media:icon", + "description": "$string:mainability_description", + "label": "MyApplication", + "type": "page", + "launchType": "standard", + "visible": true + } + ], + "js": [ + { + "pages": [ + "pages/index/index" + ], + "name": "default", + "window": { + "designWidth": 720, + "autoDesignWidth": false + } + } + ] + } + } \ No newline at end of file diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/signature/openharmony_sx.p7b b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/signature/openharmony_sx.p7b new file mode 100644 index 
0000000000000000000000000000000000000000..49d5a09221f14f3d260b279ac71c241802e9391f GIT binary patch literal 3504 zcmcgveNYo;9wr|I5hGX>)KkIufhPvvgd`Ap*6waLAHW6@5+D%|H_7G$l1)f9SrSgG z@yb=LdY)6IDi*&`uV@{$BAuz#-c_q!b!@G3(-V5uSng#0WD2=MtdxUdF|H(lq)_H` z5|U!}JGj^5@K8}vNsEZ|Q(_z8^aq)FC32;#SRt3ni{~W#xRWA+j9XExlob^+?!|LN zg9w6zm%u{=Gnp++I=qC~LpuHJd=4T)c>Mt)C{7@%2oR(e5BmIMl*t{o7CRB7JWh`t zr`RQl*&mE*boyKbHJWs49Riuqgtv~wbULI;g&>eU-v!Y+7;@>fhV(IyA}@NHF~#0 z{Qkl!_s}3>ijaf}FGdre%qfMGBOs0ffs0KlrG<9UcqERgbbP$Xp z`BA+lqA}Rb6-Ikh5u{}?ZMom)u-R3J1+7tgVwC~Z(qJ}g)tFXM?hey+WI(N-+u-$> zOB;#Gh=W8Uv|Ou-1R86U&2W*rywZbIG~h8ffWd%8?ohc5H85jc*yRes zE>sO0>?$Y*Szs5<+9+gE+8}?a3^HISqJlKc+F`_nGPC*KvFw|X$%Oa zwUF{AS5dw?s)icTDi~B~)RCe>bE!-ln@iVL#S}H=rjQgavQX-Lt4dlE(lZzoT4u+Q zu0tGF4_a9kCZT|_%HEW33~KYeRI$&EHCJezK23QuZFT8kg;N@qmdfi$ zv^-#zI6_VbZLul|Oi@QBHVmp)J!KO;-m8PMWB2 zn=%v)QG~DFhKD?MW;iQHP6DUGtXPQLj0z1VFaZ&x4Y=JKVl@Ewdi^xv5D(Z3M52XE zkdXwRC&bELA}2_&vSz24v|*72Y%Y@*W|c=2X!InME=b~D(c+G{;GZDQ{RN0~KIq_a zIb1HkXt(be7$q3NsPVaZd>$`33D_lRAXUIK^97k)L@yZ&USN%pnQ23`NHPIrv);7K zj3M4!6M_3A)4)_V#LJvGGz2{^Pc8z1DcM4)1W18YB4yrkYqn6Plt>FDgC+?4L=XTJ z41mac7KqBA-|6uZxyAt{1{RPhNJ*4|%NeW(kCVcKllV2~x!?439{dVwIyUa~?`Cb< zr!HD9Y|G(%y>Ip(U+=qoWeT>Z=sVu0w@#AWeAE1#4?gZNtV_-M!+O~{9=Yn|_)R;E zpc3RTa3)XVj|W*x$Nr_YTa@*A+P1Rp-We@Dw~`lc7i++1HX)tE`7fW(e^KTsb^^N2Q-`5K{#^F8Vd6IyWD@p2Fvp+R%i|*G>9|Pa`Ez#o^ z9DJcYgI2%v;Kug0?yumrj?2#PuI#>ZWwk)D=;80%nIRpm4Y4mDjw*p}OkY@@Jay%X z&ZQOLvg)rZXZ9K=9_Ju?u1!C*L!QyR_hD#B|KHLNG5k?6{5d_ypY$umtA=9%?w{DY zSzzWs>vEpeI-}oDJwYOgOj10AL<#dbB8h;E$K*-QU&c=o@oY(aOwk)%*C<@R<)*Oo z!f1bBXQX}09k8*fVqNso?(B>Co#!;;Tay_c;S7)Z2YEDwI6iMUlIPgHAz~zv z7fOfnh&7*&B0hscQ@4J5s{Ng8`M#BBcGmyn8Z;x1e|6$pw?CtnBm4IVzTbBxJ(fE!w#xP`l#PlAi?*MmmrD$p~r=f3b0vqVnDDcRt)aam%jTD^_IOzR)&x#obpH z9nR0Wd;Rp4HTT5k9d>{Z6Q`$gnTrNsPD18nw1T;hqkM^opL_p-b;J>C z&Ww4p#;-p+1zWsHdpEzO@x;ApCziQensfp%^3h4o}3}hy>Vho^T&nP z`)Jk4J`eQG8UOs?2I+eZUq?RwJCo8UM2~lF9+gZ`I}el{xiv2 { + if (err) { + console.error(`Failed to set the audio scene mode. 
${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful setting of the audio scene mode.'); + expect(true).assertTrue(); + + try { + let value = audioManager.getAudioSceneSync(); + console.info(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_001 SUCCESS: ${value}.`); + expect(value).assertEqual(audio.AudioScene.AUDIO_SCENE_DEFAULT); + done(); + } catch (err) { + console.error(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_001 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_002 + * @tc.desc:getAudioScene success - AUDIO_SCENE_RINGING + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_002", 0, async function (done) { + audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_RINGING, (err) => { + if (err) { + console.error(`Failed to set the audio scene mode. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful setting of the audio scene mode.'); + expect(true).assertTrue(); + + try { + let value = audioManager.getAudioSceneSync(); + console.info(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_002 SUCCESS: ${value}.`); + expect(value).assertEqual(audio.AudioScene.AUDIO_SCENE_RINGING); + done(); + } catch (err) { + console.error(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_002 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_003 + * @tc.desc:getAudioScene success - AUDIO_SCENE_PHONE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_003", 0, async function (done) { + audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL, (err) => { + if (err) { + console.error(`Failed to set the audio scene mode. 
${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful setting of the audio scene mode.'); + expect(true).assertTrue(); + + try { + let value = audioManager.getAudioSceneSync(); + console.info(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_003 SUCCESS: ${value}.`); + expect(value).assertEqual(audio.AudioScene.AUDIO_SCENE_PHONE_CALL); + done(); + } catch (err) { + console.error(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_003 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_004 + * @tc.desc:getAudioScene success - AUDIO_SCENE_PHONE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_004", 0, async function (done) { + audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_VOICE_CHAT, (err) => { + if (err) { + console.error(`Failed to set the audio scene mode. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful setting of the audio scene mode.'); + expect(true).assertTrue(); + + try { + let value = audioManager.getAudioSceneSync(); + console.info(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_004 SUCCESS: ${value}.`); + expect(value).assertEqual(audio.AudioScene.AUDIO_SCENE_VOICE_CHAT); + done(); + } catch (err) { + console.error(`SUB_AUDIO_MANAGER_GET_AUDIO_SCENE_SYNC_004 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) +}) \ No newline at end of file diff --git a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/BUILD.gn b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/BUILD.gn new file mode 100644 index 0000000000..b62580d6e4 --- /dev/null +++ b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/BUILD.gn @@ -0,0 +1,28 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") + +module_output_path = "multimedia_audio_framework/audio_manager_js" + +ohos_js_unittest("AudioManagerJsUnitTest") { + module_out_path = module_output_path + + hap_profile = "./config.json" + certificate_profile = "./signature/openharmony_sx.p7b" +} + +group("jsunittest") { + testonly = true + deps = [ ":AudioManagerJsUnitTest" ] +} diff --git a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json new file mode 100644 index 0000000000..f9ecd195bb --- /dev/null +++ b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json @@ -0,0 +1,62 @@ +{ + "app": { + "bundleName": "com.example.myapplication", + "vendor": "example", + "version": { + "code": 1, + "name": "1.0" + }, + "apiVersion": { + "compatible": 8, + "target": 9 + } + }, + "deviceConfig": {}, + "module": { + "package": "com.example.myapplication", + "name": ".MyApplication", + "deviceType": [ + "phone", + "tablet", + "2in1" + ], + "distro": { + "deliveryWithInstall": true, + "moduleName": "entry", + "moduleType": "entry" + }, + "abilities": [ + { + "skills": [ + { + "entities": [ + "entity.system.home" + ], + "actions": [ + "action.system.home" + ] + } + ], + "name": "com.example.myapplication.MainAbility", + "icon": "$media:icon", + "description": "$string:mainability_description", + "label": "MyApplication", + 
"type": "page", + "launchType": "standard", + "visible": true + } + ], + "js": [ + { + "pages": [ + "pages/index/index" + ], + "name": "default", + "window": { + "designWidth": 720, + "autoDesignWidth": false + } + } + ] + } + } \ No newline at end of file diff --git a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/signature/openharmony_sx.p7b b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/signature/openharmony_sx.p7b new file mode 100644 index 0000000000000000000000000000000000000000..49d5a09221f14f3d260b279ac71c241802e9391f GIT binary patch literal 3504 zcmcgveNYo;9wr|I5hGX>)KkIufhPvvgd`Ap*6waLAHW6@5+D%|H_7G$l1)f9SrSgG z@yb=LdY)6IDi*&`uV@{$BAuz#-c_q!b!@G3(-V5uSng#0WD2=MtdxUdF|H(lq)_H` z5|U!}JGj^5@K8}vNsEZ|Q(_z8^aq)FC32;#SRt3ni{~W#xRWA+j9XExlob^+?!|LN zg9w6zm%u{=Gnp++I=qC~LpuHJd=4T)c>Mt)C{7@%2oR(e5BmIMl*t{o7CRB7JWh`t zr`RQl*&mE*boyKbHJWs49Riuqgtv~wbULI;g&>eU-v!Y+7;@>fhV(IyA}@NHF~#0 z{Qkl!_s}3>ijaf}FGdre%qfMGBOs0ffs0KlrG<9UcqERgbbP$Xp z`BA+lqA}Rb6-Ikh5u{}?ZMom)u-R3J1+7tgVwC~Z(qJ}g)tFXM?hey+WI(N-+u-$> zOB;#Gh=W8Uv|Ou-1R86U&2W*rywZbIG~h8ffWd%8?ohc5H85jc*yRes zE>sO0>?$Y*Szs5<+9+gE+8}?a3^HISqJlKc+F`_nGPC*KvFw|X$%Oa zwUF{AS5dw?s)icTDi~B~)RCe>bE!-ln@iVL#S}H=rjQgavQX-Lt4dlE(lZzoT4u+Q zu0tGF4_a9kCZT|_%HEW33~KYeRI$&EHCJezK23QuZFT8kg;N@qmdfi$ zv^-#zI6_VbZLul|Oi@QBHVmp)J!KO;-m8PMWB2 zn=%v)QG~DFhKD?MW;iQHP6DUGtXPQLj0z1VFaZ&x4Y=JKVl@Ewdi^xv5D(Z3M52XE zkdXwRC&bELA}2_&vSz24v|*72Y%Y@*W|c=2X!InME=b~D(c+G{;GZDQ{RN0~KIq_a zIb1HkXt(be7$q3NsPVaZd>$`33D_lRAXUIK^97k)L@yZ&USN%pnQ23`NHPIrv);7K zj3M4!6M_3A)4)_V#LJvGGz2{^Pc8z1DcM4)1W18YB4yrkYqn6Plt>FDgC+?4L=XTJ z41mac7KqBA-|6uZxyAt{1{RPhNJ*4|%NeW(kCVcKllV2~x!?439{dVwIyUa~?`Cb< zr!HD9Y|G(%y>Ip(U+=qoWeT>Z=sVu0w@#AWeAE1#4?gZNtV_-M!+O~{9=Yn|_)R;E zpc3RTa3)XVj|W*x$Nr_YTa@*A+P1Rp-We@Dw~`lc7i++1HX)tE`7fW(e^KTsb^^N2Q-`5K{#^F8Vd6IyWD@p2Fvp+R%i|*G>9|Pa`Ez#o^ z9DJcYgI2%v;Kug0?yumrj?2#PuI#>ZWwk)D=;80%nIRpm4Y4mDjw*p}OkY@@Jay%X z&ZQOLvg)rZXZ9K=9_Ju?u1!C*L!QyR_hD#B|KHLNG5k?6{5d_ypY$umtA=9%?w{DY 
zSzzWs>vEpeI-}oDJwYOgOj10AL<#dbB8h;E$K*-QU&c=o@oY(aOwk)%*C<@R<)*Oo z!f1bBXQX}09k8*fVqNso?(B>Co#!;;Tay_c;S7)Z2YEDwI6iMUlIPgHAz~zv z7fOfnh&7*&B0hscQ@4J5s{Ng8`M#BBcGmyn8Z;x1e|6$pw?CtnBm4IVzTbBxJ(fE!w#xP`l#PlAi?*MmmrD$p~r=f3b0vqVnDDcRt)aam%jTD^_IOzR)&x#obpH z9nR0Wd;Rp4HTT5k9d>{Z6Q`$gnTrNsPD18nw1T;hqkM^opL_p-b;J>C z&Ww4p#;-p+1zWsHdpEzO@x;ApCziQensfp%^3h4o}3}hy>Vho^T&nP z`)Jk4J`eQG8UOs?2I+eZUq?RwJCo8UM2~lF9+gZ`I}el{xiv2 { @@ -200,4 +204,1229 @@ describe("AudioGroupManagerJsUnitTest", function () { done(); }) }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_001 + * @tc.desc:verify getVolumeSync get volume successfully - VOICE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_001", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.VOICE_CALL, volume, (err) => { + if (err) { + console.error(`Failed to set VOICE_CALL volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful VOICE_CALL volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`get VOICE_CALL volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_CALL volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_002 + * @tc.desc:verify getVolumeSync get volume successfully - RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_002", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.RINGTONE); + console.info(`get MEDIA volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value <= MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain RINGTONE volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_003 + * @tc.desc:verify getVolumeSync get volume successfully - MEDIA + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_003", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.MEDIA, volume, (err) => { + if (err) { + console.error(`Failed to set MEDIA volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful MEDIA volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.MEDIA); + console.info(`get MEDIA volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain MEDIA volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_004 + * @tc.desc:verify getVolumeSync get volume successfully - ALARM + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_004", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.ALARM, volume, (err) => { + if (err) { + console.error(`Failed to set ALARM volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful ALARM volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.ALARM); + console.info(`get ALARM volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain ALARM volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_005 + * @tc.desc:verify getVolumeSync get volume successfully - ACCESSIBILITY + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_005", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.ACCESSIBILITY, volume, (err) => { + if (err) { + console.error(`Failed to set ACCESSIBILITY volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful ACCESSIBILITY volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.ACCESSIBILITY); + console.info(`get ACCESSIBILITY volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain ACCESSIBILITY volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_006 + * @tc.desc:verify getVolumeSync get volume successfully - VOICE_ASSISTANT + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_006", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.VOICE_ASSISTANT, volume, (err) => { + if (err) { + console.error(`Failed to set VOICE_ASSISTANT volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful VOICE_ASSISTANT volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.VOICE_ASSISTANT); + console.info(`get VOICE_ASSISTANT volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_ASSISTANT volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_007 + * @tc.desc:verify getVolumeSync get volume successfully - ULTRASONIC + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_007", 0, async function (done) { + let volume = 6; + audioVolumeGroupManager.setVolume(audio.AudioVolumeType.ULTRASONIC, volume, (err) => { + if (err) { + console.error(`Failed to set ULTRASONIC volume. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate a successful ULTRASONIC volume setting.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`get ULTRASONIC volume is obtained ${value}.`); + expect(value).assertEqual(volume); + done(); + } catch (err) { + console.error(`Failed to obtain ULTRASONIC volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_008 + * @tc.desc:verify getVolumeSync get volume successfully - ALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_008", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getVolumeSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`get ULTRASONIC volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value <= MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ALL volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_009 + * @tc.desc:verify getVolumeSync get volume fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_009", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getVolumeSync(); + console.info(`get volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_010 + * @tc.desc:verify getVolumeSync get volume fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_010", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getVolumeSync("Invalid type"); + console.info(`get volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_011 + * @tc.desc:verify getVolumeSync get volume fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_VOLUME_SYNC_011", 0, async function (done) { + let invalidVolumeType = 10000; + try { + let value = audioVolumeGroupManager.getVolumeSync(invalidVolumeType); + console.info(`get volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume. ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_001 + * @tc.desc:verify getMinVolumeSync get min volume successfully - VOICE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_001", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`get VOICE_CALL min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_CALL min volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_002 + * @tc.desc:verify getMinVolumeSync get min volume successfully - RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_002", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.RINGTONE); + console.info(`get RINGTONE min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain RINGTONE min volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_003 + * @tc.desc:verify getMinVolumeSync get min volume successfully - MEDIA + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_003", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.MEDIA); + console.info(`get MEDIA min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain MEDIA min volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_004 + * @tc.desc:verify getMinVolumeSync get min volume successfully - ALARM + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_004", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.ALARM); + console.info(`get ALARM min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ALARM min volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_005 + * @tc.desc:verify getMinVolumeSync get min volume successfully - ACCESSIBILITY + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_005", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.ACCESSIBILITY); + console.info(`get ACCESSIBILITY min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ACCESSIBILITY min volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_006 + * @tc.desc:verify getMinVolumeSync get min volume successfully - VOICE_ASSISTANT + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_006", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.VOICE_ASSISTANT); + console.info(`get VOICE_ASSISTANT min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_ASSISTANT min volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_007 + * @tc.desc:verify getMinVolumeSync get min volume successfully - ULTRASONIC + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_007", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`get ULTRASONIC min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ULTRASONIC min volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_008 + * @tc.desc:verify getMinVolumeSync get min volume successfully - ALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_008", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(audio.AudioVolumeType.ALL); + console.info(`get ALL min volume is obtained ${value}.`); + expect(value >= MIN_VOLUME_LEVEL && value < MAX_VOLUME_LEVEL).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ALL min volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_009 + * @tc.desc:verify getMinVolumeSync get min volume fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_009", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync(); + console.info(`get min volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain min volume. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_010 + * @tc.desc:verify getMinVolumeSync get volume fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_010", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMinVolumeSync("Invalid type"); + console.info(`get min volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain min volume. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_011 + * @tc.desc:verify getMinVolumeSync get min volume fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MIN_VOLUME_SYNC_011", 0, async function (done) { + let invalidVolumeType = 10000; + try { + let value = audioVolumeGroupManager.getMinVolumeSync(invalidVolumeType); + console.info(`get min volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain min volume. 
${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_001 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - VOICE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_001", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`get VOICE_CALL max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_CALL max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_002 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_002", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.RINGTONE); + console.info(`get RINGTONE max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain RINGTONE max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_003 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - MEDIA + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_003", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.MEDIA); + console.info(`get MEDIA max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain MEDIA max volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_004 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - ALARM + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_004", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.ALARM); + console.info(`get ALARM max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain ALARM max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_005 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - ACCESSIBILITY + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_005", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.ACCESSIBILITY); + console.info(`get ACCESSIBILITY max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain ACCESSIBILITY max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_006 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - VOICE_ASSISTANT + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_006", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.VOICE_ASSISTANT); + console.info(`get VOICE_ASSISTANT max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_ASSISTANT max volume. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_007 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - ULTRASONIC + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_007", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`get ULTRASONIC max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain ULTRASONIC max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_008 + * @tc.desc:verify getMaxVolumeSync get max volume successfully - ALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_008", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(audio.AudioVolumeType.ALL); + console.info(`get ALL max volume is obtained ${value}.`); + expect(value).assertEqual(MAX_VOLUME_LEVEL); + done(); + } catch (err) { + console.error(`Failed to obtain ALL max volume. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_009 + * @tc.desc:verify getMaxVolumeSync get max volume fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_009", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(); + console.info(`get max volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain max volume. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_010 + * @tc.desc:verify getMaxVolumeSync get volume fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_010", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getMaxVolumeSync("Invalid type"); + console.info(`get max volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain max volume. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_011 + * @tc.desc:verify getMaxVolumeSync get max volume fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_MAX_VOLUME_SYNC_011", 0, async function (done) { + let invalidVolumeType = 10000; + try { + let value = audioVolumeGroupManager.getMaxVolumeSync(invalidVolumeType); + console.info(`get max volume is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain max volume. ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_001 + * @tc.desc:verify isMuteSync get mute status successfully - VOICE_CALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_001", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.VOICE_CALL, true, (err) => { + if (err) { + console.error(`Failed to mute VOICE_CALL stream. 
${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that VOICE_CALL stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`The mute status of VOICE_CALL stream is obtained ${value}.`); + expect(value).assertEqual(false); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_CALL mute status. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_002 + * @tc.desc:verify isMuteSync get mute status successfully - RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_002", 0, async function (done) { + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.RINGTONE); + console.info(`The mute status of RINGTONE stream is obtained ${value}.`); + expect(typeof value).assertEqual('boolean'); + done(); + } catch (err) { + console.error(`Failed to obtain RINGTONE mute status. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_003 + * @tc.desc:verify isMuteSync get mute status successfully - MEDIA + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_003", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.MEDIA, true, (err) => { + if (err) { + console.error(`Failed to mute MEDIA stream. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that MEDIA stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.MEDIA); + console.info(`The mute status of MEDIA stream is obtained ${value}.`); + expect(value).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain MEDIA mute status. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_004 + * @tc.desc:verify isMuteSync get mute status successfully - ALARM + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_004", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.ALARM, true, (err) => { + if (err) { + console.error(`Failed to mute ALARM stream. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that ALARM stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.ALARM); + console.info(`The mute status of ALARM stream is obtained ${value}.`); + expect(value).assertEqual(false); + done(); + } catch (err) { + console.error(`Failed to obtain ALARM mute status. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_005 + * @tc.desc:verify isMuteSync get mute status successfully - ACCESSIBILITY + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_005", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.ACCESSIBILITY, true, (err) => { + if (err) { + console.error(`Failed to mute ACCESSIBILITY stream. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that ACCESSIBILITY stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.ACCESSIBILITY); + console.info(`The mute status of ACCESSIBILITY stream is obtained ${value}.`); + expect(value).assertEqual(false); + done(); + } catch (err) { + console.error(`Failed to obtain ACCESSIBILITY mute status. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_006 + * @tc.desc:verify isMuteSync get mute status successfully - VOICE_ASSISTANT + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_006", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.VOICE_ASSISTANT, true, (err) => { + if (err) { + console.error(`Failed to mute VOICE_ASSISTANT stream. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that VOICE_ASSISTANT stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.VOICE_ASSISTANT); + console.info(`The mute status of VOICE_ASSISTANT stream is obtained ${value}.`); + expect(value).assertEqual(false); + done(); + } catch (err) { + console.error(`Failed to obtain VOICE_ASSISTANT mute status. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_007 + * @tc.desc:verify isMuteSync get mute status successfully - ULTRASONIC + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_007", 0, async function (done) { + audioVolumeGroupManager.mute(audio.AudioVolumeType.ULTRASONIC, true, (err) => { + if (err) { + console.error(`Failed to mute ULTRASONIC stream. ${err}`); + expect(false).assertTrue(); + done(); + return; + } + console.info('invoked to indicate that ULTRASONIC stream is muted.'); + expect(true).assertTrue(); + + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`The mute status of ULTRASONIC stream is obtained ${value}.`); + expect(value).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain ULTRASONIC mute status. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_008 + * @tc.desc:verify isMuteSync get mute status successfully - ALL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_008", 0, async function (done) { + try { + let value = audioVolumeGroupManager.isMuteSync(audio.AudioVolumeType.ALL); + console.info(`The mute status of ALL stream is obtained ${value}.`); + expect(typeof value).assertEqual('boolean'); + done(); + } catch (err) { + console.error(`Failed to obtain ALL mute status. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_009 + * @tc.desc:verify isMuteSync get mute status fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_009", 0, async function (done) { + try { + let value = audioVolumeGroupManager.isMuteSync(); + console.info(`The mute status of the stream is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain mute status. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_010 + * @tc.desc:verify isMuteSync get mute status fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_010", 0, async function (done) { + try { + let value = audioVolumeGroupManager.isMuteSync("Invalid type"); + console.info(`The mute status of the stream is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain mute status. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_011 + * @tc.desc:verify isMuteSync get mute status fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MUTE_SYNC_011", 0, async function (done) { + let invalidVolumeType = 10000; + try { + let value = audioVolumeGroupManager.isMuteSync(invalidVolumeType); + console.info(`The mute status of the stream is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain mute status. ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_RINGER_MODE_SYNC_001 + * @tc.desc:verify getRingerModeSync get ringer mode successfully + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_RINGER_MODE_SYNC_001", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getRingerModeSync(); + console.info(`invoked to indicate that the ringer mode is obtained ${value}.`); + expect(typeof value).assertEqual('number'); + done(); + } catch (err) { + console.error(`Failed to obtain the ringer mode. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_IS_MICROPHONE_MUTE_SYNC_001 + * @tc.desc:verify isMicrophoneMuteSync get microphone mute status successfully + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_IS_MICROPHONE_MUTE_SYNC_001", 0, async function (done) { + try { + let value = audioVolumeGroupManager.isMicrophoneMuteSync(); + console.info(`invoked to indicate that the mute status of the microphone is obtained ${value}.`); + expect(typeof value).assertEqual('boolean'); + done(); + } catch (err) { + console.error(`Failed to obtain the mute status of the microphone. 
${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_001 + * @tc.desc:verify getSystemVolumeInDbSync get volume db successfully - + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_001", 0, async function (done) { + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, volumeLevel, + audio.DeviceType.SPEAKER); + console.info(`get volume db is obtained ${value}.`); + expect(typeof value).assertEqual('number'); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_100 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_100", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_101 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid param count : 1 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_101", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_102 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid param count : 2 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_102", 0, async function (done) { + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, + volumeLevel); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_103 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid first param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_103", 0, async function (done) { + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync("Invalid type", volumeLevel, + audio.DeviceType.SPEAKER); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_104 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid second param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_104", 0, async function (done) { + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, + "Invalid type", audio.DeviceType.SPEAKER); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_105 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(401) - Invalid third param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_105", 0, async function (done) { + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, + volumeLevel, "Invalid type"); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. 
${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_106 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(6800101) - Invalid first param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_106", 0, async function (done) { + let invalidVolumeType = 10000; + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(invalidVolumeType, volumeLevel, + audio.DeviceType.SPEAKER); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_107 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(6800101) - Invalid second param value : 100 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_107", 0, async function (done) { + let invalidVolumeLevel = 100; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, + invalidVolumeLevel, audio.DeviceType.SPEAKER); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. 
${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_108 + * @tc.desc:verify getSystemVolumeInDbSync get volume db fail(6800101) - Invalid third param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_GROUP_MANAGER_GET_SYSTEM_VOLUME_IN_DB_SYNC_108", 0, async function (done) { + let invalidDeviceType = 10000; + let volumeLevel = 3; + try { + let value = audioVolumeGroupManager.getSystemVolumeInDbSync(audio.AudioVolumeType.VOICE_CALL, + volumeLevel, invalidDeviceType); + console.info(`get volume db is obtained ${value}.`); + expect(false).assertTrue(); + done(); + } catch (err) { + console.error(`Failed to obtain volume db. ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }) + + }) \ No newline at end of file diff --git a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/BUILD.gn b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/BUILD.gn index 4e93dd1448..b32aea0004 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/BUILD.gn +++ b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/BUILD.gn @@ -13,7 +13,7 @@ import("//build/test.gni") -module_output_path = "multimedia_audio_framework/audio_manager_js" +module_output_path = "multimedia_audio_framework/audio_volume_group_manager_js" ohos_js_unittest("AudioGroupManagerJsUnitTest") { module_out_path = module_output_path diff --git a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js index d7fac0e562..943f197238 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js +++ b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js @@ -21,6 +21,8 @@ const 
stringParameter = 'stringParameter'; const numberParameter = 12345678; describe("AudioRoutingManagerJsTest", function () { + const ERROR_INPUT_INVALID = '401'; + const ERROR_INVALID_PARAM = '6800101'; beforeAll(async function () { @@ -799,4 +801,318 @@ describe("AudioRoutingManagerJsTest", function () { done(); } }) + + /* + * @tc.name:isCommunicationDeviceActiveSync001 + * @tc.desc:Get isCommunicationDeviceActiveSync success - SPEAKER + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isCommunicationDeviceActiveSync001", 0, async function (done) { + let audioRoutingManager = null; + + try { + audioRoutingManager = audio.getAudioManager().getRoutingManager(); + await audioRoutingManager.setCommunicationDevice(audio.CommunicationDeviceType.SPEAKER, true); + let isActive = audioRoutingManager.isCommunicationDeviceActiveSync(audio.CommunicationDeviceType.SPEAKER); + console.info(`The active status of the device is obtained ${isActive}.`); + expect(isActive).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isCommunicationDeviceActiveSync001 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isCommunicationDeviceActiveSync002 + * @tc.desc:Get isCommunicationDeviceActiveSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isCommunicationDeviceActiveSync002", 0, async function (done) { + let audioRoutingManager = null; + + try { + audioRoutingManager = audio.getAudioManager().getRoutingManager(); + let isActive = audioRoutingManager.isCommunicationDeviceActiveSync(); + console.info(`The active status of the device is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isCommunicationDeviceActiveSync002 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:isCommunicationDeviceActiveSync003 + * @tc.desc:Get isCommunicationDeviceActiveSync 
fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isCommunicationDeviceActiveSync003", 0, async function (done) { + let audioRoutingManager = null; + + try { + audioRoutingManager = audio.getAudioManager().getRoutingManager(); + let isActive = audioRoutingManager.isCommunicationDeviceActiveSync("Invalid type"); + console.info(`The active status of the device is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isCommunicationDeviceActiveSync003 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:isCommunicationDeviceActiveSync004 + * @tc.desc:Get isCommunicationDeviceActiveSync fail(6800101) - Invalid param value : 100 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isCommunicationDeviceActiveSync004", 0, async function (done) { + let invalidDeviceType = 100; + let audioRoutingManager = null; + + try { + audioRoutingManager = audio.getAudioManager().getRoutingManager(); + let isActive = audioRoutingManager.isCommunicationDeviceActiveSync(invalidDeviceType); + console.info(`The active status is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isCommunicationDeviceActiveSync004 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); + done(); + return; + } + }); + + /* + * @tc.name:getDeviceSync001 + * @tc.desc:getDeviceSync success - INPUT_DEVICES_FLAG + * @tc.type: FUNC + * @tc.require: I6C9VA + */ + it("getDeviceSync001", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let AudioDeviceDescriptors = routingManager.getDevicesSync(audio.DeviceFlag.INPUT_DEVICES_FLAG); + console.info(`${TAG} getDeviceSync001 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + expect(AudioDeviceDescriptors.length).assertLarger(0); + for (let i = 0; i < 
AudioDeviceDescriptors.length; i++) { + expect(AudioDeviceDescriptors[i].displayName!=="" + && AudioDeviceDescriptors[i].displayName!==undefined).assertTrue(); + } + done(); + } catch (e) { + console.error(`${TAG} getDeviceSync001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }); + + /* + * @tc.name:getDeviceSync010 + * @tc.desc:getDeviceSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I6C9VA + */ + it("getDeviceSync010", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let AudioDeviceDescriptors = routingManager.getDevicesSync(); + console.info(`${TAG} getDeviceSync010 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + expect(false).assertTrue(); + done(); + } catch (e) { + console.error(`${TAG} getDeviceSync010 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }); + + /* + * @tc.name:getDeviceSync011 + * @tc.desc:getDeviceSync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I6C9VA + */ + it("getDeviceSync011", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let AudioDeviceDescriptors = routingManager.getDevicesSync("Invalid type"); + console.info(`${TAG} getDeviceSync011 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + expect(false).assertTrue(); + done(); + } catch (e) { + console.error(`${TAG} getDeviceSync011 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }); + + /* + * @tc.name:getDeviceSync012 + * @tc.desc:getDeviceSync fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I6C9VA + */ + it("getDeviceSync012", 0, async function (done) { + let invalidDeviceFlag = 10000; + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let AudioDeviceDescriptors = routingManager.getDevicesSync(invalidDeviceFlag); + console.info(`${TAG} 
getDeviceSync012 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + expect(false).assertTrue(); + done(); + } catch (e) { + console.error(`${TAG} getDeviceSync012 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); + done(); + } + }); + + /* + * @tc.name:getPreferredInputDeviceForCapturerInfoSyncTest001 + * @tc.desc:getPreferredInputDeviceForCapturerInfoSync success + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredInputDeviceForCapturerInfoSyncTest001", 0, async function (done) { + let capturerInfo = { + content : audio.ContentType.CONTENT_TYPE_MUSIC, + usage : audio.StreamUsage.STREAM_USAGE_MEDIA, + capturerFlags : 0 } + + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await routingManager.getPreferredInputDeviceForCapturerInfoSync(capturerInfo); + console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest001 SUCCESS`+JSON.stringify(data)); + expect(true).assertTrue(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }) + + /* + * @tc.name:getPreferredInputDeviceForCapturerInfoSyncTest002 + * @tc.desc:getPreferredInputDeviceForCapturerInfoSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredInputDeviceForCapturerInfoSyncTest002", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await routingManager.getPreferredInputDeviceForCapturerInfoSync(); + console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest002 SUCCESS`+JSON.stringify(data)); + expect().assertFail(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest002 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:getPreferredInputDeviceForCapturerInfoSyncTest003 + * 
@tc.desc:getPreferredInputDeviceForCapturerInfoSync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredInputDeviceForCapturerInfoSyncTest003", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await routingManager.getPreferredInputDeviceForCapturerInfoSync("Invalid type"); + console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest003 SUCCESS`+JSON.stringify(data)); + expect().assertFail(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest003 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:getPreferredOutputDeviceForRendererInfoSyncTest001 + * @tc.desc:getPreferredOutputDeviceForRendererInfoSync success + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredOutputDeviceForRendererInfoSyncTest001", 0, async function (done) { + let rendererInfo = { + content : audio.ContentType.CONTENT_TYPE_MUSIC, + usage : audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags : 0 } + + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await routingManager.getPreferredOutputDeviceForRendererInfoSync(rendererInfo); + console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest001 SUCCESS`+JSON.stringify(data)); + expect(true).assertTrue(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }) + + /* + * @tc.name:getPreferredOutputDeviceForRendererInfoSyncTest002 + * @tc.desc:getPreferredOutputDeviceForRendererInfoSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredOutputDeviceForRendererInfoSyncTest002", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await 
routingManager.getPreferredOutputDeviceForRendererInfoSync(); + console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest002 SUCCESS`+JSON.stringify(data)); + expect().assertFail(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest002 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + + /* + * @tc.name:getPreferredOutputDeviceForRendererInfoSyncTest003 + * @tc.desc:getPreferredOutputDeviceForRendererInfoSync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7Q56A + */ + it("getPreferredOutputDeviceForRendererInfoSyncTest003", 0, async function (done) { + try { + let routingManager = audio.getAudioManager().getRoutingManager(); + let data = await routingManager.getPreferredOutputDeviceForRendererInfoSync("Invalid type"); + console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest003 SUCCESS`+JSON.stringify(data)); + expect().assertFail(); + done(); + } catch(e) { + console.error(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest003 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + } + }) + }) diff --git a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js index 97beef3028..e20647fe2f 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js +++ b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js @@ -19,6 +19,9 @@ import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from const TAG = "[AudioStreamManagerJsTest]"; describe("AudioStreamManagerJsTest", function () { + const ERROR_INPUT_INVALID = '401'; + const ERROR_INVALID_PARAM = '6800101'; + let AudioStreamInfo = { samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, channels: 
audio.AudioChannel.CHANNEL_2, @@ -37,6 +40,16 @@ describe("AudioStreamManagerJsTest", function () { rendererInfo: AudioRendererInfo } + let AudioCapturerInfo = { + source: audio.SourceType.SOURCE_TYPE_MIC, + capturerFlags: 0 + } + + let AudioCapturerOptions = { + streamInfo: AudioStreamInfo, + capturerInfo: AudioCapturerInfo, + } + beforeAll(async function () { console.info(TAG + "beforeAll called"); }) @@ -348,4 +361,952 @@ describe("AudioStreamManagerJsTest", function () { done(); } }); + + /* + * @tc.name:getCurrentAudioRendererInfoArraySync001 + * @tc.desc:Get getCurrentAudioRendererInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioRendererInfoArraySync001", 0, async function (done) { + let audioRenderer = null; + let audioStreamManager = null; + try { + audioRenderer = await audio.createAudioRenderer(AudioRendererOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + } catch(e) { + console.error(`${TAG} getCurrentAudioRendererInfoArraySync001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + + try { + let audioRendererInfos = audioStreamManager.getCurrentAudioRendererInfoArraySync(); + console.info("getCurrentAudioRendererInfoArraySync001:"+JSON.stringify(audioRendererInfos)); + expect(audioRendererInfos.length).assertLarger(0); + expect(audioRendererInfos[0].deviceDescriptors[0].displayName!=="" + && audioRendererInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioRenderer.release(); + done(); + } catch (err) { + console.error(`${TAG} getCurrentAudioRendererInfoArraySync001 ERROR: ${JSON.stringify(err)}`); + expect(false).assertTrue(); + await audioRenderer.release(); + done(); + return; + } + }); + + /* + * @tc.name:getCurrentAudioRendererInfoArraySync002 + * @tc.desc:Get getCurrentAudioRendererInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioRendererInfoArraySync002", 0, async function (done) { + + let 
audioRenderer = null; + let audioStreamManager = null; + try { + audioRenderer = await audio.createAudioRenderer(AudioRendererOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + await audioRenderer.start(); + } catch(e) { + console.error(`${TAG} getCurrentAudioRendererInfoArraySync002 ERROR: ${e.message}`); + expect().assertFail(); + await audioRenderer.release(); + done(); + return; + } + + try { + let audioRendererInfos = audioStreamManager.getCurrentAudioRendererInfoArraySync(); + console.info("AudioRendererChangeInfoArray++++:"+JSON.stringify(audioRendererInfos)); + expect(audioRendererInfos.length).assertLarger(0); + expect(audioRendererInfos[0].deviceDescriptors[0].displayName!=="" + && audioRendererInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioRenderer.release(); + done(); + } catch (err) { + console.error(`${TAG} getCurrentAudioRendererInfoArraySync002 ERROR: ${JSON.stringify(err)}`); + expect(false).assertTrue(); + await audioRenderer.release(); + done(); + } + }); + + /* + * @tc.name:getCurrentAudioRendererInfoArraySync003 + * @tc.desc:Get getCurrentAudioRendererInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioRendererInfoArraySync003", 0, async function (done) { + let audioRenderer = null; + let audioStreamManager = null; + + try { + audioRenderer = await audio.createAudioRenderer(AudioRendererOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + await audioRenderer.start(); + await audioRenderer.stop(); + let audioRendererInfos = audioStreamManager.getCurrentAudioRendererInfoArraySync(); + expect(audioRendererInfos.length).assertLarger(0); + expect(audioRendererInfos[0].deviceDescriptors[0].displayName!=="" + && audioRendererInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioRenderer.release(); + audioRendererInfos = audioStreamManager.getCurrentAudioRendererInfoArraySync(); + 
expect(audioRendererInfos.length).assertEqual(0); + done(); + } catch(e) { + console.error(`${TAG} getCurrentAudioRendererInfoArraySync003 ERROR: ${e.message}`); + expect().assertFail(); + await audioRenderer.release(); + done(); + return; + } + }); + + /* + * @tc.name:getCurrentAudioCapturerInfoArraySync001 + * @tc.desc:Get getCurrentAudioCapturerInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioCapturerInfoArraySync001", 0, async function (done) { + let audioCapturer = null; + let audioStreamManager = null; + try { + audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + } catch(e) { + console.error(`${TAG} getCurrentAudioCapturerInfoArraySync001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + + try { + let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); + console.info("getCurrentAudioCapturerInfoArraySync001:"+JSON.stringify(audioCapturerInfos)); + expect(audioCapturerInfos.length).assertLarger(0); + expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" + && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioCapturer.release(); + done(); + } catch (err) { + console.error(`${TAG} getCurrentAudioCapturerInfoArraySync001 ERROR: ${JSON.stringify(err)}`); + expect(false).assertTrue(); + await audioCapturer.release(); + done(); + return; + } + }); + + /* + * @tc.name:getCurrentAudioCapturerInfoArraySync002 + * @tc.desc:Get getCurrentAudioCapturerInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioCapturerInfoArraySync002", 0, async function (done) { + + let audioCapturer = null; + let audioStreamManager = null; + try { + audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + await audioCapturer.start(); + } catch(e) { + 
console.error(`${TAG} getCurrentAudioCapturerInfoArraySync002 ERROR: ${e.message}`); + expect().assertFail(); + await audioCapturer.release(); + done(); + return; + } + + try { + let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); + console.info("AudioCapturerChangeInfoArray++++:"+JSON.stringify(audioCapturerInfos)); + expect(audioCapturerInfos.length).assertLarger(0); + expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" + && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioCapturer.release(); + done(); + } catch (err) { + console.error(`${TAG} getCurrentAudioCapturerInfoArraySync002 ERROR: ${JSON.stringify(err)}`); + expect(false).assertTrue(); + await audioCapturer.release(); + done(); + } + }); + + /* + * @tc.name:getCurrentAudioCapturerInfoArraySync003 + * @tc.desc:Get getCurrentAudioCapturerInfoArraySync + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getCurrentAudioCapturerInfoArraySync003", 0, async function (done) { + let audioCapturer = null; + let audioStreamManager = null; + + try { + audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); + audioStreamManager = audio.getAudioManager().getStreamManager(); + await audioCapturer.start(); + await audioCapturer.stop(); + let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); + expect(audioCapturerInfos.length).assertLarger(0); + expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" + && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); + + await audioCapturer.release(); + audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); + expect(audioCapturerInfos.length).assertEqual(0); + done(); + } catch(e) { + console.error(`${TAG} getCurrentAudioCapturerInfoArraySync003 ERROR: ${e.message}`); + expect().assertFail(); + await audioCapturer.release(); + done(); + return; + } + }); + + /* + * 
@tc.name:getAudioEffectInfoArraySync001 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_UNKNOWN + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync001", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(audio.StreamUsage.STREAM_USAGE_UNKNOWN); + console.info(`${TAG} getAudioEffectInfoArraySync success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync001 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync002 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_MEDIA + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync002", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(audio.StreamUsage.STREAM_USAGE_MEDIA); + console.info(`${TAG} getAudioEffectInfoArraySync002 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync002 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync003 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_MUSIC + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync003", 0, async function (done) { + let audioStreamManager = 
null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(audio.StreamUsage.STREAM_USAGE_MUSIC); + console.info(`${TAG} getAudioEffectInfoArraySync003 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync003 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync004 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_VOICE_COMMUNICATION + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync004", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION); + console.info(`${TAG} getAudioEffectInfoArraySync004 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync004 ERROR: ${e.message}`); + expect().assertFail(); + done(); + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync005 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_VOICE_ASSISTANT + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync005", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(audio.StreamUsage.STREAM_USAGE_VOICE_ASSISTANT); + 
console.info(`${TAG} getAudioEffectInfoArraySync005 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync005 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync006 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_ALARM + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync006", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_ALARM); + console.info(`${TAG} getAudioEffectInfoArraySync006 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync006 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync007 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_VOICE_MESSAGE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync007", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_VOICE_MESSAGE); + console.info(`${TAG} getAudioEffectInfoArraySync007 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + 
expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync007 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync008 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_NOTIFICATION_RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync008", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE); + console.info(`${TAG} getAudioEffectInfoArraySync008 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync008 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync009 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_RINGTONE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync009", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_RINGTONE); + console.info(`${TAG} getAudioEffectInfoArraySync009 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync009 ERROR: ${e.message}`); + expect().assertFail(); + done(); 
+ return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync010 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_NOTIFICATION + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync010", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_NOTIFICATION); + console.info(`${TAG} getAudioEffectInfoArraySync010 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync010 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync011 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_ACCESSIBILITY + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync011", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_ACCESSIBILITY); + console.info(`${TAG} getAudioEffectInfoArraySync011 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync011 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync012 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_MOVIE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + 
it("getAudioEffectInfoArraySync012", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_MOVIE); + console.info(`${TAG} getAudioEffectInfoArraySync012 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync012 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync013 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_GAME + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync013", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_GAME); + console.info(`${TAG} getAudioEffectInfoArraySync013 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync013 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync014 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_AUDIOBOOK + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync014", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = 
audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_AUDIOBOOK); + console.info(`${TAG} getAudioEffectInfoArraySync014 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync014 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync015 + * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_NAVIGATION + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync015", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync( + audio.StreamUsage.STREAM_USAGE_NAVIGATION); + console.info(`${TAG} getAudioEffectInfoArraySync015 success:${JSON.stringify(audioEffectInfoArray)}`); + expect(audioEffectInfoArray.length).assertLarger(0); + expect(audioEffectInfoArray[0]).assertEqual(0); + expect(audioEffectInfoArray[1]).assertEqual(1); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync015 ERROR: ${e.message}`); + expect().assertFail(); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync016 + * @tc.desc:Get getAudioEffectInfoArraySync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync016", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(); + console.info(`The effect modes is obtained ${audioEffectInfoArray}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} 
getAudioEffectInfoArraySync016 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync017 + * @tc.desc:Get getAudioEffectInfoArraySync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync017", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync("Invalid type"); + console.info(`The effect modes is obtained ${audioEffectInfoArray}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync017 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:getAudioEffectInfoArraySync018 + * @tc.desc:Get getAudioEffectInfoArraySync fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("getAudioEffectInfoArraySync018", 0, async function (done) { + let invalidVolumeType = 10000; + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let audioEffectInfoArray = audioStreamManager.getAudioEffectInfoArraySync(invalidVolumeType); + console.info(`The effect modes is obtained ${audioEffectInfoArray}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} getAudioEffectInfoArraySync018 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync001 + * @tc.desc:Get isActiveSync success - VOICE_CALL - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync001", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let 
isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.VOICE_CALL); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync001 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync002 + * @tc.desc:Get isActiveSync success - RINGTONE - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync002", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.RINGTONE); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync002 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync003 + * @tc.desc:Get isActiveSync success - MEDIA - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync003", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.MEDIA); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync003 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync004 + * @tc.desc:Get isActiveSync success - ALARM - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync004", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = 
audioStreamManager.isActiveSync(audio.AudioVolumeType.ALARM); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync004 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync005 + * @tc.desc:Get isActiveSync success - ACCESSIBILITY - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync005", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.ACCESSIBILITY); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync005 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync006 + * @tc.desc:Get isActiveSync success - VOICE_ASSISTANT - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync006", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.VOICE_ASSISTANT); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync006 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync007 + * @tc.desc:Get isActiveSync success - ULTRASONIC - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync007", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = 
audioStreamManager.isActiveSync(audio.AudioVolumeType.ULTRASONIC); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync007 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync008 + * @tc.desc:Get isActiveSync success - ALL - When stream is NOT playing + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync008", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(audio.AudioVolumeType.ALL); + console.info(`The active status is obtained ${isActive}.`); + expect(isActive).assertEqual(false); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync008 ERROR: ${e.message}`); + expect(false).assertTrue(); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync009 + * @tc.desc:Get isActiveSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync009", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(); + console.info(`The active status is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync009 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync010 + * @tc.desc:Get isActiveSync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync010", 0, async function (done) { + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync("Invalid type"); + console.info(`The 
active status is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync010 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); + done(); + return; + } + }); + + /* + * @tc.name:isActiveSync011 + * @tc.desc:Get isActiveSync fail(6800101) - Invalid param value : 10000 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("isActiveSync011", 0, async function (done) { + let invalidVolumeType = 10000; + let audioStreamManager = null; + + try { + audioStreamManager = audio.getAudioManager().getStreamManager(); + let isActive = audioStreamManager.isActiveSync(invalidVolumeType); + console.info(`The active status is obtained ${isActive}.`); + expect(false).assertTrue(); + + done(); + } catch(e) { + console.error(`${TAG} isActiveSync011 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); + done(); + return; + } + }); }) diff --git a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/AudioVolumeManagerJsUnitTest.js b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/AudioVolumeManagerJsUnitTest.js new file mode 100644 index 0000000000..50a4dfbe17 --- /dev/null +++ b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/AudioVolumeManagerJsUnitTest.js @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import audio from '@ohos.multimedia.audio'; +import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from 'deccjsunit/index' + + +describe("AudioVolumeManagerJsUnitTest", function () { + let audioManager = audio.getAudioManager(); + let audioVolumeManager = audioManager.getVolumeManager(); + + beforeAll(async function () { + + // input testsuit setup step,setup invoked before all testcases + console.info('beforeAll called') + }) + + afterAll(function () { + + // input testsuit teardown step,teardown invoked after all testcases + console.info('afterAll called') + }) + + beforeEach(function () { + + // input testcase setup step,setup invoked before each testcases + console.info('beforeEach called') + }) + + afterEach(function () { + + // input testcase teardown step,teardown invoked after each testcases + console.info('afterEach called') + }) + + /* + * @tc.name:SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_MANAGER_SYNC_001 + * @tc.desc:getVolumeGroupManagerSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_MANAGER_SYNC_001", 0, async function (done) { + let groupid = audio.DEFAULT_VOLUME_GROUP_ID; + try { + let value = audioVolumeManager.getVolumeGroupManagerSync(groupid); + console.info(`SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_MANAGER_SYNC_001 SUCCESS: ${value}.`); + expect(typeof value).assertEqual('object'); + done(); + } catch (err) { + console.error(`SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_MANAGER_SYNC_001 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_INFOS_SYNC_001 + * @tc.desc:getVolumeGroupInfosSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_INFOS_SYNC_001", 0, async function (done) { + try { + let value = audioVolumeManager.getVolumeGroupInfosSync(audio.LOCAL_NETWORK_ID); + 
console.info(`SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_INFOS_SYNC_001 SUCCESS: ${value}.`); + expect(value.length).assertLarger(0); + done(); + } catch (err) { + console.error(`SUB_AUDIO_VOLUME_MANAGER_GET_VOLUME_GROUP_INFOS_SYNC_001 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) +}) \ No newline at end of file diff --git a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/BUILD.gn b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/BUILD.gn new file mode 100644 index 0000000000..46f86bc5f3 --- /dev/null +++ b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/BUILD.gn @@ -0,0 +1,28 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") + +module_output_path = "multimedia_audio_framework/audio_volume_manager_js" + +ohos_js_unittest("AudioVolumeManagerJsUnitTest") { + module_out_path = module_output_path + + hap_profile = "./config.json" + certificate_profile = "./signature/openharmony_sx.p7b" +} + +group("jsunittest") { + testonly = true + deps = [ ":AudioVolumeManagerJsUnitTest" ] +} diff --git a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json new file mode 100644 index 0000000000..f9ecd195bb --- /dev/null +++ b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json @@ -0,0 +1,62 @@ +{ + "app": { + "bundleName": "com.example.myapplication", + "vendor": "example", + "version": { + "code": 1, + "name": "1.0" + }, + "apiVersion": { + "compatible": 8, + "target": 9 + } + }, + "deviceConfig": {}, + "module": { + "package": "com.example.myapplication", + "name": ".MyApplication", + "deviceType": [ + "phone", + "tablet", + "2in1" + ], + "distro": { + "deliveryWithInstall": true, + "moduleName": "entry", + "moduleType": "entry" + }, + "abilities": [ + { + "skills": [ + { + "entities": [ + "entity.system.home" + ], + "actions": [ + "action.system.home" + ] + } + ], + "name": "com.example.myapplication.MainAbility", + "icon": "$media:icon", + "description": "$string:mainability_description", + "label": "MyApplication", + "type": "page", + "launchType": "standard", + "visible": true + } + ], + "js": [ + { + "pages": [ + "pages/index/index" + ], + "name": "default", + "window": { + "designWidth": 720, + "autoDesignWidth": false + } + } + ] + } + } \ No newline at end of file diff --git a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/signature/openharmony_sx.p7b b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/signature/openharmony_sx.p7b new file mode 100644 index 
0000000000000000000000000000000000000000..49d5a09221f14f3d260b279ac71c241802e9391f GIT binary patch literal 3504 zcmcgveNYo;9wr|I5hGX>)KkIufhPvvgd`Ap*6waLAHW6@5+D%|H_7G$l1)f9SrSgG z@yb=LdY)6IDi*&`uV@{$BAuz#-c_q!b!@G3(-V5uSng#0WD2=MtdxUdF|H(lq)_H` z5|U!}JGj^5@K8}vNsEZ|Q(_z8^aq)FC32;#SRt3ni{~W#xRWA+j9XExlob^+?!|LN zg9w6zm%u{=Gnp++I=qC~LpuHJd=4T)c>Mt)C{7@%2oR(e5BmIMl*t{o7CRB7JWh`t zr`RQl*&mE*boyKbHJWs49Riuqgtv~wbULI;g&>eU-v!Y+7;@>fhV(IyA}@NHF~#0 z{Qkl!_s}3>ijaf}FGdre%qfMGBOs0ffs0KlrG<9UcqERgbbP$Xp z`BA+lqA}Rb6-Ikh5u{}?ZMom)u-R3J1+7tgVwC~Z(qJ}g)tFXM?hey+WI(N-+u-$> zOB;#Gh=W8Uv|Ou-1R86U&2W*rywZbIG~h8ffWd%8?ohc5H85jc*yRes zE>sO0>?$Y*Szs5<+9+gE+8}?a3^HISqJlKc+F`_nGPC*KvFw|X$%Oa zwUF{AS5dw?s)icTDi~B~)RCe>bE!-ln@iVL#S}H=rjQgavQX-Lt4dlE(lZzoT4u+Q zu0tGF4_a9kCZT|_%HEW33~KYeRI$&EHCJezK23QuZFT8kg;N@qmdfi$ zv^-#zI6_VbZLul|Oi@QBHVmp)J!KO;-m8PMWB2 zn=%v)QG~DFhKD?MW;iQHP6DUGtXPQLj0z1VFaZ&x4Y=JKVl@Ewdi^xv5D(Z3M52XE zkdXwRC&bELA}2_&vSz24v|*72Y%Y@*W|c=2X!InME=b~D(c+G{;GZDQ{RN0~KIq_a zIb1HkXt(be7$q3NsPVaZd>$`33D_lRAXUIK^97k)L@yZ&USN%pnQ23`NHPIrv);7K zj3M4!6M_3A)4)_V#LJvGGz2{^Pc8z1DcM4)1W18YB4yrkYqn6Plt>FDgC+?4L=XTJ z41mac7KqBA-|6uZxyAt{1{RPhNJ*4|%NeW(kCVcKllV2~x!?439{dVwIyUa~?`Cb< zr!HD9Y|G(%y>Ip(U+=qoWeT>Z=sVu0w@#AWeAE1#4?gZNtV_-M!+O~{9=Yn|_)R;E zpc3RTa3)XVj|W*x$Nr_YTa@*A+P1Rp-We@Dw~`lc7i++1HX)tE`7fW(e^KTsb^^N2Q-`5K{#^F8Vd6IyWD@p2Fvp+R%i|*G>9|Pa`Ez#o^ z9DJcYgI2%v;Kug0?yumrj?2#PuI#>ZWwk)D=;80%nIRpm4Y4mDjw*p}OkY@@Jay%X z&ZQOLvg)rZXZ9K=9_Ju?u1!C*L!QyR_hD#B|KHLNG5k?6{5d_ypY$umtA=9%?w{DY zSzzWs>vEpeI-}oDJwYOgOj10AL<#dbB8h;E$K*-QU&c=o@oY(aOwk)%*C<@R<)*Oo z!f1bBXQX}09k8*fVqNso?(B>Co#!;;Tay_c;S7)Z2YEDwI6iMUlIPgHAz~zv z7fOfnh&7*&B0hscQ@4J5s{Ng8`M#BBcGmyn8Z;x1e|6$pw?CtnBm4IVzTbBxJ(fE!w#xP`l#PlAi?*MmmrD$p~r=f3b0vqVnDDcRt)aam%jTD^_IOzR)&x#obpH z9nR0Wd;Rp4HTT5k9d>{Z6Q`$gnTrNsPD18nw1T;hqkM^opL_p-b;J>C z&Ww4p#;-p+1zWsHdpEzO@x;ApCziQensfp%^3h4o}3}hy>Vho^T&nP z`)Jk4J`eQG8UOs?2I+eZUq?RwJCo8UM2~lF9+gZ`I}el{xiv2 setTimeout(resolve, ms)); + } + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_001', 0, async 
function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("1.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_002', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async (eventAction) => { + console.log("2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_003', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { 
+ console.log("3.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_004', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("4.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_005', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("5.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("5_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_006', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("6.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_007', 0, async function (done) { + let render1 = await 
createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("7.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_008', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("8.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_009', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("9.eventAction=" + 
JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_010', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("10.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // VOICE_CALL + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_011', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("11-2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if 
(eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_012', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("12_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_013', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("13_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_014', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await 
createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("14_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_015', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("15.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("15_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_016', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + 
console.log("16_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_017', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("17.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_018', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + 
render2.on("audioInterrupt", async(eventAction) => { + console.log("18_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN || eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_019', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("19_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN || eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_020', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + 
await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("20_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN || eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // RINGTONE + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_021', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("21_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_022', 0, async function (done) { + let render1 = await 
createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("22.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_023', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("23_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_024', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("24_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await 
startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_025', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("25.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("25_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_026', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("26_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await startFail(render2,done,render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_027', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("27.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_028', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("28_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_029', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("29_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_030', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("30_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // VOICE_ASSISTANT + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_031', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("31.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_032', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("32.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + 
await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_033', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("33.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_034', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("34.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_035', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("35.eventAction=" + 
JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("35_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_036', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("36.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_037', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("37.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await 
createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_038', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("38.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_039', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("39.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_040', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) + 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("40.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // ULTRASONIC + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_041', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("41.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("41_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_042', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("42.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("42_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_043', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("43.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("43_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + 
it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_044', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("44.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("44_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_045', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("45_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_046', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await 
createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("46.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("46_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_047', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("47.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("47_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await 
release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_048', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("48.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("48_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_049', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("49.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("49_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await 
sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_050', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("50.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("50_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + // ALARM + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_051', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("51.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_052', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("52.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_053', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("53.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_054', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + 
console.log("54.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_055', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("55.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("55_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_056', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("56.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_057', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("57.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_058', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("58.eventAction=" + JSON.stringify(eventAction)) + 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_059', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("59.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_060', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("60.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // ACCESSIBILITY + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_061', 0, async function (done) { + let render1 
= await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("61_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_062', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("62_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_063', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("63.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_064', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("64_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_065', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("65.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", 
async(eventAction) => { + console.log("65_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_066', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("66_2.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_067', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("67.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await 
createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_068', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("68_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_069', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("69_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_070', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("70_2.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await startFail(render2, done, render1) + }) +}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js new file mode 100644 index 0000000000..a0df0a239d --- /dev/null +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js @@ -0,0 +1,912 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import audio from '@ohos.multimedia.audio'; + +describe("AudioRendererInterruptSyncRareTypeUnitTest", function() { + beforeAll(async function () { + // input testsuit setup step, setup invoked before all testcases + console.info('beforeAll called') + }) + + afterAll(function () { + + // input testsuit teardown step, teardown invoked after all testcases + console.info('afterAll called') + }) + + beforeEach(function () { + + // input testcase setup step, setup invoked before each testcases + console.info('beforeEach called') + }) + + afterEach(function () { + + // input testcase teardown step, teardown invoked after each testcases + console.info('afterEach called') + }) + + let renderInfo = { + 'MUSIC': { + content: audio.ContentType.CONTENT_TYPE_MUSIC, + usage: audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags: 0, + }, + 'VOICE_CALL': { + content: audio.ContentType.CONTENT_TYPE_SPEECH, + usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, + rendererFlags: 0 + }, + 'RINGTONE': { + content: audio.ContentType.CONTENT_TYPE_MUSIC, + usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE, + rendererFlags: 0, + }, + 'VOICE_ASSISTANT': { + content: audio.ContentType.CONTENT_TYPE_SPEECH, + usage: audio.StreamUsage.STREAM_USAGE_VOICE_ASSISTANT, + rendererFlags: 0 + }, + 'ULTRASONIC': { + content: audio.ContentType.CONTENT_TYPE_ULTRASONIC, + usage: audio.StreamUsage.STREAM_USAGE_SYSTEM, + rendererFlags: 0 + }, + 'ALARM': { + content: audio.ContentType.CONTENT_TYPE_MUSIC, + usage: audio.StreamUsage.STREAM_USAGE_ALARM, + rendererFlags: 0 + }, + 'ACCESSIBILITY': { + content: audio.ContentType.CONTENT_TYPE_SPEECH, + usage: audio.StreamUsage.STREAM_USAGE_ACCESSIBILITY, + rendererFlags: 0 + }, + 'SPEECH': { + content: audio.ContentType.CONTENT_TYPE_SPEECH, + usage: audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags: 0 + }, + 'MOVIE': { + content: audio.ContentType.CONTENT_TYPE_MOVIE, + usage: audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags: 0 + }, + 
'UNKNOW': { + content: audio.ContentType.CONTENT_TYPE_UNKNOWN, + usage: audio.StreamUsage.STREAM_USAGE_UNKNOWN, + rendererFlags: 0 + }, + } + + let streamInfo = { + '44100': { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, + channels: audio.AudioChannel.CHANNEL_2, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + }, + '48000' : { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, + channels: audio.AudioChannel.CHANNEL_2, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + }, + } + + async function createAudioRenderer(AudioRendererInfo, AudioStreamInfo, done) { + let render = null; + + var AudioRendererOptions = { + streamInfo: AudioStreamInfo, + rendererInfo: AudioRendererInfo + } + try { + render = await audio.createAudioRenderer(AudioRendererOptions) + console.log(" createAudioRenderer success.") + } catch (err) { + console.log(" createAudioRenderer err:" + JSON.stringify(err)) + expect(false).assertEqual(true) + done() + } + return render + } + + async function start(render,done) { + try { + // start() returns a Promise; it must be awaited so a rejection lands in this catch. + await render.start() + console.log(" start success.") + } catch (err) { + await release(render,done) + console.log(" start err:" + JSON.stringify(err)) + expect(false).assertEqual(true) + done() + } + } + + + async function startFail(render,done,render1) { + try { + // start() returns a Promise; without await, an interrupt-denied rejection would + // bypass this catch and the test would never reach done(). + await render.start() + console.log(" start success.") + } catch (err) { + console.log(" start err:" + JSON.stringify(err)) + await release(render,done) + await release(render1,done) + expect(true).assertEqual(true) + done() + } + } + + + async function stop(render,done) { + try { + // stop() returns a Promise; await so failures are reported instead of leaking + // as unhandled rejections. + await render.stop() + console.log(" stop success.") + } catch (err) { + console.log(" stop err:" + JSON.stringify(err)) + expect(false).assertEqual(true) + await release(render,done) + done() + } + } + + async function release(render,done) { + if (render.state == audio.AudioState.STATE_RELEASED) 
{ + console.log(" release render state: " + render.state) + return + } + try { + // release() returns a Promise; await so a failure is caught and reported here. + await render.release() + console.log(" release success.") + } catch (err) { + console.log(" release err:" + JSON.stringify(err)) + expect(false).assertEqual(true) + done() + } + } + + function sleep(ms) { + return new Promise(resolve => setTimeout(resolve, ms)); + } + + // SPEECH + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_071', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("71.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_072', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async (eventAction) => { + console.log("72.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await 
start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_073', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("73.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_074', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("74.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_075', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("75.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("75_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_076', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("76.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + 
await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_077', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("77.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_078', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("78.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await 
sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_079', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("79.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_080', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("80.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // MOVIE + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_081', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("81.eventAction=" + JSON.stringify(eventAction)) + 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_082', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async (eventAction) => { + console.log("82.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_083', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("83.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_084', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("84.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_085', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("85.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await 
createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("85_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_086', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("86.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_087', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("87.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_088', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("88.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_089', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("89.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_090', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("90.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // UNKNOW + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_091', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("91.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_092', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async (eventAction) => { + 
console.log("92.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_093', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("93.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_094', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + 
render1.on("audioInterrupt",async (eventAction) => { + console.log("94.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_095', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("95.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("95_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_096', 0, async function (done) { + let render1 = await 
createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("96.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) + } else { + } + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_097', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("97.eventAction=" + JSON.stringify(eventAction)) + if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) + } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) + } else {} + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render2, done) + await sleep(500) + await release(render1, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_098', 0, async 
function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("98.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_099', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("99.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_100', 0, async function (done) { + let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt",async (eventAction) => { + console.log("100.eventAction=" + JSON.stringify(eventAction)) + expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) + 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + await start(render2, done) + await sleep(500) + await release(render1, done) + await release(render2, done) + done() + }) + + // Both streams in share mode at the same time + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_101', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("101.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("101_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == false && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + // First stream in share mode, second in independent mode + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_102', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("102.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + 
render2.on("audioInterrupt", async(eventAction) => { + console.log("102_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == true && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + + // First stream in independent mode, second in share mode + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_103', 0, async function (done) { + let render1_callback = false + let render2_callback = false + let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) + render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) + render1.on("audioInterrupt", async(eventAction) => { + console.log("103.eventAction=" + JSON.stringify(eventAction)) + render1_callback = true + }) + await start(render1, done) + + let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) + render2.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) + render2.on("audioInterrupt", async(eventAction) => { + console.log("103_2.eventAction=" + JSON.stringify(eventAction)) + render2_callback = true + }) + await start(render2, done) + await sleep(500) + console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) + expect(render1_callback == true && render2_callback == false).assertTrue() + await sleep(100) + await release(render1, done) + await release(render2, done) + done() + }) + +}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/AudioRendererJsUnitTest.js b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/AudioRendererJsUnitTest.js new file mode 100644 index 0000000000..e129722a9a --- /dev/null +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/AudioRendererJsUnitTest.js @@ -0,0 
+1,352 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import audio from '@ohos.multimedia.audio'; +import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from 'deccjsunit/index' + +const TAG = "[AudioRendererJsUnitTest]"; + +describe("AudioRendererJsUnitTest", function() { + let audioStreamInfo = { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, + channels: audio.AudioChannel.CHANNEL_1, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + } + let audioRendererInfo = { + content: audio.ContentType.CONTENT_TYPE_MUSIC, + usage: audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags: 0 + } + let audioRendererOptions = { + streamInfo: audioStreamInfo, + rendererInfo: audioRendererInfo + } + + let audioRenderer; + + beforeAll(async function () { + // input testsuit setup step, setup invoked before all testcases + try { + audioRenderer = audio.createAudioRendererSync(audioRendererOptions); + console.info(`${TAG}: AudioRenderer created SUCCESS, state: ${audioRenderer.state}`); + } catch (err) { + console.error(`${TAG}: AudioRenderer created ERROR: ${err.message}`); + } + console.info(TAG + 'beforeAll called') + }) + + afterAll(function () { + + // input testsuit teardown step, teardown invoked after all testcases + audioRenderer.release().then(() => { + console.info(`${TAG}: AudioRenderer release : SUCCESS`); 
+ }).catch((err) => { + console.info(`${TAG}: AudioRenderer release :ERROR : ${err.message}`); + }); + console.info(TAG + 'afterAll called') + }) + + beforeEach(function () { + + // input testcase setup step, setup invoked before each testcases + console.info(TAG + 'beforeEach called') + }) + + afterEach(function () { + + // input testcase teardown step, teardown invoked after each testcases + console.info(TAG + 'afterEach called') + }) + + /* + * @tc.name:SUB_AUDIO_CREATE_AUDIO_RENDERER_SYNC_001 + * @tc.desc:createAudioRendererSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it("SUB_AUDIO_CREATE_AUDIO_RENDERER_SYNC_001", 0, async function (done) { + try { + let value = audio.createAudioRendererSync(audioRendererOptions); + console.info(`SUB_AUDIO_CREATE_AUDIO_RENDERER_SYNC_001 SUCCESS: ${value}.`); + expect(typeof value).assertEqual('object'); + done(); + } catch (err) { + console.error(`SUB_AUDIO_CREATE_AUDIO_RENDERER_SYNC_001 ERROR: ${err}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_STREAM_INFO_SYNC_TEST_001 + * @tc.desc:getStreamInfoSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_STREAM_INFO_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getStreamInfoSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_STREAM_INFO_SYNC_TEST_001 SUCCESS: ${data}`); + expect(data.samplingRate).assertEqual(audio.AudioSamplingRate.SAMPLE_RATE_48000); + expect(data.channels).assertEqual(audio.AudioChannel.CHANNEL_1); + expect(data.sampleFormat).assertEqual(audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE); + expect(data.encodingType).assertEqual(audio.AudioEncodingType.ENCODING_TYPE_RAW); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_STREAM_INFO_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_RENDERER_INFO_SYNC_TEST_001 + * 
@tc.desc:getRendererInfoSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_RENDERER_INFO_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getRendererInfoSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDERER_INFO_SYNC_TEST_001 SUCCESS: ${data}`); + expect(data.content).assertEqual(audio.ContentType.CONTENT_TYPE_MUSIC); + expect(data.usage).assertEqual(audio.StreamUsage.STREAM_USAGE_MEDIA); + expect(data.rendererFlags).assertEqual(0); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDERER_INFO_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 + * @tc.desc:getAudioStreamIdSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_AUDIO_STREAM_ID_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getAudioStreamIdSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_AUDIO_STREAM_ID_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_BUFFER_SIZE_SYNC_TEST_001 + * @tc.desc:getBufferSizeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_BUFFER_SIZE_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getBufferSizeSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_BUFFER_SIZE_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_BUFFER_SIZE_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * 
@tc.name:SUB_AUDIO_RENDERER_GET_MIN_STREAM_VOLUME_SYNC_TEST_001 + * @tc.desc:getMinStreamVolumeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_MIN_STREAM_VOLUME_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getMinStreamVolumeSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_MIN_STREAM_VOLUME_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_MIN_STREAM_VOLUME_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_MAX_STREAM_VOLUME_SYNC_TEST_001 + * @tc.desc:getMaxStreamVolumeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_MAX_STREAM_VOLUME_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getMaxStreamVolumeSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_MAX_STREAM_VOLUME_SYNC_TEST_001 SUCCESS: ${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_MAX_STREAM_VOLUME_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_CURRENT_OUTPUT_DEVICES_SYNC_TEST_001 + * @tc.desc:getCurrentOutputDevicesSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_CURRENT_OUTPUT_DEVICES_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getCurrentOutputDevicesSync(); + console.info(`${TAG}: GET_CURRENT_OUTPUT_DEVICES_SYNC_TEST_001 SUCCESS: ${JSON.stringify(data)}`); + expect(data.length).assertLarger(0); + for (let i = 0; i < data.length; i++) { + expect(data[i].displayName!=="" && data[i].displayName!==undefined).assertTrue(); + } + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_CURRENT_OUTPUT_DEVICES_SYNC_TEST_001 
ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_AUDIO_TIME_SYNC_TEST_001 + * @tc.desc:getAudioTimeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_AUDIO_TIME_SYNC_TEST_001', 0, async function (done) { + try { + let audioRenderer = audio.createAudioRendererSync(audioRendererOptions); + let data = audioRenderer.getAudioTimeSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_AUDIO_TIME_SYNC_TEST_001 SUCCESS, before start: ${data}`); + expect(data).assertEqual(0); + + await audioRenderer.start(); + data = audioRenderer.getAudioTimeSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_AUDIO_TIME_SYNC_TEST_001 SUCCESS, after start: ${data}`); + expect(data).assertLarger(0); + + await audioRenderer.stop(); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_AUDIO_TIME_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_001 + * @tc.desc:getRenderRateSync success - RENDER_RATE_NORMAL + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_001', 0, async function (done) { + await audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL).then(() => { + console.info('setRenderRate SUCCESS'); + try { + let data = audioRenderer.getRenderRateSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_001 SUCCESS: ${data}`); + expect(data).assertEqual(audio.AudioRendererRate.RENDER_RATE_NORMAL); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }).catch((err) => { + console.error(`setRenderRate ERROR: ${err}`); + }); + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_002 + * @tc.desc:getRenderRateSync success - RENDER_RATE_DOUBLE + * 
@tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_002', 0, async function (done) { + await audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_DOUBLE).then(() => { + console.info('setRenderRate SUCCESS'); + try { + let data = audioRenderer.getRenderRateSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_002 SUCCESS: ${data}`); + expect(data).assertEqual(audio.AudioRendererRate.RENDER_RATE_DOUBLE); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_002 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }).catch((err) => { + console.error(`setRenderRate ERROR: ${err}`); + }); + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_003 + * @tc.desc:getRenderRateSync success - RENDER_RATE_DOUBLE + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_003', 0, async function (done) { + await audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_HALF).then(() => { + console.info('setRenderRate SUCCESS'); + try { + let data = audioRenderer.getRenderRateSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_003 SUCCESS: ${data}`); + expect(data).assertEqual(audio.AudioRendererRate.RENDER_RATE_HALF); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_RENDER_RATE_SYNC_TEST_003 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }).catch((err) => { + console.error(`setRenderRate ERROR: ${err}`); + }); + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_GET_UNDERFLOW_COUNT_SYNC_TEST_001 + * @tc.desc:getUnderflowCountSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_GET_UNDERFLOW_COUNT_SYNC_TEST_001', 0, async function (done) { + try { + let data = audioRenderer.getUnderflowCountSync(); + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_UNDERFLOW_COUNT_SYNC_TEST_001 SUCCESS: 
${data}`); + expect(typeof data).assertEqual('number'); + done(); + } catch (err) { + console.info(`${TAG}: SUB_AUDIO_RENDERER_GET_UNDERFLOW_COUNT_SYNC_TEST_001 ERROR: ${err.message}`); + expect(false).assertTrue(); + done(); + } + }) + +}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/BUILD.gn b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/BUILD.gn new file mode 100644 index 0000000000..329340f4a9 --- /dev/null +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/BUILD.gn @@ -0,0 +1,28 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") + +module_output_path = "multimedia_audio_framework/audio_renderer_js" + +ohos_js_unittest("AudioRendererJsUnitTest") { + module_out_path = module_output_path + + hap_profile = "./config.json" + certificate_profile = "./signature/openharmony_sx.p7b" +} + +group("jsunittest") { + testonly = true + deps = [ ":AudioRendererJsUnitTest" ] +} diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json new file mode 100644 index 0000000000..f9ecd195bb --- /dev/null +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json @@ -0,0 +1,62 @@ +{ + "app": { + "bundleName": "com.example.myapplication", + "vendor": "example", + "version": { + "code": 1, + "name": "1.0" + }, + "apiVersion": { + "compatible": 8, + "target": 9 + } + }, + "deviceConfig": {}, + "module": { + "package": "com.example.myapplication", + "name": ".MyApplication", + "deviceType": [ + "phone", + "tablet", + "2in1" + ], + "distro": { + "deliveryWithInstall": true, + "moduleName": "entry", + "moduleType": "entry" + }, + "abilities": [ + { + "skills": [ + { + "entities": [ + "entity.system.home" + ], + "actions": [ + "action.system.home" + ] + } + ], + "name": "com.example.myapplication.MainAbility", + "icon": "$media:icon", + "description": "$string:mainability_description", + "label": "MyApplication", + "type": "page", + "launchType": "standard", + "visible": true + } + ], + "js": [ + { + "pages": [ + "pages/index/index" + ], + "name": "default", + "window": { + "designWidth": 720, + "autoDesignWidth": false + } + } + ] + } + } \ No newline at end of file diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/signature/openharmony_sx.p7b b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/signature/openharmony_sx.p7b new file mode 100644 index 
0000000000000000000000000000000000000000..49d5a09221f14f3d260b279ac71c241802e9391f GIT binary patch literal 3504 zcmcgveNYo;9wr|I5hGX>)KkIufhPvvgd`Ap*6waLAHW6@5+D%|H_7G$l1)f9SrSgG z@yb=LdY)6IDi*&`uV@{$BAuz#-c_q!b!@G3(-V5uSng#0WD2=MtdxUdF|H(lq)_H` z5|U!}JGj^5@K8}vNsEZ|Q(_z8^aq)FC32;#SRt3ni{~W#xRWA+j9XExlob^+?!|LN zg9w6zm%u{=Gnp++I=qC~LpuHJd=4T)c>Mt)C{7@%2oR(e5BmIMl*t{o7CRB7JWh`t zr`RQl*&mE*boyKbHJWs49Riuqgtv~wbULI;g&>eU-v!Y+7;@>fhV(IyA}@NHF~#0 z{Qkl!_s}3>ijaf}FGdre%qfMGBOs0ffs0KlrG<9UcqERgbbP$Xp z`BA+lqA}Rb6-Ikh5u{}?ZMom)u-R3J1+7tgVwC~Z(qJ}g)tFXM?hey+WI(N-+u-$> zOB;#Gh=W8Uv|Ou-1R86U&2W*rywZbIG~h8ffWd%8?ohc5H85jc*yRes zE>sO0>?$Y*Szs5<+9+gE+8}?a3^HISqJlKc+F`_nGPC*KvFw|X$%Oa zwUF{AS5dw?s)icTDi~B~)RCe>bE!-ln@iVL#S}H=rjQgavQX-Lt4dlE(lZzoT4u+Q zu0tGF4_a9kCZT|_%HEW33~KYeRI$&EHCJezK23QuZFT8kg;N@qmdfi$ zv^-#zI6_VbZLul|Oi@QBHVmp)J!KO;-m8PMWB2 zn=%v)QG~DFhKD?MW;iQHP6DUGtXPQLj0z1VFaZ&x4Y=JKVl@Ewdi^xv5D(Z3M52XE zkdXwRC&bELA}2_&vSz24v|*72Y%Y@*W|c=2X!InME=b~D(c+G{;GZDQ{RN0~KIq_a zIb1HkXt(be7$q3NsPVaZd>$`33D_lRAXUIK^97k)L@yZ&USN%pnQ23`NHPIrv);7K zj3M4!6M_3A)4)_V#LJvGGz2{^Pc8z1DcM4)1W18YB4yrkYqn6Plt>FDgC+?4L=XTJ z41mac7KqBA-|6uZxyAt{1{RPhNJ*4|%NeW(kCVcKllV2~x!?439{dVwIyUa~?`Cb< zr!HD9Y|G(%y>Ip(U+=qoWeT>Z=sVu0w@#AWeAE1#4?gZNtV_-M!+O~{9=Yn|_)R;E zpc3RTa3)XVj|W*x$Nr_YTa@*A+P1Rp-We@Dw~`lc7i++1HX)tE`7fW(e^KTsb^^N2Q-`5K{#^F8Vd6IyWD@p2Fvp+R%i|*G>9|Pa`Ez#o^ z9DJcYgI2%v;Kug0?yumrj?2#PuI#>ZWwk)D=;80%nIRpm4Y4mDjw*p}OkY@@Jay%X z&ZQOLvg)rZXZ9K=9_Ju?u1!C*L!QyR_hD#B|KHLNG5k?6{5d_ypY$umtA=9%?w{DY zSzzWs>vEpeI-}oDJwYOgOj10AL<#dbB8h;E$K*-QU&c=o@oY(aOwk)%*C<@R<)*Oo z!f1bBXQX}09k8*fVqNso?(B>Co#!;;Tay_c;S7)Z2YEDwI6iMUlIPgHAz~zv z7fOfnh&7*&B0hscQ@4J5s{Ng8`M#BBcGmyn8Z;x1e|6$pw?CtnBm4IVzTbBxJ(fE!w#xP`l#PlAi?*MmmrD$p~r=f3b0vqVnDDcRt)aam%jTD^_IOzR)&x#obpH z9nR0Wd;Rp4HTT5k9d>{Z6Q`$gnTrNsPD18nw1T;hqkM^opL_p-b;J>C z&Ww4p#;-p+1zWsHdpEzO@x;ApCziQensfp%^3h4o}3}hy>Vho^T&nP z`)Jk4J`eQG8UOs?2I+eZUq?RwJCo8UM2~lF9+gZ`I}el{xiv2)KkIufhPvvgd`Ap*6waLAHW6@5+D%|H_7G$l1)f9SrSgG 
z@yb=LdY)6IDi*&`uV@{$BAuz#-c_q!b!@G3(-V5uSng#0WD2=MtdxUdF|H(lq)_H` z5|U!}JGj^5@K8}vNsEZ|Q(_z8^aq)FC32;#SRt3ni{~W#xRWA+j9XExlob^+?!|LN zg9w6zm%u{=Gnp++I=qC~LpuHJd=4T)c>Mt)C{7@%2oR(e5BmIMl*t{o7CRB7JWh`t zr`RQl*&mE*boyKbHJWs49Riuqgtv~wbULI;g&>eU-v!Y+7;@>fhV(IyA}@NHF~#0 z{Qkl!_s}3>ijaf}FGdre%qfMGBOs0ffs0KlrG<9UcqERgbbP$Xp z`BA+lqA}Rb6-Ikh5u{}?ZMom)u-R3J1+7tgVwC~Z(qJ}g)tFXM?hey+WI(N-+u-$> zOB;#Gh=W8Uv|Ou-1R86U&2W*rywZbIG~h8ffWd%8?ohc5H85jc*yRes zE>sO0>?$Y*Szs5<+9+gE+8}?a3^HISqJlKc+F`_nGPC*KvFw|X$%Oa zwUF{AS5dw?s)icTDi~B~)RCe>bE!-ln@iVL#S}H=rjQgavQX-Lt4dlE(lZzoT4u+Q zu0tGF4_a9kCZT|_%HEW33~KYeRI$&EHCJezK23QuZFT8kg;N@qmdfi$ zv^-#zI6_VbZLul|Oi@QBHVmp)J!KO;-m8PMWB2 zn=%v)QG~DFhKD?MW;iQHP6DUGtXPQLj0z1VFaZ&x4Y=JKVl@Ewdi^xv5D(Z3M52XE zkdXwRC&bELA}2_&vSz24v|*72Y%Y@*W|c=2X!InME=b~D(c+G{;GZDQ{RN0~KIq_a zIb1HkXt(be7$q3NsPVaZd>$`33D_lRAXUIK^97k)L@yZ&USN%pnQ23`NHPIrv);7K zj3M4!6M_3A)4)_V#LJvGGz2{^Pc8z1DcM4)1W18YB4yrkYqn6Plt>FDgC+?4L=XTJ z41mac7KqBA-|6uZxyAt{1{RPhNJ*4|%NeW(kCVcKllV2~x!?439{dVwIyUa~?`Cb< zr!HD9Y|G(%y>Ip(U+=qoWeT>Z=sVu0w@#AWeAE1#4?gZNtV_-M!+O~{9=Yn|_)R;E zpc3RTa3)XVj|W*x$Nr_YTa@*A+P1Rp-We@Dw~`lc7i++1HX)tE`7fW(e^KTsb^^N2Q-`5K{#^F8Vd6IyWD@p2Fvp+R%i|*G>9|Pa`Ez#o^ z9DJcYgI2%v;Kug0?yumrj?2#PuI#>ZWwk)D=;80%nIRpm4Y4mDjw*p}OkY@@Jay%X z&ZQOLvg)rZXZ9K=9_Ju?u1!C*L!QyR_hD#B|KHLNG5k?6{5d_ypY$umtA=9%?w{DY zSzzWs>vEpeI-}oDJwYOgOj10AL<#dbB8h;E$K*-QU&c=o@oY(aOwk)%*C<@R<)*Oo z!f1bBXQX}09k8*fVqNso?(B>Co#!;;Tay_c;S7)Z2YEDwI6iMUlIPgHAz~zv z7fOfnh&7*&B0hscQ@4J5s{Ng8`M#BBcGmyn8Z;x1e|6$pw?CtnBm4IVzTbBxJ(fE!w#xP`l#PlAi?*MmmrD$p~r=f3b0vqVnDDcRt)aam%jTD^_IOzR)&x#obpH z9nR0Wd;Rp4HTT5k9d>{Z6Q`$gnTrNsPD18nw1T;hqkM^opL_p-b;J>C z&Ww4p#;-p+1zWsHdpEzO@x;ApCziQensfp%^3h4o}3}hy>Vho^T&nP z`)Jk4J`eQG8UOs?2I+eZUq?RwJCo8UM2~lF9+gZ`I}el{xiv2 Date: Tue, 29 Aug 2023 11:06:49 +0800 Subject: [PATCH 2/4] add sync api ut Signed-off-by: huyue57 Change-Id: I86e97bdfe7d5ea89d94323e6d10955c6f8c2e387 --- .../AudioStreamManagerJsTest.js | 109 ------------------ 1 file changed, 109 deletions(-) diff --git 
a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js index e20647fe2f..9b5bc25e4a 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js +++ b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/AudioStreamManagerJsTest.js @@ -471,115 +471,6 @@ describe("AudioStreamManagerJsTest", function () { } }); - /* - * @tc.name:getCurrentAudioCapturerInfoArraySync001 - * @tc.desc:Get getCurrentAudioCapturerInfoArraySync - * @tc.type: FUNC - * @tc.require: I7V04L - */ - it("getCurrentAudioCapturerInfoArraySync001", 0, async function (done) { - let audioCapturer = null; - let audioStreamManager = null; - try { - audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); - audioStreamManager = audio.getAudioManager().getStreamManager(); - } catch(e) { - console.error(`${TAG} getCurrentAudioCapturerInfoArraySync001 ERROR: ${e.message}`); - expect().assertFail(); - done(); - return; - } - - try { - let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); - console.info("getCurrentAudioCapturerInfoArraySync001:"+JSON.stringify(audioCapturerInfos)); - expect(audioCapturerInfos.length).assertLarger(0); - expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" - && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); - - await audioCapturer.release(); - done(); - } catch (err) { - console.error(`${TAG} getCurrentAudioCapturerInfoArraySync001 ERROR: ${JSON.stringify(err)}`); - expect(false).assertTrue(); - await audioCapturer.release(); - done(); - return; - } - }); - - /* - * @tc.name:getCurrentAudioCapturerInfoArraySync002 - * @tc.desc:Get getCurrentAudioCapturerInfoArraySync - * @tc.type: FUNC - * @tc.require: I7V04L - */ - it("getCurrentAudioCapturerInfoArraySync002", 0, async function (done) { - - 
let audioCapturer = null; - let audioStreamManager = null; - try { - audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); - audioStreamManager = audio.getAudioManager().getStreamManager(); - await audioCapturer.start(); - } catch(e) { - console.error(`${TAG} getCurrentAudioCapturerInfoArraySync002 ERROR: ${e.message}`); - expect().assertFail(); - await audioCapturer.release(); - done(); - return; - } - - try { - let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); - console.info("AudioCapturerChangeInfoArray++++:"+JSON.stringify(audioCapturerInfos)); - expect(audioCapturerInfos.length).assertLarger(0); - expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" - && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); - - await audioCapturer.release(); - done(); - } catch (err) { - console.error(`${TAG} getCurrentAudioCapturerInfoArraySync002 ERROR: ${JSON.stringify(err)}`); - expect(false).assertTrue(); - await audioCapturer.release(); - done(); - } - }); - - /* - * @tc.name:getCurrentAudioCapturerInfoArraySync003 - * @tc.desc:Get getCurrentAudioCapturerInfoArraySync - * @tc.type: FUNC - * @tc.require: I7V04L - */ - it("getCurrentAudioCapturerInfoArraySync003", 0, async function (done) { - let audioCapturer = null; - let audioStreamManager = null; - - try { - audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions); - audioStreamManager = audio.getAudioManager().getStreamManager(); - await audioCapturer.start(); - await audioCapturer.stop(); - let audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); - expect(audioCapturerInfos.length).assertLarger(0); - expect(audioCapturerInfos[0].deviceDescriptors[0].displayName!=="" - && audioCapturerInfos[0].deviceDescriptors[0].displayName!==undefined).assertTrue(); - - await audioCapturer.release(); - audioCapturerInfos = audioStreamManager.getCurrentAudioCapturerInfoArraySync(); - 
expect(audioCapturerInfos.length).assertEqual(0); - done(); - } catch(e) { - console.error(`${TAG} getCurrentAudioCapturerInfoArraySync003 ERROR: ${e.message}`); - expect().assertFail(); - await audioCapturer.release(); - done(); - return; - } - }); - /* * @tc.name:getAudioEffectInfoArraySync001 * @tc.desc:Get getAudioEffectInfoArraySync success - STREAM_USAGE_UNKNOWN -- Gitee From 400056d25d34a8cb5ace2237b1791aad0888d4be Mon Sep 17 00:00:00 2001 From: huyue57 Date: Tue, 29 Aug 2023 14:51:36 +0800 Subject: [PATCH 3/4] add sync api ut Signed-off-by: huyue57 Change-Id: Idb04eed0d73ac08fa8d0ef9ee7f2d9edcb01b617 --- .../unittest/audio_capturer_test/config.json | 6 +- .../unittest/audio_manager_test/config.json | 6 +- .../unittest/group_manager_test/config.json | 6 +- .../AudioRoutingManagerJsTest.js | 64 +- .../unittest/routing_manager_test/BUILD.gn | 2 +- .../unittest/routing_manager_test/config.json | 6 +- .../unittest/stream_manager_test/config.json | 6 +- .../unittest/volume_manager_test/config.json | 6 +- ...RendererInterruptSyncCommonTypeUnitTest.js | 1655 ----------------- ...ioRendererInterruptSyncRareTypeUnitTest.js | 912 --------- .../AudioRendererInterruptSyncUnitTest.js | 143 ++ .../unittest/audio_renderer_test/config.json | 6 +- .../unittest/tone_player_test/config.json | 6 +- 13 files changed, 200 insertions(+), 2624 deletions(-) delete mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncCommonTypeUnitTest.js delete mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js create mode 100644 frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncUnitTest.js diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json index f9ecd195bb..7e8aa3788d 100644 --- 
a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json +++ b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.audiocapturertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.audiocapturertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.audiocapturertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json index f9ecd195bb..f9c547dab9 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.audiomanagertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.audiomanagertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.audiomanagertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json index f9ecd195bb..693c41dac5 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json +++ 
b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.groupmanagertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.groupmanagertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.groupmanagertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js index 943f197238..fc67767539 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js +++ b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/AudioRoutingManagerJsTest.js @@ -894,23 +894,23 @@ describe("AudioRoutingManagerJsTest", function () { done(); } catch(e) { console.error(`${TAG} isCommunicationDeviceActiveSync004 ERROR: ${e.message}`); - expect(e.code).assertEqual(ERROR_INPUT_INVALID); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); done(); return; } }); /* - * @tc.name:getDeviceSync001 - * @tc.desc:getDeviceSync success - INPUT_DEVICES_FLAG + * @tc.name:getDevicesSync001 + * @tc.desc:getDevicesSync success - INPUT_DEVICES_FLAG * @tc.type: FUNC * @tc.require: I6C9VA */ - it("getDeviceSync001", 0, async function (done) { + it("getDevicesSync001", 0, async function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); let AudioDeviceDescriptors = routingManager.getDevicesSync(audio.DeviceFlag.INPUT_DEVICES_FLAG); - console.info(`${TAG} getDeviceSync001 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + 
console.info(`${TAG} getDevicesSync001 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); expect(AudioDeviceDescriptors.length).assertLarger(0); for (let i = 0; i < AudioDeviceDescriptors.length; i++) { expect(AudioDeviceDescriptors[i].displayName!=="" @@ -918,69 +918,69 @@ describe("AudioRoutingManagerJsTest", function () { } done(); } catch (e) { - console.error(`${TAG} getDeviceSync001 ERROR: ${e.message}`); + console.error(`${TAG} getDevicesSync001 ERROR: ${e.message}`); expect().assertFail(); done(); } }); /* - * @tc.name:getDeviceSync010 - * @tc.desc:getDeviceSync fail(401) - Invalid param count : 0 + * @tc.name:getDevicesSync010 + * @tc.desc:getDevicesSync fail(401) - Invalid param count : 0 * @tc.type: FUNC * @tc.require: I6C9VA */ - it("getDeviceSync010", 0, async function (done) { + it("getDevicesSync010", 0, async function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); let AudioDeviceDescriptors = routingManager.getDevicesSync(); - console.info(`${TAG} getDeviceSync010 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + console.info(`${TAG} getDevicesSync010 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); expect(false).assertTrue(); done(); } catch (e) { - console.error(`${TAG} getDeviceSync010 ERROR: ${e.message}`); + console.error(`${TAG} getDevicesSync010 ERROR: ${e.message}`); expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }); /* - * @tc.name:getDeviceSync011 - * @tc.desc:getDeviceSync fail(401) - Invalid param type : "Invalid type" + * @tc.name:getDevicesSync011 + * @tc.desc:getDevicesSync fail(401) - Invalid param type : "Invalid type" * @tc.type: FUNC * @tc.require: I6C9VA */ - it("getDeviceSync011", 0, async function (done) { + it("getDevicesSync011", 0, async function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); let AudioDeviceDescriptors = routingManager.getDevicesSync("Invalid type"); - console.info(`${TAG} getDeviceSync011 SUCCESS:`+ 
JSON.stringify(AudioDeviceDescriptors)); + console.info(`${TAG} getDevicesSync011 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); expect(false).assertTrue(); done(); } catch (e) { - console.error(`${TAG} getDeviceSync011 ERROR: ${e.message}`); + console.error(`${TAG} getDevicesSync011 ERROR: ${e.message}`); expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }); /* - * @tc.name:getDeviceSync012 - * @tc.desc:getDeviceSync fail(6800101) - Invalid param value : 10000 + * @tc.name:getDevicesSync012 + * @tc.desc:getDevicesSync fail(6800101) - Invalid param value : 10000 * @tc.type: FUNC * @tc.require: I6C9VA */ - it("getDeviceSync012", 0, async function (done) { + it("getDevicesSync012", 0, async function (done) { let invalidDeviceFlag = 10000; try { let routingManager = audio.getAudioManager().getRoutingManager(); let AudioDeviceDescriptors = routingManager.getDevicesSync(invalidDeviceFlag); - console.info(`${TAG} getDeviceSync012 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); + console.info(`${TAG} getDevicesSync012 SUCCESS:`+ JSON.stringify(AudioDeviceDescriptors)); expect(false).assertTrue(); done(); } catch (e) { - console.error(`${TAG} getDeviceSync012 ERROR: ${e.message}`); - expect(e.code).assertEqual(ERROR_INPUT_INVALID); + console.error(`${TAG} getDevicesSync012 ERROR: ${e.message}`); + expect(e.code).assertEqual(ERROR_INVALID_PARAM); done(); } }); @@ -999,7 +999,7 @@ describe("AudioRoutingManagerJsTest", function () { try { let routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredInputDeviceForCapturerInfoSync(capturerInfo); + let data = routingManager.getPreferredInputDeviceForCapturerInfoSync(capturerInfo); console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest001 SUCCESS`+JSON.stringify(data)); expect(true).assertTrue(); done(); @@ -1019,13 +1019,13 @@ describe("AudioRoutingManagerJsTest", function () { it("getPreferredInputDeviceForCapturerInfoSyncTest002", 0, async 
function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredInputDeviceForCapturerInfo(); + let data = routingManager.getPreferredInputDeviceForCapturerInfoSync(); console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest002 SUCCESS`+JSON.stringify(data)); expect().assertFail(); done(); } catch(e) { console.error(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest002 ERROR: ${e.message}`); - expect(e.code).assertFail(ERROR_INPUT_INVALID); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }) @@ -1039,13 +1039,13 @@ describe("AudioRoutingManagerJsTest", function () { it("getPreferredInputDeviceForCapturerInfoSyncTest003", 0, async function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredInputDeviceForCapturerInfo("Invalid type"); + let data = routingManager.getPreferredInputDeviceForCapturerInfoSync("Invalid type"); console.info(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest003 SUCCESS`+JSON.stringify(data)); expect().assertFail(); done(); } catch(e) { console.error(`${TAG} getPreferredInputDeviceForCapturerInfoSyncTest003 ERROR: ${e.message}`); - expect(e.code).assertFail(ERROR_INPUT_INVALID); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }) @@ -1064,7 +1064,7 @@ describe("AudioRoutingManagerJsTest", function () { try { let routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredOutputDeviceForRendererInfoSync(rendererInfo); + let data = routingManager.getPreferredOutputDeviceForRendererInfoSync(rendererInfo); console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest001 SUCCESS`+JSON.stringify(data)); expect(true).assertTrue(); done(); @@ -1084,13 +1084,13 @@ describe("AudioRoutingManagerJsTest", function () { it("getPreferredOutputDeviceForRendererInfoSyncTest002", 0, async function (done) { try { let 
routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredOutputDeviceForRendererInfoSync(); + let data = routingManager.getPreferredOutputDeviceForRendererInfoSync(); console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest002 SUCCESS`+JSON.stringify(data)); expect().assertFail(); done(); } catch(e) { console.error(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest002 ERROR: ${e.message}`); - expect(e.code).assertFail(ERROR_INPUT_INVALID); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }) @@ -1104,13 +1104,13 @@ describe("AudioRoutingManagerJsTest", function () { it("getPreferredOutputDeviceForRendererInfoSyncTest003", 0, async function (done) { try { let routingManager = audio.getAudioManager().getRoutingManager(); - let data = await routingManager.getPreferredOutputDeviceForRendererInfoSync("Invalid type"); + let data = routingManager.getPreferredOutputDeviceForRendererInfoSync("Invalid type"); console.info(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest003 SUCCESS`+JSON.stringify(data)); expect().assertFail(); done(); } catch(e) { console.error(`${TAG} getPreferredOutputDeviceForRendererInfoSyncTest003 ERROR: ${e.message}`); - expect(e.code).assertFail(ERROR_INPUT_INVALID); + expect(e.code).assertEqual(ERROR_INPUT_INVALID); done(); } }) diff --git a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/BUILD.gn b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/BUILD.gn index 82f5450305..06cce67f23 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/BUILD.gn +++ b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/BUILD.gn @@ -13,7 +13,7 @@ import("//build/test.gni") -module_output_path = "multimedia_audio_framework/audio_routing_manager" +module_output_path = "multimedia_audio_framework/audio_routing_manager_js" ohos_js_unittest("AudioRoutingManagerJsTest") { module_out_path = module_output_path 
diff --git a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json index 4cdfd29182..9f91433894 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.routingmanagertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.routingmanagertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.routingmanagertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json index 4cdfd29182..f4749d171a 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.streammanagertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.streammanagertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.streammanagertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git 
a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json index f9ecd195bb..6fd905a210 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.volumemanagertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.volumemanagertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.volumemanagertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncCommonTypeUnitTest.js b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncCommonTypeUnitTest.js deleted file mode 100644 index 1b230c26e5..0000000000 --- a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncCommonTypeUnitTest.js +++ /dev/null @@ -1,1655 +0,0 @@ -/* - * Copyright (c) 2023 Huawei Device Co., Ltd. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import audio from '@ohos.multimedia.audio'; - -describe("AudioRendererInterruptSyncUnitTest", function() { - beforeAll(async function () { - // input testsuit setup step, setup invoked before all testcases - console.info('beforeAll called') - }) - - afterAll(function () { - - // input testsuit teardown step, teardown invoked after all testcases - console.info('afterAll called') - }) - - beforeEach(function () { - - // input testcase setup step, setup invoked before each testcases - console.info('beforeEach called') - }) - - afterEach(function () { - - // input testcase teardown step, teardown invoked after each testcases - console.info('afterEach called') - }) - - let renderInfo = { - 'MUSIC': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0, - }, - 'VOICE_CALL': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, - rendererFlags: 0 - }, - 'RINGTONE': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE, - rendererFlags: 0, - }, - 'VOICE_ASSISTANT': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_VOICE_ASSISTANT, - rendererFlags: 0 - }, - 'ULTRASONIC': { - content: audio.ContentType.CONTENT_TYPE_ULTRASONIC, - usage: audio.StreamUsage.STREAM_USAGE_SYSTEM, - rendererFlags: 0 - }, - 'ALARM': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_ALARM, - rendererFlags: 0 - }, - 'ACCESSIBILITY': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_ACCESSIBILITY, - rendererFlags: 0 - }, - 'SPEECH': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0 - }, - 'MOVIE': { - content: 
audio.ContentType.CONTENT_TYPE_MOVIE, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0 - }, - 'UNKNOW': { - content: audio.ContentType.CONTENT_TYPE_UNKNOWN, - usage: audio.StreamUsage.STREAM_USAGE_UNKNOWN, - rendererFlags: 0 - }, - } - - let streamInfo = { - '44100': { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, - channels: audio.AudioChannel.CHANNEL_2, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW - }, - '48000' : { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, - channels: audio.AudioChannel.CHANNEL_2, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW - }, - } - - async function createAudioRenderer(AudioRendererInfo, AudioStreamInfo, done) { - let render = null; - - var AudioRendererOptions = { - streamInfo: AudioStreamInfo, - rendererInfo: AudioRendererInfo - } - try { - render = await audio.createAudioRenderer(AudioRendererOptions) - console.log(" createAudioRenderer success.") - } catch (err) { - console.log(" createAudioRenderer err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - return render - } - - async function start(render,done) { - try { - await render.start() - console.log(" start success.") - } catch (err) { - await release(render,done) - console.log(" start err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - } - - - async function startFail(render,done,render1) { - try { - await render.start() - console.log(" start success.") - } catch (err) { - console.log(" start err:" + JSON.stringify(err)) - await release(render,done) - await release(render1,done) - expect(true).assertEqual(true) - done() - } - } - - - async function stop(render,done) { - try { - await render.stop() - console.log(" stop success.") - } catch (err) { - console.log(" stop err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - await 
release(render,done) - done() - } - } - - async function release(render,done) { - if (render.state == audio.AudioState.STATE_RELEASED) { - console.log(" release render state: " + render.state) - return - } - try { - await render.release() - console.log(" release success.") - } catch (err) { - console.log(" release err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - } - - function sleep(ms) { - return new Promise(resolve => setTimeout(resolve, ms)); - } - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_001', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("1.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_002', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async (eventAction) => { - console.log("2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await 
createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_003', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("3.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_004', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("4.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else {} - }) - 
await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_005', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("5.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("5_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_006', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("6.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_007', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("7.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_008', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("8.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await 
createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_009', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("9.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_010', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("10.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // VOICE_CALL - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_011', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("11-2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_012', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("12_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2,done,render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_013', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("13_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2,done,render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_014', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("14_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2,done,render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_015', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("15.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("15_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback 
== false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_016', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("16_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_017', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("17.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_018', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("18_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_019', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("19_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_020', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("20_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // RINGTONE - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_021', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - 
console.log("21_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_022', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("22.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_023', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("23_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2,done,render1) - }) - - 
it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_024', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("24_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2,done,render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_025', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("25.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("25_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_026', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("26_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await startFail(render2,done,render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_027', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("27.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_028', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("28_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_029', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("29_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_030', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['RINGTONE'], 
streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("30_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // VOICE_ASSISTANT - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_031', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("31.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_032', 0, async function (done) { - let render1 = 
await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("32.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_033', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("33.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_034', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("34.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], 
streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_035', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("35.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("35_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_036', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("36.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - 
await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_037', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("37.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_038', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("38.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_039', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("39.eventAction=" + JSON.stringify(eventAction)) - 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_040', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("40.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // ULTRASONIC - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_041', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("41.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("41_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await 
sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_042', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("42.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("42_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_043', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("43.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("43_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_044', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("44.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("44_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_045', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], 
streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("45_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_046', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("46.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("46_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_047', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("47.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await 
createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("47_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_048', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("48.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("48_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_049', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - 
render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("49.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("49_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_050', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("50.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("50_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - // ALARM - 
it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_051', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("51.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_052', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("52.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_053', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("53.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await 
createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_054', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("54.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_055', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("55.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("55_2.eventAction=" + 
JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_056', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("56.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_057', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("57.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_058', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("58.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_059', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("59.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_060', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - 
console.log("60.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // ACCESSIBILITY - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_061', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("61_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_062', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("62_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_063', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("63.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_064', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("64_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_065', 0, async 
function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("65.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("65_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_066', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("66_2.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render2, done) - await sleep(500) - await 
release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_067', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("67.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_068', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("68_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_069', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - 
console.log("69_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_070', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("70_2.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await startFail(render2, done, render1) - }) -}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js deleted file mode 100644 index a0df0a239d..0000000000 --- a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncRareTypeUnitTest.js +++ /dev/null @@ -1,912 +0,0 @@ -/* - * Copyright (c) 2023 Huawei Device Co., Ltd. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import audio from '@ohos.multimedia.audio'; - -describe("AudioRendererInterruptSyncRareTypeUnitTest", function() { - beforeAll(async function () { - // input testsuit setup step, setup invoked before all testcases - console.info('beforeAll called') - }) - - afterAll(function () { - - // input testsuit teardown step, teardown invoked after all testcases - console.info('afterAll called') - }) - - beforeEach(function () { - - // input testcase setup step, setup invoked before each testcases - console.info('beforeEach called') - }) - - afterEach(function () { - - // input testcase teardown step, teardown invoked after each testcases - console.info('afterEach called') - }) - - let renderInfo = { - 'MUSIC': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0, - }, - 'VOICE_CALL': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, - rendererFlags: 0 - }, - 'RINGTONE': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE, - rendererFlags: 0, - }, - 'VOICE_ASSISTANT': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_VOICE_ASSISTANT, - rendererFlags: 0 - }, - 'ULTRASONIC': { - content: audio.ContentType.CONTENT_TYPE_ULTRASONIC, - usage: audio.StreamUsage.STREAM_USAGE_SYSTEM, - rendererFlags: 0 - }, - 'ALARM': { - content: audio.ContentType.CONTENT_TYPE_MUSIC, - usage: audio.StreamUsage.STREAM_USAGE_ALARM, - rendererFlags: 0 - }, - 'ACCESSIBILITY': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_ACCESSIBILITY, - rendererFlags: 0 - }, - 'SPEECH': { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0 - }, - 'MOVIE': { - content: audio.ContentType.CONTENT_TYPE_MOVIE, - usage: audio.StreamUsage.STREAM_USAGE_MEDIA, - rendererFlags: 0 - }, - 
'UNKNOW': { - content: audio.ContentType.CONTENT_TYPE_UNKNOWN, - usage: audio.StreamUsage.STREAM_USAGE_UNKNOWN, - rendererFlags: 0 - }, - } - - let streamInfo = { - '44100': { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, - channels: audio.AudioChannel.CHANNEL_2, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW - }, - '48000' : { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, - channels: audio.AudioChannel.CHANNEL_2, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW - }, - } - - async function createAudioRenderer(AudioRendererInfo, AudioStreamInfo, done) { - let render = null; - - var AudioRendererOptions = { - streamInfo: AudioStreamInfo, - rendererInfo: AudioRendererInfo - } - try { - render = await audio.createAudioRenderer(AudioRendererOptions) - console.log(" createAudioRenderer success.") - } catch (err) { - console.log(" createAudioRenderer err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - return render - } - - async function start(render,done) { - try { - render.start() - console.log(" start success.") - } catch (err) { - await release(render,done) - console.log(" start err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - } - - - async function startFail(render,done,render1) { - try { - render.start() - console.log(" start success.") - } catch (err) { - console.log(" start err:" + JSON.stringify(err)) - await release(render,done) - await release(render1,done) - expect(true).assertEqual(true) - done() - } - } - - - async function stop(render,done) { - try { - render.stop() - console.log(" stop success.") - } catch (err) { - console.log(" stop err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - await release(render,done) - done() - } - } - - async function release(render,done) { - if (render.state == audio.AudioState.STATE_RELEASED) 
{ - console.log(" release render state: " + render.state) - return - } - try { - render.release() - console.log(" release success.") - } catch (err) { - console.log(" release err:" + JSON.stringify(err)) - expect(false).assertEqual(true) - done() - } - } - - function sleep(ms) { - return new Promise(resolve => setTimeout(resolve, ms)); - } - - // SPEECH - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_071', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("71.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_072', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async (eventAction) => { - console.log("72.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await 
start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_073', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("73.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_074', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("74.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_075', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("75.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("75_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_076', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("76.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - 
await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_077', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("77.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_078', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("78.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await 
sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_079', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("79.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_080', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("80.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // MOVIE - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_081', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("81.eventAction=" + JSON.stringify(eventAction)) - 
expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_082', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async (eventAction) => { - console.log("82.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_083', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("83.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_084', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("84.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_085', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("85.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await 
createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("85_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_086', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("86.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_087', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("87.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == 
audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_088', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("88.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_089', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("89.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_090', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("90.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // UNKNOW - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_091', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("91.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_092', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async (eventAction) => { - 
console.log("92.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_CALL'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_093', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("93.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['RINGTONE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_094', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - 
render1.on("audioInterrupt",async (eventAction) => { - console.log("94.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['VOICE_ASSISTANT'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_095', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("95.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ULTRASONIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("95_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_096', 0, async function (done) { - let render1 = await 
createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("96.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_DUCK) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_UNDUCK) - } else { - } - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ALARM'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_097', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("97.eventAction=" + JSON.stringify(eventAction)) - if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_BEGIN) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_PAUSE) - } else if (eventAction.eventType == audio.InterruptType.INTERRUPT_TYPE_END) { - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_RESUME) - } else {} - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['ACCESSIBILITY'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render2, done) - await sleep(500) - await release(render1, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_098', 0, async 
function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("98.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['SPEECH'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_099', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("99.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MOVIE'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_100', 0, async function (done) { - let render1 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt",async (eventAction) => { - console.log("100.eventAction=" + JSON.stringify(eventAction)) - expect(eventAction.hintType).assertEqual(audio.InterruptHint.INTERRUPT_HINT_STOP) - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['UNKNOW'], streamInfo['48000']) - 
render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - await start(render2, done) - await sleep(500) - await release(render1, done) - await release(render2, done) - done() - }) - - // 两个stream同时为share mode - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_101', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("101.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("101_2.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == false && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - // 第一个为share mode, 第二个为Independe mode - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_102', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.SHARE_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("102.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - 
render2.on("audioInterrupt", async(eventAction) => { - console.log("102_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback == true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == true && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - - // 第一个为independ mode, 第二个为share mode - it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_103', 0, async function (done) { - let render1_callback = false - let render2_callback = false - let render1 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['44100']) - render1.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render1.on("audioInterrupt", async(eventAction) => { - console.log("103.eventAction=" + JSON.stringify(eventAction)) - render1_callback = true - }) - await start(render1, done) - - let render2 = await createAudioRenderer(renderInfo['MUSIC'], streamInfo['48000']) - render2.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE) - render2.on("audioInterrupt", async(eventAction) => { - console.log("103_2.eventAction=" + JSON.stringify(eventAction)) - render2_callback = true - }) - await start(render2, done) - await sleep(500) - console.log("render1_callback = " + render1_callback + ", render2_callback = " + render2_callback) - expect(render1_callback == true && render2_callback == false).assertTrue() - await sleep(100) - await release(render1, done) - await release(render2, done) - done() - }) - -}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncUnitTest.js b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncUnitTest.js new file mode 100644 index 0000000000..d98060dee1 --- /dev/null +++ 
b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_interrupt_test/AudioRendererInterruptSyncUnitTest.js @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import audio from '@ohos.multimedia.audio'; + +describe("AudioRendererInterruptSyncUnitTest", function() { + const TAG = 'AudioRendererInterruptSyncUnitTest'; + const ERROR_INPUT_INVALID = '401'; + const ERROR_INVALID_PARAM = '6800101'; + let audioStreamInfo = { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, + channels: audio.AudioChannel.CHANNEL_1, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + } + let audioRendererInfo = { + content: audio.ContentType.CONTENT_TYPE_MUSIC, + usage: audio.StreamUsage.STREAM_USAGE_MEDIA, + rendererFlags: 0 + } + let audioRendererOptions = { + streamInfo: audioStreamInfo, + rendererInfo: audioRendererInfo + } + let audioRenderer; + + beforeAll(async function () { + // input testsuit setup step, setup invoked before all testcases + try { + audioRenderer = audio.createAudioRendererSync(audioRendererOptions); + console.info(`${TAG}: AudioRenderer created SUCCESS, state: ${audioRenderer.state}`); + } catch (err) { + console.error(`${TAG}: AudioRenderer created ERROR: ${err.message}`); + } + console.info(TAG + 'beforeAll called') + }) + + afterAll(function () { + + // input testsuit teardown step, teardown invoked after all testcases + 
audioRenderer.release().then(() => { + console.info(`${TAG}: AudioRenderer release : SUCCESS`); + }).catch((err) => { + console.info(`${TAG}: AudioRenderer release :ERROR : ${err.message}`); + }); + console.info(TAG + 'afterAll called') + }) + + beforeEach(function () { + + // input testcase setup step, setup invoked before each testcases + console.info('beforeEach called') + }) + + afterEach(function () { + + // input testcase teardown step, teardown invoked after each testcases + console.info('afterEach called') + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_001 + * @tc.desc:setInterruptModeSync success + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_001', 0, async function (done) { + try { + audioRenderer.setInterruptModeSync(audio.InterruptMode.INDEPENDENT_MODE); + console.info(`setInterruptModeSync success`); + expect(true).assertTrue(); + } catch (err) { + console.error(`setInterruptModeSync error: ${err}`); + expect(false).assertTrue(); + } + done() + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_002 + * @tc.desc:setInterruptModeSync fail(401) - Invalid param count : 0 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_002', 0, async function (done) { + try { + audioRenderer.setInterruptModeSync(); + console.info(`setInterruptModeSync success`); + expect(false).assertTrue(); + } catch (err) { + console.error(`setInterruptModeSync error: ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + } + done() + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_003 + * @tc.desc:setInterruptModeSync fail(401) - Invalid param type : "Invalid type" + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_003', 0, async function (done) { + try { + audioRenderer.setInterruptModeSync("Invalid type"); + console.info(`setInterruptModeSync success`); + expect(false).assertTrue(); + } catch (err) { + 
console.error(`setInterruptModeSync error: ${err}`); + expect(err.code).assertEqual(ERROR_INPUT_INVALID); + } + done() + }) + + /* + * @tc.name:SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_004 + * @tc.desc:setInterruptModeSync fail(6800101) - Invalid param value : 100 + * @tc.type: FUNC + * @tc.require: I7V04L + */ + it('SUB_AUDIO_RENDERER_INTERRUPT_SYNC_TEST_004', 0, async function (done) { + try { + audioRenderer.setInterruptModeSync(100); + console.info(`setInterruptModeSync success`); + expect(false).assertTrue(); + } catch (err) { + console.error(`setInterruptModeSync error: ${err}`); + expect(err.code).assertEqual(ERROR_INVALID_PARAM); + } + done() + }) +}) diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json index f9ecd195bb..62d56e5beb 100644 --- a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.audiorenderertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.audiorenderertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.audiorenderertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json b/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json index f9ecd195bb..001e7c7706 100644 --- a/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json +++ b/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json @@ -1,6 +1,6 @@ 
{ "app": { - "bundleName": "com.example.myapplication", + "bundleName": "com.ohos.toneplayertest", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.example.myapplication", + "package": "com.ohos.toneplayertest", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.example.myapplication.MainAbility", + "name": "com.ohos.toneplayertest.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", -- Gitee From 171e2d4bb013e59f7ee4692963d3b9ff3471bfaf Mon Sep 17 00:00:00 2001 From: huyue57 Date: Tue, 29 Aug 2023 16:57:54 +0800 Subject: [PATCH 4/4] add sync api ut Signed-off-by: huyue57 Change-Id: I1f3a3dcef28b2f7459bf37f4d4336bc76cb0de21 --- .../test/unittest/audio_capturer_test/config.json | 6 +++--- .../test/unittest/audio_manager_test/config.json | 6 +++--- .../test/unittest/group_manager_test/config.json | 6 +++--- .../test/unittest/routing_manager_test/config.json | 6 +++--- .../test/unittest/stream_manager_test/config.json | 6 +++--- .../test/unittest/volume_manager_test/config.json | 6 +++--- .../test/unittest/audio_renderer_test/config.json | 6 +++--- .../toneplayer/test/unittest/tone_player_test/config.json | 6 +++--- 8 files changed, 24 insertions(+), 24 deletions(-) diff --git a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json index 7e8aa3788d..f9ecd195bb 100644 --- a/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json +++ b/frameworks/js/napi/audio_capturer/test/unittest/audio_capturer_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.audiocapturertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.audiocapturertest", + 
"package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.audiocapturertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json index f9c547dab9..f9ecd195bb 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/audio_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.audiomanagertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.audiomanagertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.audiomanagertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json index 693c41dac5..f9ecd195bb 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/group_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.groupmanagertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.groupmanagertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ 
-37,7 +37,7 @@ ] } ], - "name": "com.ohos.groupmanagertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json index 9f91433894..4cdfd29182 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/routing_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.routingmanagertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.routingmanagertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.routingmanagertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json index f4749d171a..4cdfd29182 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/stream_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.streammanagertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.streammanagertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.streammanagertest.MainAbility", + 
"name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json index 6fd905a210..f9ecd195bb 100644 --- a/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json +++ b/frameworks/js/napi/audio_manager/test/unittest/volume_manager_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.volumemanagertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.volumemanagertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.volumemanagertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json index 62d56e5beb..f9ecd195bb 100644 --- a/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json +++ b/frameworks/js/napi/audio_renderer/test/unittest/audio_renderer_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.audiorenderertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.audiorenderertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.audiorenderertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", 
"description": "$string:mainability_description", "label": "MyApplication", diff --git a/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json b/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json index 001e7c7706..f9ecd195bb 100644 --- a/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json +++ b/frameworks/js/napi/toneplayer/test/unittest/tone_player_test/config.json @@ -1,6 +1,6 @@ { "app": { - "bundleName": "com.ohos.toneplayertest", + "bundleName": "com.example.myapplication", "vendor": "example", "version": { "code": 1, @@ -13,7 +13,7 @@ }, "deviceConfig": {}, "module": { - "package": "com.ohos.toneplayertest", + "package": "com.example.myapplication", "name": ".MyApplication", "deviceType": [ "phone", @@ -37,7 +37,7 @@ ] } ], - "name": "com.ohos.toneplayertest.MainAbility", + "name": "com.example.myapplication.MainAbility", "icon": "$media:icon", "description": "$string:mainability_description", "label": "MyApplication", -- Gitee