2
0

Delete article

Deleted articles cannot be recovered.

Draft of this article would be also deleted.

Are you sure you want to delete this article?

More than 1 year has passed since last update.

はじめての Zoom Video SDK for Web (モバイル ブラウザ) - 実装編

Posted at

■ 前置き

本編はブラウザ向けZoom VideoSDKの実装に関する続編になります。
トークン生成を経て通話が可能になりますので、サーバサイドでのトークン生成に必要なKey/SecretをMarketplaceから事前に取得しておく必要があります。はじめての Zoom Video SDK - 準備編
前回までのデスクトップPC向けブラウザの実装についてはこちらを参照ください。はじめての Zoom Video SDK for Web - 実装編

■ サーバ・サイドの実装について

以下Node.jsの環境で解説していきます。

1. まずは、サーバ側の準備を始めていきます。ここではポート「3001」を指定しています。
SharedArrayBufferが無効になるよう、関連ヘッダーを明示的にサーバサイドで含めています。

index.js
// --- Server setup: Express app serving the static client and a token endpoint ---
const express = require('express')
const bodyParser = require('body-parser')
const fs = require('fs')
const cors = require('cors')
const KJUR = require('jsrsasign') // JWT signing (HS256) for the Video SDK token

const app = express()
const port = 3001
const path = require('path')
// Load ZOOM_VSDK_KEY / ZOOM_VSDK_SECRET from a .env file next to this script.
require('dotenv').config({ path: path.join(__dirname, '.env') })

// Explicitly set cross-origin isolation headers on every response.
app.use(function(req, res, next) {
  //res.header("Cross-Origin-Embedder-Policy", "require-corp")
  //res.header("Cross-Origin-Opener-Policy", "same-origin")
  // NOTE(review): Express stringifies these booleans to "false", which leaves
  // cross-origin isolation OFF (SharedArrayBuffer unavailable). The commented
  // values above are the ones that enable it — confirm which mode the SDK needs.
  res.header("Cross-Origin-Embedder-Policy", false)
  res.header("Cross-Origin-Opener-Policy", false)
  next()
})
// Serve the browser client from ./public
app.use(express.static(path.join(__dirname, 'public')))
app.use(bodyParser.json(), cors())

// POST / — issue a Zoom Video SDK JWT for the requested session.
// Request body: { topic, role, password }. Responds with { signature }.
app.post('/', (req, res) => {

  // Issued-at in epoch seconds, with a 2-hour expiry.
  const iat = Math.floor(new Date().getTime() / 1000)
  const exp = iat + 60 * 60 * 2

  const oHeader = { alg: 'HS256', typ: 'JWT' }

  const oPayload = {
    app_key: process.env.ZOOM_VSDK_KEY,
    tpc: req.body.topic,      // session topic (name)
    role_type: req.body.role, // NOTE(review): the SDK expects a number; verify the client sends 0/1, not "0"/"1"
    pwd: req.body.password,
    iat: iat,
    exp: exp,
  }

  // Sign header + payload with the SDK secret using jsrsasign (HS256).
  const sHeader = JSON.stringify(oHeader)
  const sPayload = JSON.stringify(oPayload)
  const signature = KJUR.jws.JWS.sign('HS256', sHeader, sPayload, process.env.ZOOM_VSDK_SECRET)

  res.json({
    signature: signature
  })
})

4. 最後に起動できるようにします。

// Start the HTTP server on the port configured above.
app.listen(port, () => console.log(`Zoom Video SDK for Web Sample. port: ${port}!`))

5. サーバ側で使用する.envファイルは下記のように記載して同じディレクトリに保存しておきます。

.env
ZOOM_VSDK_KEY=Marketplaceで発行したSDK Key
ZOOM_VSDK_SECRET=Marketplaceで発行したSDK Secret

■ クライアント・サイドの実装について

iOS/Android向けのブラウザを使用した実装を前提に最低限のUIを用意していきます。 ここでのポイントは、受信映像用に「canvas」エレメントを使用することと、送信映像(自画像)表示用としてiOS向けに「canvas」エレメント、Android向けに「video」エレメントを分けて準備するところになります。
/public/index.html
<html>
  <head>
    <meta charset="utf-8">
    <title>VideoSDK Sample CDN</title>
      <!-- Zoom Video SDK for Web loaded from the official CDN -->
      <script src="https://source.zoom.us/videosdk/zoom-video-1.5.5.min.js"></script>
    <style>
    .video-canvas {
      background: rgba(0, 0, 0, 1);
      margin: 1px;
      border-radius: 10px;
      border: 1px solid rgba(0, 0, 0, 1);
    }
    </style>
  </head>
  <body>
<!-- Session join parameters: display name, topic, password, and role -->
<p>
  Display Name: <input type="text" id="user_name" maxLength="20" placeholder="Name" value="User01" required>
  Session Name: <input type="text" id="session_topic" maxLength="200" style="width:150px" placeholder="Session Topic" value="vsdkmeeting" required>
  Session Password: <input type="text" id="session_pwd" style="width:150px" maxLength="32" placeholder="Session Password" value="123456">
  <form id="join-role">
    <input type="radio" name="joinRole" value="0" checked> Participant
    <input type="radio" name="joinRole" value="1"> Host
  </form>
</p>
<p>
  Session:
  <button id="join-button">Join/Start</button>
</p>

<hr/>

<!-- In-session controls; handlers are bound in js/index.js on DOMContentLoaded -->
<p>
  Leave/End:
  <button id="leave-button">Leave/End</button>
</p>
<p>
  Audio:
  <button id="audio-start-button">Audio Start</button>

  <button id="audio-stop-button">Audio Stop</button>

  Microphone:
  <button id="mic-button">Mute/UnMute</button>
</p>
<p>
  Camera:
  <button id="camera-button">Start/Stop</button>

  Switch Camera:
  <button id="camera-switch-button">Switch</button>
</p>
<p>
  EnableQOS:
  <button id="qos-button">Enable/Disable QOS</button>
</p>

<hr/>

<!-- Far-end video renders on a canvas. The self view uses a canvas on iOS
     and a <video> element on Android; the unused one is hidden by script. -->
<p>Video:</p>
<p>
   <canvas id="far-canvas-element" class="video-canvas" width="640" height="360" style="background-color:black; height:360px; width:640px;"></canvas>
</p>
<p>
   <canvas id="self-canvas-element" class="video-canvas" width="160" height="90" style="background-color:black; height:90px; width:160px;"></canvas>
</p>
<p>
   <video id="self-video-element" class="video-canvas" width="160" height="90" style="background-color:black; height:90px; width:160px;" playsinline="" muted="" data-video="0"></video>
</p>

<!-- Remote screen-share renders here -->
<p>ScreenShare:</p>
<p>
   <canvas id="far-screenshare-canvas" class="video-canvas" width="640" height="360" style="background-color:black; height:360px; width:640px;"></canvas>
</p>

  <script src="./js/index.js"></script>

</body>
</html>

2.続いて各種実装になります。まずは音声処理について明示しておきます。

//AUDIO START
// Starts SDK audio, but only on mobile user agents (iPhone/iPad/Android).
// Failures are logged and swallowed.
async function audioStart() {
  const ua = navigator.userAgent;
  if (ua.includes("iPhone") || ua.includes("iPad") || ua.includes("Android")) {
    try {
      await stream.startAudio()
      console.log("audioStart")
    } catch (e) {
      console.log(e)
    }
  }
}

//AUDIO STOP
// Stops SDK audio on mobile user agents. Errors are logged; the trace
// message is printed afterwards regardless of success (as in the original).
async function audioStop() {
  const ua = navigator.userAgent;
  const onMobile = ua.includes("iPhone") || ua.includes("iPad") || ua.includes("Android");
  if (onMobile) {
    try {
      await stream.stopAudio()
    } catch (e) {
      console.log(e)
    }
    console.log("audioStop")
  }
}

//MIC MUTE UNMUTE
// Toggles the microphone: unmute when muted, mute when live, then log the new state.
function micMuteUnmute() {
  if (stream.isAudioMuted()) {
    stream.unmuteAudio()
  } else {
    stream.muteAudio()
  }
  console.log("isAudioMuted: " + stream.isAudioMuted())
}

3.カメラボタンをクリックした際に送信映像(自画像)の開始、停止処理について明示しておきます。
ここでは、iOSまたはAndroidを判別して各々のエレメントでレンダリングするように分岐させる必要があります。

//LOCAL CAMERA START STOP
// Toggles the local (self-view) camera. On iOS the self view is rendered
// onto a canvas via toggleSelfVideo; on Android a local video track is
// attached to the <video> element before starting the SDK stream.
async function cameraStartStop() {

  const capturing = await stream.isCapturingVideo()
  console.log("cameraStartStop isCapturingVideo: " + capturing)

  const ua = navigator.userAgent;
  const myUserId = client.getCurrentUserInfo().userId;

  if (ua.includes("iPhone") || ua.includes("iPad")) {
    if (!capturing) {
      await stream.startVideo({mirrored: false})
    } else {
      await stream.stopVideo()
    }
    // Render the self view when turning on, tear it down when turning off.
    toggleSelfVideo(stream, myUserId, !capturing)
  } else if (ua.includes("Android")) {
    const localVideoTrack = ZoomVideo.createLocalVideoTrack()
    if (!capturing) {
      const selfVideo = document.getElementById('self-video-element')
      await localVideoTrack.start(selfVideo)
      await stream.startVideo({videoElement: selfVideo})
    } else {
      await localVideoTrack.stop()
      await stream.stopVideo()
    }
  }
}

.....

//TOGGLE NEAR END VIDEO ON CANVAS
// Renders (isVideoOn=true) or tears down (false) the local user's video on
// the self-view canvas, at the smallest quality tier (90p).
const toggleSelfVideo = async (mediaStream, userId, isVideoOn) => {
    let SELF_VIDEO_CANVAS = document.getElementById('self-canvas-element')
    if (isVideoOn) {
        console.log("toggleSelfVideo start")
        await mediaStream.renderVideo(
            SELF_VIDEO_CANVAS,
            userId,
            160,   // Width (px)
            90,    // Height (px)
            0,     // Starting point x (horizontal offset)
            0,     // Starting point y (vertical offset)
            0      // Video Quality 0:90p, 1:180p, 2:360p, 3:720p
        )
    } else {
        console.log("toggleSelfVideo stop")
        // NOTE(review): the iOS caller (cameraStartStop) already invoked
        // stream.stopVideo() before calling this, so this is a second
        // stopVideo() — confirm the SDK tolerates the duplicate call.
        await mediaStream.stopVideo()
        await mediaStream.stopRenderVideo(SELF_VIDEO_CANVAS, userId)
        await mediaStream.clearVideoCanvas(SELF_VIDEO_CANVAS)
    }
}

4.スマホの前後カメラ切り替えも続けて実装に含めておきます。

//SWITCH CAMERA
// Flips between the front ("user") and rear ("environment") cameras on
// mobile user agents; no-op elsewhere. Switch failures are logged.
async function cameraSwitch() {

  const ua = navigator.userAgent;
  if (!(ua.includes("iPhone") || ua.includes("iPad") || ua.includes("Android"))) {
    return
  }

  const activeCameraId = await stream.getActiveCamera()
  console.log("Current camera source: " + activeCameraId)

  try {
    const nextCamera = activeCameraId === "user" ? "environment" : "user";
    await stream.switchCamera(nextCamera);
  } catch (e) {
    console.log(e);
  }
}

5.受信映像に関する実装を含めていきます。ここでは最初の1拠点を表示させるところまでになっています。

  //LISTEN TO FAR-END VIDEO STATUS
  // Start/stop rendering a remote participant's video when their camera
  // state changes; defers rendering until the video decoder is ready.
  client.on('peer-video-state-change', (payload) => {
    console.log("peer-video-state-change: " + JSON.stringify(payload))
    switch (payload.action) {
      case 'Start':
        if (videoDecode) {
          toggleFarVideo(stream, payload.userId, true)
        } else {
          // Decoder not ready yet — poll until it is, then render.
          console.log("wait untill videoDecode gets enabled")
          waitForVideoDecoder(500, payload.userId)
        }
        break
      case 'Stop':
        toggleFarVideo(stream, payload.userId, false)
        break
    }
  })

.....

//WAIT FOR VIDEO DECODER
// Polls up to 10 times, sleeping `ms` between checks, until the video
// decoder flag turns on, then starts rendering the far-end video.
async function waitForVideoDecoder(ms, userid) {
  const maxAttempts = 10
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    await sleep(ms)
    console.log("waiting for video decoder: " + attempt)
    if (videoDecode) {
      toggleFarVideo(stream, userid, true)
      break
    }
  }
}

//SLEEP(WAIT)
// Returns a Promise that resolves after `ms` milliseconds; await it to
// pause without blocking the event loop.
function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

.....

//TOGGLE FAR END VIDEO ON CANVAS
// Renders (isVideoOn=true) or tears down (false) a remote user's video on
// the far-end canvas at 360p quality.
const toggleFarVideo = async (mediaStream, userId, isVideoOn) => {
    const canvas = document.getElementById('far-canvas-element')
    if (!isVideoOn) {
        console.log("toggleFarVideo stop")
        await mediaStream.stopRenderVideo(canvas, userId)
        await mediaStream.clearVideoCanvas(canvas)
        return
    }
    console.log("toggleFarVideo start")
    await mediaStream.renderVideo(
        canvas,
        userId,
        640,  // Width (px)
        360,  // Height (px)
        0,    // Starting point x (horizontal offset)
        0,    // Starting point y (vertical offset)
        2     // Video Quality 0:90p, 1:180p, 2:360p, 3:720p
    )
}

6.続いてPCなど相手側が画面共有に対応していた場合に表示ができるように実装を含めておきます。

  //LISTEN FOR FAR-END SCREENSHARE STATUS
  // Begin rendering a remote participant's screen share when it becomes
  // active, and tear the view down when it stops.
  client.on('active-share-change', (payload) => {
      console.log(`ScreenShare: active-share-change`, payload)
      console.log("ScreenShare active-share-change state: " + payload.state)
      const shareCanvas = document.getElementById('far-screenshare-canvas')
      if (payload.state == "Active") {
        console.log("state: " + payload.state)
        console.log("userId: " + payload.userId)
        if (shareDecode) {
          stream.startShareView(shareCanvas, payload.userId)
        } else {
          // Share decoder not ready — poll until enabled, then attach the view.
          console.log("wait untill shareDecode gets enabled")
          waitForShareDecoder(500, payload.userId)
        }
      } else if (payload.state == "Inactive") {
        stream.stopShareView()
        stream.clearVideoCanvas(shareCanvas)
      }
  })

.....

//WAIT FOR SCREENSHARE DECODER
// Polls up to 10 times, sleeping `ms` between checks, until the share
// decoder flag turns on, then attaches the far-end screen-share view.
async function waitForShareDecoder(ms, userid) {
  const maxAttempts = 10
  for (let i = 0; i < maxAttempts; i++) {
    await sleep(ms)
    console.log("Trying to wait for share decoder: " + i)
    if (shareDecode) {
      // BUG FIX: the id was 'screenshare-canvas', which does not exist in
      // index.html — the element is 'far-screenshare-canvas' (the same id
      // the 'active-share-change' handler uses), so getElementById returned
      // null and startShareView could never attach.
      const shareCanvas = document.getElementById('far-screenshare-canvas')
      stream.startShareView(shareCanvas, userid)
      break
    }
  }
}

//SLEEP(WAIT)
// Returns a Promise that resolves after `ms` milliseconds; await it to
// pause without blocking the event loop.
function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

7.最後に各種ボタンにリスナーで紐付けしていきます。ここでブラウザの種類によって不要なエレメントを非表示にするように含めています。

// Wire up the UI controls once the DOM is ready, and hide the self-view
// element that the current platform does not use.
document.addEventListener("DOMContentLoaded", function() {
  // Randomize the default display name.
  document.getElementById('user_name').value = "User" + Math.floor(Math.random() * 100)

  // Button id -> click handler.
  const bindings = [
    ['join-button', joinSession],
    ['leave-button', leaveSession],
    ['audio-start-button', audioStart],
    ['audio-stop-button', audioStop],
    ['mic-button', micMuteUnmute],
    ['camera-button', cameraStartStop],
    ['camera-switch-button', cameraSwitch],
    ['qos-button', enableQOS],
  ]
  for (const [id, handler] of bindings) {
    document.getElementById(id).addEventListener('click', handler)
  }

  // iOS renders the self view on a canvas; Android uses a <video> element.
  // Hide whichever one this platform does not need.
  const ua = navigator.userAgent;
  if (ua.includes("iPhone") || ua.includes("iPad")) {
    document.getElementById("self-video-element").style.display = "none"
  } else if (ua.includes("Android")) {
    document.getElementById("self-canvas-element").style.display = "none"
  }

  console.log('DOMContentLoaded')
})

■ サンプル・ドキュメント類について

今回のサンプル:

ドキュメント類 (オフィシャル・ドキュメント):

2
0
0

Register as a new user and use Qiita more conveniently

  1. You get articles that match your needs
  2. You can efficiently read back useful information
  3. You can use dark theme
What you can do with signing up
2
0

Delete article

Deleted articles cannot be recovered.

Draft of this article would be also deleted.

Are you sure you want to delete this article?