1. Requirement: use JS to access the microphone for real-time intercom

To obtain microphone permission from JS, the browser must support AudioContext.

AudioContext is fully supported from Chrome 35 onwards.

Before you can start speaking, you need the user to give the browser access to the microphone. That is:

if (window.navigator.mediaDevices) {
      window.navigator.mediaDevices
      // Obtain the browser microphone permission
        .getUserMedia({ 'audio': true })
      // The user agrees to grant microphone permissions
        .then(this.initRecordMicro)
      // The user denied the microphone permission, or the current browser does not support it
        .catch(e= > {
          switch (e.message || e.name) {
            case 'PERMISSION_DENIED':
            case 'PermissionDeniedError':
              this.$message.error('User refuses to provide permission')
              break
            case 'NOT_SUPPORTED_ERROR':
            case 'NotSupportedError':
              this.$message.error('Browser does not support your currently selected device')
              break
            case 'MANDATORY_UNSATISFIED_ERROR':
            case 'MandatoryUnsatisfiedError':
              this.$message.error('The specified hardware device cannot be found')
              break
            default:
              this.$message.error('Unable to open microphone, cause:${e.code || e.name}`)}}}else {
      this.$message.error('Your current browser or protocol does not currently support microphones')}Copy the code

If the user grants permission, the `.then` callback runs and recording can begin; if not, the `.catch` handler reports the reason.

Then start handling the "stream" in initRecordMicro — the `.then` callback receives the media stream returned by the microphone:

    // Create the audio context and wrap the microphone MediaStream as a source node.
    this.ctxAudio = new window.AudioContext()
    this.sourceAudio = this.ctxAudio.createMediaStreamSource(this.streamAudio)
2. The complete code is as follows:

    <!-- Volume bar: its height is driven by the maxVol watcher in the component. -->
    <div class="intercomMicroVol">
        <div class="intercomMicroVolCtx" />
    </div>
    <!-- Push-to-talk button: hold down to speak, release to stop. -->
    <div class="microPhone"
       @mousedown.prevent="microPhoneMousedown"
       @mouseup.prevent="microPhoneMouseup"
    >
       <el-icon />
    </div>


The project uses Vue + TS (class components). Long press to start talking, release to end — hence the @mousedown.prevent and @mouseup.prevent handlers.

export default class extends Vue{
    private streamAudio: any 
    private ctxAudio:any
    private sourceAudio:any
    private maxVol=0
    private scriptProcessor:any
    private ws:any
    
    
    // Make a sound size column
    @Watch('maxVol')
    private getVolStyle(val:any) {
        const dom = document.querySelector('.intercomMicroVolCtx') as HTMLElement
        if (val > 0) {
            dom.style.height = `${val * 2.6 + 10}px`
        } else {
            dom.style.height = '0'
        }
    }
    
    private mounted() {
        // Add an event to handle F5 refresh during intercom. You have to add...
        window.addEventListener('beforeunload'.(e) = > this.beforeunloadHandler(e))
    }
    private destroyed() {
        this.intercomMouseup()
        window.removeEventListener('beforeunload'.(e) = > this.beforeunloadHandler(e))
    }
    
    private beforeunloadHandler(e: any) {
        this.intercomMouseup()
    }
    
    private intercomMousedown() {
        // It is used to determine whether to click continuously within 1 second, process and intercept
      if (this.last && nowTime - this.last < 1000) {
      // It is used to determine whether a prompt has been generated
          if (document.querySelectorAll('.el-message').length === 0) {
            this.$message.warning('Click too fast, please click again later.')}}else{
          this.ws = new WebSocket('ws://ip:port')
          this.ws.onopen = (e:any) = > {
              console.log('Connection established', e)
              // Specify what to pass with the back end
              this.ws.send('someID')
              this.startRecord()
          }
          this.ws.onerror = (e:any) = > {
            console.log(e)
          }
       }
    
        
  }

  private intercomMouseup() {
    this.stopRecord()
  }
      
  // Stop intercom when the user mouse over the intercom button
  private intercomMouseleave() {
      if (this.ws || this.sourceAudio) {
          this.intercomMouseup()
      }
  }
      
  private startRecord() {
  // Do not use the full permissions above, because this is a shell layer operation. Shell layer before obtaining the judgment.
    if (window.navigator.mediaDevices) {
      window.navigator.mediaDevices
      // Obtain the browser microphone permission
        .getUserMedia({ 'audio': true })
      // The user agrees to grant microphone permissions
        .then(this.initRecordMicro)
      // The user denied the microphone permission, or the current browser does not support it
        .catch(e= > {
          this.$message.error('Failed to obtain microphone permission, cause:${e}`)})}else {
      this.$message.error('Your current browser or browser version does not currently support microphones')
    }
  }
  private stopRecord() {
      // Close all
    const tracks = this.streamAudio.getAudioTracks()
    for (let i = 0, len = tracks.length; i < len; i++) {
      tracks[i].stop()
    }
    // Close all audio links created in init
    this.sourceAudio.disconnect()
    this.scriptProcessor.disconnect()
    this.sourceAudio = null
    this.scriptProcessor = null
    this.maxVol = 0
    this.ws.close()
  }
    
  private initRecordMicro(stream:any) {
    this.streamAudio = stream
    this.ctxAudio = new window.AudioContext()
    this.sourceAudio = this.ctxAudio.createMediaStreamSource(this.streamAudio)
    // Use the AudioContext to get the volume of the audio in the microphone
    // 256, 512, 1024, 2048, 4096, 8192, 16384
    // By default, numbers with an integer power of 2 are supported. The larger the number, the more mature it is
    this.scriptProcessor = this.ctxAudio.createScriptProcessor(4096.1.1)
    this.sourceAudio.connect(this.scriptProcessor)
    this.scriptProcessor.connect(this.ctxAudio.destination)
    this.scriptProcessor.onaudioprocess = (audioProcessingEvent:any) = > {
      / / buffer
      // Only mono channels are processed
      const buffer = audioProcessingEvent.inputBuffer.getChannelData(0)

      let sum = 0
      let outputData:any = []
      for (let i = 0; i < buffer.length; i++) {
        sum += buffer[i] * buffer[i]
      }
      // This is just for the number, to show the sound of the column
      this.maxVol = Math.round(Math.sqrt(sum / buffer.length) * 100)

      // the sampleRate of the browser microphone is this.ctxaudio. SampleRate: 44100
      const inputSampleRate = this.ctxAudio.sampleRate
      // For docking with the stream, they need me to provide the sample rate of 8000, so they need to compress once
      outputData = this.compress(buffer, inputSampleRate, 8000)

      this.ws.send(outputData)
    }
  }
      
      private floatTo16BitPCM(bytes:any) {
        let offset = 0
        const dataLen = bytes.length
        // The default sampling rate is 16 bits, not 8 bits
        const buffer = new ArrayBuffer(dataLen * 2)
        const data = new DataView(buffer)

        for (let i = 0; i < bytes.length; i++, offset += 2) {
          // Make sure the sampling frame is between -1 and 1
          let s = Math.max(-1.Math.min(1, bytes[i]))
          // Map a 32-bit floating point to a 16-bit integer value
          // The 16-bit partition is 2^16=65536, ranging from -32768 to 32767
          [-32768, 32767] [-32768, 32767] [-32768, 32767
          // The third argument, true, means whether the small endian order is set to true
          data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF.true)}return data
      }
      private compress(data:any, inputSampleRate:number, outputSampleRate:number) {
        const rate = inputSampleRate / outputSampleRate
        const compression = Math.max(rate, 1)
        const length = Math.floor(data.length / rate)
        const result = new Float32Array(length)
        let index = 0
        let j = 0
        while (index < length) {
          / / integer
          let temp = Math.floor(j)
          result[index] = data[temp]
          index++
          j += compression
        }
        // Convert compressed data to PCM format
        return this.floatTo16BitPCM(result)
      }
}

3, add a knowledge point

Previously the service ran locally and everything worked when accessed via localhost, but after deployment window.navigator.mediaDevices was unavailable. Browsers have a protection mechanism: navigator.mediaDevices is only exposed on localhost or over HTTPS. So remember to serve the page over HTTPS.

Finished work