@uppy/audio: refactor to TypeScript (#4860)

Antoine du Hamel, 1 year ago
commit c56d412057

+ 142 - 76
packages/@uppy/audio/src/Audio.jsx → packages/@uppy/audio/src/Audio.tsx

@@ -1,44 +1,83 @@
 import { h } from 'preact'
 
-import { UIPlugin } from '@uppy/core'
+import { UIPlugin, type UIPluginOptions } from '@uppy/core'
+import type { Body, Meta } from '@uppy/utils/lib/UppyFile'
+import type { Uppy, MinimalRequiredUppyFile } from '@uppy/core/lib/Uppy.ts'
 
 import getFileTypeExtension from '@uppy/utils/lib/getFileTypeExtension'
-import supportsMediaRecorder from './supportsMediaRecorder.js'
-import RecordingScreen from './RecordingScreen.jsx'
-import PermissionsScreen from './PermissionsScreen.jsx'
-import locale from './locale.js'
-
+import supportsMediaRecorder from './supportsMediaRecorder.ts'
+import RecordingScreen from './RecordingScreen.tsx'
+import PermissionsScreen from './PermissionsScreen.tsx'
+import locale from './locale.ts'
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore We don't want TS to generate types for the package.json
 import packageJson from '../package.json'
 
+interface AudioOptions extends UIPluginOptions {
+  target?: HTMLElement | string
+  showAudioSourceDropdown: boolean
+}
+interface AudioState {
+  audioReady: boolean
+  recordingLengthSeconds: number
+  hasAudio: boolean
+  cameraError: null
+  audioSources: MediaDeviceInfo[]
+  currentDeviceId?: null | string | MediaStreamTrack
+  isRecording: boolean
+  showAudioSourceDropdown: boolean
+  [id: string]: unknown
+}
+
 /**
  * Audio recording plugin
  */
-export default class Audio extends UIPlugin {
+export default class Audio<M extends Meta, B extends Body> extends UIPlugin<
+  AudioOptions,
+  M,
+  B,
+  AudioState
+> {
   static VERSION = packageJson.version
 
-  #stream = null
+  private recordingLengthTimer: ReturnType<typeof setInterval>
+
+  private icon
+
+  #stream: MediaStream | null = null
 
   #audioActive = false
 
-  #recordingChunks = null
+  #recordingChunks: Blob[] | null = null
 
-  #recorder = null
+  #recorder: MediaRecorder | null = null
 
-  #capturedMediaFile = null
+  #capturedMediaFile: MinimalRequiredUppyFile<M, B> | null = null
 
-  #mediaDevices = null
+  #mediaDevices
 
-  #supportsUserMedia = null
+  #supportsUserMedia
 
-  constructor (uppy, opts) {
+  constructor(uppy: Uppy<M, B>, opts: AudioOptions) {
     super(uppy, opts)
     this.#mediaDevices = navigator.mediaDevices
     this.#supportsUserMedia = this.#mediaDevices != null
     this.id = this.opts.id || 'Audio'
     this.type = 'acquirer'
     this.icon = () => (
-      <svg className="uppy-DashboardTab-iconAudio" aria-hidden="true" focusable="false" width="32px" height="32px" viewBox="0 0 32 32">
-        <path d="M21.143 12.297c.473 0 .857.383.857.857v2.572c0 3.016-2.24 5.513-5.143 5.931v2.64h2.572a.857.857 0 110 1.714H12.57a.857.857 0 110-1.714h2.572v-2.64C12.24 21.24 10 18.742 10 15.726v-2.572a.857.857 0 111.714 0v2.572A4.29 4.29 0 0016 20.01a4.29 4.29 0 004.286-4.285v-2.572c0-.474.384-.857.857-.857zM16 6.5a3 3 0 013 3v6a3 3 0 01-6 0v-6a3 3 0 013-3z" fill="currentcolor" fill-rule="nonzero" />
+      <svg
+        className="uppy-DashboardTab-iconAudio"
+        aria-hidden="true"
+        focusable="false"
+        width="32px"
+        height="32px"
+        viewBox="0 0 32 32"
+      >
+        <path
+          d="M21.143 12.297c.473 0 .857.383.857.857v2.572c0 3.016-2.24 5.513-5.143 5.931v2.64h2.572a.857.857 0 110 1.714H12.57a.857.857 0 110-1.714h2.572v-2.64C12.24 21.24 10 18.742 10 15.726v-2.572a.857.857 0 111.714 0v2.572A4.29 4.29 0 0016 20.01a4.29 4.29 0 004.286-4.285v-2.572c0-.474.384-.857.857-.857zM16 6.5a3 3 0 013 3v6a3 3 0 01-6 0v-6a3 3 0 013-3z"
+          fill="currentcolor"
+          fill-rule="nonzero"
+        />
       </svg>
     )
 
@@ -59,44 +98,43 @@ export default class Audio extends UIPlugin {
     })
   }
 
-  #hasAudioCheck () {
+  #hasAudioCheck(): Promise<boolean> {
     if (!this.#mediaDevices) {
       return Promise.resolve(false)
     }
 
-    return this.#mediaDevices.enumerateDevices().then(devices => {
-      return devices.some(device => device.kind === 'audioinput')
+    return this.#mediaDevices.enumerateDevices().then((devices) => {
+      return devices.some((device) => device.kind === 'audioinput')
     })
   }
 
   // eslint-disable-next-line consistent-return
-  #start = (options = null) => {
+  #start = (options?: { deviceId?: string }): Promise<never> | void => {
     if (!this.#supportsUserMedia) {
       return Promise.reject(new Error('Microphone access not supported'))
     }
 
     this.#audioActive = true
 
-    this.#hasAudioCheck().then(hasAudio => {
+    this.#hasAudioCheck().then((hasAudio) => {
       this.setPluginState({
         hasAudio,
       })
 
       // ask user for access to their camera
-      return this.#mediaDevices.getUserMedia({ audio: true })
+      return this.#mediaDevices
+        .getUserMedia({ audio: true })
         .then((stream) => {
           this.#stream = stream
 
           let currentDeviceId = null
           const tracks = stream.getAudioTracks()
 
-          if (!options || !options.deviceId) {
+          if (!options?.deviceId) {
             currentDeviceId = tracks[0].getSettings().deviceId
           } else {
-            tracks.forEach((track) => {
-              if (track.getSettings().deviceId === options.deviceId) {
-                currentDeviceId = track.getSettings().deviceId
-              }
+            currentDeviceId = tracks.findLast((track) => {
+              return track.getSettings().deviceId === options.deviceId
             })
           }
 
@@ -118,24 +156,34 @@ export default class Audio extends UIPlugin {
     })
   }
 
-  #startRecording = () => {
+  #startRecording = (): void => {
     // only used if supportsMediaRecorder() returned true
     // eslint-disable-next-line compat/compat
-    this.#recorder = new MediaRecorder(this.#stream)
+    this.#recorder = new MediaRecorder(this.#stream!)
     this.#recordingChunks = []
     let stoppingBecauseOfMaxSize = false
     this.#recorder.addEventListener('dataavailable', (event) => {
-      this.#recordingChunks.push(event.data)
+      this.#recordingChunks!.push(event.data)
 
       const { restrictions } = this.uppy.opts
-      if (this.#recordingChunks.length > 1
-          && restrictions.maxFileSize != null
-          && !stoppingBecauseOfMaxSize) {
-        const totalSize = this.#recordingChunks.reduce((acc, chunk) => acc + chunk.size, 0)
+      if (
+        this.#recordingChunks!.length > 1 &&
+        restrictions.maxFileSize != null &&
+        !stoppingBecauseOfMaxSize
+      ) {
+        const totalSize = this.#recordingChunks!.reduce(
+          (acc, chunk) => acc + chunk.size,
+          0,
+        )
         // Exclude the initial chunk from the average size calculation because it is likely to be a very small outlier
-        const averageChunkSize = (totalSize - this.#recordingChunks[0].size) / (this.#recordingChunks.length - 1)
+        const averageChunkSize =
+          (totalSize - this.#recordingChunks![0].size) /
+          (this.#recordingChunks!.length - 1)
         const expectedEndChunkSize = averageChunkSize * 3
-        const maxSize = Math.max(0, restrictions.maxFileSize - expectedEndChunkSize)
+        const maxSize = Math.max(
+          0,
+          restrictions.maxFileSize - expectedEndChunkSize,
+        )
 
         if (totalSize > maxSize) {
           stoppingBecauseOfMaxSize = true
@@ -150,9 +198,13 @@ export default class Audio extends UIPlugin {
     this.#recorder.start(500)
 
     // Start the recordingLengthTimer if we are showing the recording length.
+    // TODO: switch this to a private field
     this.recordingLengthTimer = setInterval(() => {
-      const currentRecordingLength = this.getPluginState().recordingLengthSeconds
-      this.setPluginState({ recordingLengthSeconds: currentRecordingLength + 1 })
+      const currentRecordingLength = this.getPluginState()
+        .recordingLengthSeconds as number
+      this.setPluginState({
+        recordingLengthSeconds: currentRecordingLength + 1,
+      })
     }, 1000)
 
     this.setPluginState({
@@ -160,43 +212,49 @@ export default class Audio extends UIPlugin {
     })
   }
 
-  #stopRecording = () => {
-    const stopped = new Promise((resolve) => {
-      this.#recorder.addEventListener('stop', () => {
+  #stopRecording = (): Promise<void> => {
+    const stopped = new Promise<void>((resolve) => {
+      this.#recorder!.addEventListener('stop', () => {
         resolve()
       })
-      this.#recorder.stop()
+      this.#recorder!.stop()
 
       clearInterval(this.recordingLengthTimer)
       this.setPluginState({ recordingLengthSeconds: 0 })
     })
 
-    return stopped.then(() => {
-      this.setPluginState({
-        isRecording: false,
-      })
-      return this.#getAudio()
-    }).then((file) => {
-      try {
-        this.#capturedMediaFile = file
-        // create object url for capture result preview
+    return stopped
+      .then(() => {
         this.setPluginState({
-          recordedAudio: URL.createObjectURL(file.data),
+          isRecording: false,
         })
-      } catch (err) {
-        // Logging the error, except restrictions, which is handled in Core
-        if (!err.isRestriction) {
-          this.uppy.log(err)
+        return this.#getAudio()
+      })
+      .then((file) => {
+        try {
+          this.#capturedMediaFile = file
+          // create object url for capture result preview
+          this.setPluginState({
+            recordedAudio: URL.createObjectURL(file.data),
+          })
+        } catch (err) {
+          // Logging the error, except restrictions, which is handled in Core
+          if (!err.isRestriction) {
+            this.uppy.log(err)
+          }
         }
-      }
-    }).then(() => {
-      this.#recordingChunks = null
-      this.#recorder = null
-    }, (error) => {
-      this.#recordingChunks = null
-      this.#recorder = null
-      throw error
-    })
+      })
+      .then(
+        () => {
+          this.#recordingChunks = null
+          this.#recorder = null
+        },
+        (error) => {
+          this.#recordingChunks = null
+          this.#recorder = null
+          throw error
+        },
+      )
   }
 
   #discardRecordedAudio = () => {
@@ -225,8 +283,8 @@ export default class Audio extends UIPlugin {
 
     if (this.#recorder) {
       await new Promise((resolve) => {
-        this.#recorder.addEventListener('stop', resolve, { once: true })
-        this.#recorder.stop()
+        this.#recorder!.addEventListener('stop', resolve, { once: true })
+        this.#recorder!.stop()
 
         clearInterval(this.recordingLengthTimer)
       })
@@ -244,20 +302,26 @@ export default class Audio extends UIPlugin {
     })
   }
 
-  #getAudio () {
+  #getAudio() {
     // Sometimes in iOS Safari, Blobs (especially the first Blob in the recordingChunks Array)
     // have empty 'type' attributes (e.g. '') so we need to find a Blob that has a defined 'type'
     // attribute in order to determine the correct MIME type.
-    const mimeType = this.#recordingChunks.find(blob => blob.type?.length > 0).type
+    const mimeType = this.#recordingChunks!.find(
+      (blob) => blob.type?.length > 0,
+    )!.type
 
     const fileExtension = getFileTypeExtension(mimeType)
 
     if (!fileExtension) {
-      return Promise.reject(new Error(`Could not retrieve recording: Unsupported media type "${mimeType}"`))
+      return Promise.reject(
+        new Error(
+          `Could not retrieve recording: Unsupported media type "${mimeType}"`,
+        ),
+      )
     }
 
     const name = `audio-${Date.now()}.${fileExtension}`
-    const blob = new Blob(this.#recordingChunks, { type: mimeType })
+    const blob = new Blob(this.#recordingChunks!, { type: mimeType })
     const file = {
       source: this.id,
       name,
@@ -268,20 +332,20 @@ export default class Audio extends UIPlugin {
     return Promise.resolve(file)
   }
 
-  #changeSource = (deviceId) => {
+  #changeSource = (deviceId?: string): void => {
     this.#stop()
     this.#start({ deviceId })
   }
 
   #updateSources = () => {
-    this.#mediaDevices.enumerateDevices().then(devices => {
+    this.#mediaDevices.enumerateDevices().then((devices) => {
       this.setPluginState({
         audioSources: devices.filter((device) => device.kind === 'audioinput'),
       })
     })
   }
 
-  render () {
+  render(): JSX.Element {
     if (!this.#audioActive) {
       this.#start()
     }
@@ -302,6 +366,8 @@ export default class Audio extends UIPlugin {
       <RecordingScreen
         // eslint-disable-next-line react/jsx-props-no-spreading
         {...audioState}
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore TODO: remove unused
         audioActive={this.#audioActive}
         onChangeSource={this.#changeSource}
         onStartRecording={this.#startRecording}
@@ -318,7 +384,7 @@ export default class Audio extends UIPlugin {
     )
   }
 
-  install () {
+  install(): void {
     this.setPluginState({
       audioReady: false,
       recordingLengthSeconds: 0,
@@ -355,7 +421,7 @@ export default class Audio extends UIPlugin {
     }
   }
 
-  uninstall () {
+  uninstall(): void {
     if (this.#stream) {
       this.#stop()
     }

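The dataavailable handler above preserves the existing max-file-size heuristic: the first (typically tiny) chunk is excluded from the average chunk size, headroom for roughly three more chunks is reserved, and recording stops once the running total crosses that reduced limit. A standalone TypeScript sketch of the same arithmetic, for illustration only (the function name and the numbers in the trailing comment are hypothetical):

function shouldStopForMaxSize(chunks: Blob[], maxFileSize: number): boolean {
  if (chunks.length <= 1) return false
  const totalSize = chunks.reduce((acc, chunk) => acc + chunk.size, 0)
  // Exclude the initial chunk from the average; it is usually a small outlier.
  const averageChunkSize = (totalSize - chunks[0].size) / (chunks.length - 1)
  // Reserve headroom for about three more chunks of data.
  const expectedEndChunkSize = averageChunkSize * 3
  const maxSize = Math.max(0, maxFileSize - expectedEndChunkSize)
  return totalSize > maxSize
}

// With ~16 kB chunks every 500 ms and a 1 MB maxFileSize, the recorder would
// stop once the total passes roughly 952 kB (1 MB minus 3 × 16 kB).
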
+ 14 - 2
packages/@uppy/audio/src/AudioSourceSelect.jsx → packages/@uppy/audio/src/AudioSourceSelect.tsx

@@ -1,11 +1,23 @@
 import { h } from 'preact'
 
-export default ({ currentDeviceId, audioSources, onChangeSource }) => {
+export interface AudioSourceSelectProps {
+  currentDeviceId: string
+  audioSources: MediaDeviceInfo[]
+  onChangeSource: (value: string) => void
+}
+
+export default ({
+  currentDeviceId,
+  audioSources,
+  onChangeSource,
+}: AudioSourceSelectProps): JSX.Element => {
   return (
     <div className="uppy-Audio-videoSource">
       <select
         className="uppy-u-reset uppy-Audio-audioSource-select"
-        onChange={(event) => { onChangeSource(event.target.value) }}
+        onChange={(event) => {
+          onChangeSource(event.target.value)
+        }}
       >
         {audioSources.map((audioSource) => (
           <option

+ 7 - 1
packages/@uppy/audio/src/DiscardButton.jsx → packages/@uppy/audio/src/DiscardButton.tsx

@@ -1,6 +1,12 @@
 import { h } from 'preact'
+import type { I18n } from '@uppy/utils/lib/Translator'
 
-function DiscardButton ({ onDiscard, i18n }) {
+interface DiscardButtonProps {
+  onDiscard: () => void
+  i18n: I18n
+}
+
+function DiscardButton({ onDiscard, i18n }: DiscardButtonProps): JSX.Element {
   return (
     <button
       className="uppy-u-reset uppy-c-btn uppy-Audio-button"

+ 0 - 12
packages/@uppy/audio/src/PermissionsScreen.jsx

@@ -1,12 +0,0 @@
-import { h } from 'preact'
-
-export default (props) => {
-  const { icon, hasAudio, i18n } = props
-  return (
-    <div className="uppy-Audio-permissons">
-      <div className="uppy-Audio-permissonsIcon">{icon()}</div>
-      <h1 className="uppy-Audio-title">{hasAudio ? i18n('allowAudioAccessTitle') : i18n('noAudioTitle')}</h1>
-      <p>{hasAudio ? i18n('allowAudioAccessDescription') : i18n('noAudioDescription')}</p>
-    </div>
-  )
-}

+ 25 - 0
packages/@uppy/audio/src/PermissionsScreen.tsx

@@ -0,0 +1,25 @@
+import type { I18n } from '@uppy/utils/lib/Translator'
+import { h } from 'preact'
+
+interface PermissionsScreenProps {
+  icon: () => JSX.Element | null
+  hasAudio: boolean
+  i18n: I18n
+}
+
+export default (props: PermissionsScreenProps): JSX.Element => {
+  const { icon, hasAudio, i18n } = props
+  return (
+    <div className="uppy-Audio-permissons">
+      <div className="uppy-Audio-permissonsIcon">{icon()}</div>
+      <h1 className="uppy-Audio-title">
+        {hasAudio ? i18n('allowAudioAccessTitle') : i18n('noAudioTitle')}
+      </h1>
+      <p>
+        {hasAudio
+          ? i18n('allowAudioAccessDescription')
+          : i18n('noAudioDescription')}
+      </p>
+    </div>
+  )
+}

+ 0 - 35
packages/@uppy/audio/src/RecordButton.jsx

@@ -1,35 +0,0 @@
-import { h } from 'preact'
-
-export default function RecordButton ({ recording, onStartRecording, onStopRecording, i18n }) {
-  if (recording) {
-    return (
-      <button
-        className="uppy-u-reset uppy-c-btn uppy-Audio-button"
-        type="button"
-        title={i18n('stopAudioRecording')}
-        aria-label={i18n('stopAudioRecording')}
-        onClick={onStopRecording}
-        data-uppy-super-focusable
-      >
-        <svg aria-hidden="true" focusable="false" className="uppy-c-icon" width="100" height="100" viewBox="0 0 100 100">
-          <rect x="15" y="15" width="70" height="70" />
-        </svg>
-      </button>
-    )
-  }
-
-  return (
-    <button
-      className="uppy-u-reset uppy-c-btn uppy-Audio-button"
-      type="button"
-      title={i18n('startAudioRecording')}
-      aria-label={i18n('startAudioRecording')}
-      onClick={onStartRecording}
-      data-uppy-super-focusable
-    >
-      <svg aria-hidden="true" focusable="false" className="uppy-c-icon" width="14px" height="20px" viewBox="0 0 14 20">
-        <path d="M7 14c2.21 0 4-1.71 4-3.818V3.818C11 1.71 9.21 0 7 0S3 1.71 3 3.818v6.364C3 12.29 4.79 14 7 14zm6.364-7h-.637a.643.643 0 0 0-.636.65V9.6c0 3.039-2.565 5.477-5.6 5.175-2.645-.264-4.582-2.692-4.582-5.407V7.65c0-.36-.285-.65-.636-.65H.636A.643.643 0 0 0 0 7.65v1.631c0 3.642 2.544 6.888 6.045 7.382v1.387H3.818a.643.643 0 0 0-.636.65v.65c0 .36.285.65.636.65h6.364c.351 0 .636-.29.636-.65v-.65c0-.36-.285-.65-.636-.65H7.955v-1.372C11.363 16.2 14 13.212 14 9.6V7.65c0-.36-.285-.65-.636-.65z" fill="#FFF" fill-rule="nonzero" />
-      </svg>
-    </button>
-  )
-}

+ 66 - 0
packages/@uppy/audio/src/RecordButton.tsx

@@ -0,0 +1,66 @@
+import type { I18n } from '@uppy/utils/lib/Translator'
+import { h } from 'preact'
+
+interface RecordButtonProps {
+  recording: boolean
+  onStartRecording: () => void
+  onStopRecording: () => void
+  i18n: I18n
+}
+
+export default function RecordButton({
+  recording,
+  onStartRecording,
+  onStopRecording,
+  i18n,
+}: RecordButtonProps): JSX.Element {
+  if (recording) {
+    return (
+      <button
+        className="uppy-u-reset uppy-c-btn uppy-Audio-button"
+        type="button"
+        title={i18n('stopAudioRecording')}
+        aria-label={i18n('stopAudioRecording')}
+        onClick={onStopRecording}
+        data-uppy-super-focusable
+      >
+        <svg
+          aria-hidden="true"
+          focusable="false"
+          className="uppy-c-icon"
+          width="100"
+          height="100"
+          viewBox="0 0 100 100"
+        >
+          <rect x="15" y="15" width="70" height="70" />
+        </svg>
+      </button>
+    )
+  }
+
+  return (
+    <button
+      className="uppy-u-reset uppy-c-btn uppy-Audio-button"
+      type="button"
+      title={i18n('startAudioRecording')}
+      aria-label={i18n('startAudioRecording')}
+      onClick={onStartRecording}
+      data-uppy-super-focusable
+    >
+      <svg
+        aria-hidden="true"
+        focusable="false"
+        className="uppy-c-icon"
+        width="14px"
+        height="20px"
+        viewBox="0 0 14 20"
+      >
+        <path
+          d="M7 14c2.21 0 4-1.71 4-3.818V3.818C11 1.71 9.21 0 7 0S3 1.71 3 3.818v6.364C3 12.29 4.79 14 7 14zm6.364-7h-.637a.643.643 0 0 0-.636.65V9.6c0 3.039-2.565 5.477-5.6 5.175-2.645-.264-4.582-2.692-4.582-5.407V7.65c0-.36-.285-.65-.636-.65H.636A.643.643 0 0 0 0 7.65v1.631c0 3.642 2.544 6.888 6.045 7.382v1.387H3.818a.643.643 0 0 0-.636.65v.65c0 .36.285.65.636.65h6.364c.351 0 .636-.29.636-.65v-.65c0-.36-.285-.65-.636-.65H7.955v-1.372C11.363 16.2 14 13.212 14 9.6V7.65c0-.36-.285-.65-.636-.65z"
+          fill="#FFF"
+          fill-rule="nonzero"
+        />
+      </svg>
+    </button>
+  )
+}

+ 0 - 12
packages/@uppy/audio/src/RecordingLength.jsx

@@ -1,12 +0,0 @@
-import { h } from 'preact'
-import formatSeconds from './formatSeconds.js'
-
-export default function RecordingLength ({ recordingLengthSeconds, i18n }) {
-  const formattedRecordingLengthSeconds = formatSeconds(recordingLengthSeconds)
-
-  return (
-    <span aria-label={i18n('recordingLength', { recording_length: formattedRecordingLengthSeconds })}>
-      {formattedRecordingLengthSeconds}
-    </span>
-  )
-}

+ 25 - 0
packages/@uppy/audio/src/RecordingLength.tsx

@@ -0,0 +1,25 @@
+import { h } from 'preact'
+import type { I18n } from '@uppy/utils/lib/Translator'
+import formatSeconds from './formatSeconds.ts'
+
+interface RecordingLengthProps {
+  recordingLengthSeconds: number
+  i18n: I18n
+}
+
+export default function RecordingLength({
+  recordingLengthSeconds,
+  i18n,
+}: RecordingLengthProps): JSX.Element {
+  const formattedRecordingLengthSeconds = formatSeconds(recordingLengthSeconds)
+
+  return (
+    <span
+      aria-label={i18n('recordingLength', {
+        recording_length: formattedRecordingLengthSeconds,
+      })}
+    >
+      {formattedRecordingLengthSeconds}
+    </span>
+  )
+}

+ 48 - 32
packages/@uppy/audio/src/RecordingScreen.jsx → packages/@uppy/audio/src/RecordingScreen.tsx

@@ -1,14 +1,34 @@
 /* eslint-disable jsx-a11y/media-has-caption */
 import { h } from 'preact'
 import { useEffect, useRef } from 'preact/hooks'
-import RecordButton from './RecordButton.jsx'
-import RecordingLength from './RecordingLength.jsx'
-import AudioSourceSelect from './AudioSourceSelect.jsx'
-import AudioOscilloscope from './audio-oscilloscope/index.js'
-import SubmitButton from './SubmitButton.jsx'
-import DiscardButton from './DiscardButton.jsx'
+import type { I18n } from '@uppy/utils/lib/Translator'
+import RecordButton from './RecordButton.tsx'
+import RecordingLength from './RecordingLength.tsx'
+import AudioSourceSelect, {
+  type AudioSourceSelectProps,
+} from './AudioSourceSelect.tsx'
+import AudioOscilloscope from './audio-oscilloscope/index.ts'
+import SubmitButton from './SubmitButton.tsx'
+import DiscardButton from './DiscardButton.tsx'
 
-export default function RecordingScreen (props) {
+interface RecordingScreenProps extends AudioSourceSelectProps {
+  stream: MediaStream | null | undefined
+  recordedAudio: string
+  recording: boolean
+  supportsRecording: boolean
+  showAudioSourceDropdown: boolean
+  onSubmit: () => void
+  i18n: I18n
+  onStartRecording: () => void
+  onStopRecording: () => void
+  onStop: () => void
+  onDiscardRecordedAudio: () => void
+  recordingLengthSeconds: number
+}
+
+export default function RecordingScreen(
+  props: RecordingScreenProps,
+): JSX.Element {
   const {
     stream,
     recordedAudio,
@@ -25,8 +45,8 @@ export default function RecordingScreen (props) {
     recordingLengthSeconds,
   } = props
 
-  const canvasEl = useRef(null)
-  const oscilloscope = useRef(null)
+  const canvasEl = useRef<HTMLCanvasElement>(null)
+  const oscilloscope = useRef<AudioOscilloscope | null>()
 
   // componentDidMount / componentDidUnmount
   useEffect(() => {
@@ -39,7 +59,7 @@ export default function RecordingScreen (props) {
   // componentDidUpdate
   useEffect(() => {
     if (!recordedAudio) {
-      oscilloscope.current = new AudioOscilloscope(canvasEl.current, {
+      oscilloscope.current = new AudioOscilloscope(canvasEl.current!, {
         canvas: {
           width: 600,
           height: 600,
@@ -62,33 +82,24 @@ export default function RecordingScreen (props) {
 
   const hasRecordedAudio = recordedAudio != null
   const shouldShowRecordButton = !hasRecordedAudio && supportsRecording
-  const shouldShowAudioSourceDropdown = showAudioSourceDropdown
-    && !hasRecordedAudio
-    && audioSources
-    && audioSources.length > 1
+  const shouldShowAudioSourceDropdown =
+    showAudioSourceDropdown &&
+    !hasRecordedAudio &&
+    audioSources &&
+    audioSources.length > 1
 
   return (
     <div className="uppy-Audio-container">
       <div className="uppy-Audio-audioContainer">
-        {hasRecordedAudio
-          ? (
-            <audio
-              className="uppy-Audio-player"
-              controls
-              src={recordedAudio}
-            />
-          ) : (
-            <canvas
-              ref={canvasEl}
-              className="uppy-Audio-canvas"
-            />
-          )}
+        {hasRecordedAudio ? (
+          <audio className="uppy-Audio-player" controls src={recordedAudio} />
+        ) : (
+          <canvas ref={canvasEl} className="uppy-Audio-canvas" />
+        )}
       </div>
       <div className="uppy-Audio-footer">
         <div className="uppy-Audio-audioSourceContainer">
-          {shouldShowAudioSourceDropdown
-            ? AudioSourceSelect(props)
-            : null}
+          {shouldShowAudioSourceDropdown ? AudioSourceSelect(props) : null}
         </div>
         <div className="uppy-Audio-buttonContainer">
           {shouldShowRecordButton && (
@@ -102,12 +113,17 @@ export default function RecordingScreen (props) {
 
           {hasRecordedAudio && <SubmitButton onSubmit={onSubmit} i18n={i18n} />}
 
-          {hasRecordedAudio && <DiscardButton onDiscard={onDiscardRecordedAudio} i18n={i18n} />}
+          {hasRecordedAudio && (
+            <DiscardButton onDiscard={onDiscardRecordedAudio} i18n={i18n} />
+          )}
         </div>
 
         <div className="uppy-Audio-recordingLength">
           {!hasRecordedAudio && (
-            <RecordingLength recordingLengthSeconds={recordingLengthSeconds} i18n={i18n} />
+            <RecordingLength
+              recordingLengthSeconds={recordingLengthSeconds}
+              i18n={i18n}
+            />
           )}
         </div>
       </div>

+ 12 - 2
packages/@uppy/audio/src/SubmitButton.jsx → packages/@uppy/audio/src/SubmitButton.tsx

@@ -1,6 +1,12 @@
 import { h } from 'preact'
+import type { I18n } from '@uppy/utils/lib/Translator'
 
-function SubmitButton ({ onSubmit, i18n }) {
+interface SubmitButtonProps {
+  onSubmit: () => void
+  i18n: I18n
+}
+
+function SubmitButton({ onSubmit, i18n }: SubmitButtonProps): JSX.Element {
   return (
     <button
       className="uppy-u-reset uppy-c-btn uppy-Audio-button uppy-Audio-button--submit"
@@ -19,7 +25,11 @@ function SubmitButton ({ onSubmit, i18n }) {
         focusable="false"
         className="uppy-c-icon"
       >
-        <path fill="#fff" fillRule="nonzero" d="M10.66 0L12 1.31 4.136 9 0 4.956l1.34-1.31L4.136 6.38z" />
+        <path
+          fill="#fff"
+          fillRule="nonzero"
+          d="M10.66 0L12 1.31 4.136 9 0 4.956l1.34-1.31L4.136 6.38z"
+        />
       </svg>
     </button>
   )

+ 0 - 84
packages/@uppy/audio/src/audio-oscilloscope/index.js

@@ -1,84 +0,0 @@
-function isFunction (v) {
-  return typeof v === 'function'
-}
-
-function result (v) {
-  return isFunction(v) ? v() : v
-}
-
-/* Audio Oscilloscope
-  https://github.com/miguelmota/audio-oscilloscope
-*/
-export default class AudioOscilloscope {
-  constructor (canvas, options = {}) {
-    const canvasOptions = options.canvas || {}
-    const canvasContextOptions = options.canvasContext || {}
-    this.analyser = null
-    this.bufferLength = 0
-    this.dataArray = []
-    this.canvas = canvas
-    this.width = result(canvasOptions.width) || this.canvas.width
-    this.height = result(canvasOptions.height) || this.canvas.height
-    this.canvas.width = this.width
-    this.canvas.height = this.height
-    this.canvasContext = this.canvas.getContext('2d')
-    this.canvasContext.fillStyle = result(canvasContextOptions.fillStyle) || 'rgb(255, 255, 255)'
-    this.canvasContext.strokeStyle = result(canvasContextOptions.strokeStyle) || 'rgb(0, 0, 0)'
-    this.canvasContext.lineWidth = result(canvasContextOptions.lineWidth) || 1
-    this.onDrawFrame = isFunction(options.onDrawFrame) ? options.onDrawFrame : () => {}
-  }
-
-  addSource (streamSource) {
-    this.streamSource = streamSource
-    this.audioContext = this.streamSource.context
-    this.analyser = this.audioContext.createAnalyser()
-    this.analyser.fftSize = 2048
-    this.bufferLength = this.analyser.frequencyBinCount
-    this.source = this.audioContext.createBufferSource()
-    this.dataArray = new Uint8Array(this.bufferLength)
-    this.analyser.getByteTimeDomainData(this.dataArray)
-    this.streamSource.connect(this.analyser)
-  }
-
-  draw () {
-    const { analyser, dataArray, bufferLength } = this
-    const ctx = this.canvasContext
-    const w = this.width
-    const h = this.height
-
-    if (analyser) {
-      analyser.getByteTimeDomainData(dataArray)
-    }
-
-    ctx.fillRect(0, 0, w, h)
-    ctx.beginPath()
-
-    const sliceWidth = (w * 1.0) / bufferLength
-    let x = 0
-
-    if (!bufferLength) {
-      ctx.moveTo(0, this.height / 2)
-    }
-
-    for (let i = 0; i < bufferLength; i++) {
-      const v = dataArray[i] / 128.0
-      const y = v * (h / 2)
-
-      if (i === 0) {
-        ctx.moveTo(x, y)
-      } else {
-        ctx.lineTo(x, y)
-      }
-
-      x += sliceWidth
-    }
-
-    ctx.lineTo(w, h / 2)
-    ctx.stroke()
-
-    this.onDrawFrame(this)
-    requestAnimationFrame(this.#draw)
-  }
-
-  #draw = () => this.draw()
-}

+ 136 - 0
packages/@uppy/audio/src/audio-oscilloscope/index.ts

@@ -0,0 +1,136 @@
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isFunction(v: any): v is Function {
+  return typeof v === 'function'
+}
+
+function result<T>(v: T): T extends (...args: any) => any ? ReturnType<T> : T {
+  return isFunction(v) ? v() : v
+}
+
+type MaybeFunction<T> = T | (() => T)
+
+interface AudioOscilloscopeOptions {
+  canvas?: {
+    width?: number
+    height?: number
+  }
+  canvasContext?: {
+    width?: MaybeFunction<number>
+    height?: MaybeFunction<number>
+    lineWidth?: MaybeFunction<number>
+    fillStyle?: MaybeFunction<string>
+    strokeStyle?: MaybeFunction<string>
+  }
+
+  // eslint-disable-next-line no-use-before-define
+  onDrawFrame?: (oscilloscope: AudioOscilloscope) => void
+}
+
+/* Audio Oscilloscope
+  https://github.com/miguelmota/audio-oscilloscope
+*/
+export default class AudioOscilloscope {
+  private canvas: HTMLCanvasElement
+
+  private canvasContext: CanvasRenderingContext2D
+
+  private width: number
+
+  private height: number
+
+  private analyser: null | AnalyserNode
+
+  private bufferLength: number
+
+  private dataArray: Uint8Array
+
+  // eslint-disable-next-line no-use-before-define
+  private onDrawFrame: (oscilloscope: AudioOscilloscope) => void
+
+  private streamSource?: MediaStreamAudioSourceNode
+
+  private audioContext: BaseAudioContext
+
+  public source: AudioBufferSourceNode
+
+  constructor(
+    canvas: HTMLCanvasElement,
+    options: AudioOscilloscopeOptions = {},
+  ) {
+    const canvasOptions =
+      options.canvas || ({} as NonNullable<AudioOscilloscopeOptions['canvas']>)
+    const canvasContextOptions =
+      options.canvasContext ||
+      ({} as NonNullable<AudioOscilloscopeOptions['canvasContext']>)
+    this.analyser = null
+    this.bufferLength = 0
+    this.canvas = canvas
+    this.width = result(canvasOptions.width) || this.canvas.width
+    this.height = result(canvasOptions.height) || this.canvas.height
+    this.canvas.width = this.width
+    this.canvas.height = this.height
+    this.canvasContext = this.canvas.getContext('2d')!
+    this.canvasContext.fillStyle =
+      result(canvasContextOptions.fillStyle) || 'rgb(255, 255, 255)'
+    this.canvasContext.strokeStyle =
+      result(canvasContextOptions.strokeStyle) || 'rgb(0, 0, 0)'
+    this.canvasContext.lineWidth = result(canvasContextOptions.lineWidth) || 1
+    this.onDrawFrame = isFunction(options.onDrawFrame)
+      ? options.onDrawFrame
+      : () => {} // eslint-disable-line @typescript-eslint/no-empty-function
+  }
+
+  addSource(streamSource: MediaStreamAudioSourceNode): void {
+    this.streamSource = streamSource
+    this.audioContext = this.streamSource.context
+    this.analyser = this.audioContext.createAnalyser()
+    this.analyser.fftSize = 2048
+    this.bufferLength = this.analyser.frequencyBinCount
+    this.source = this.audioContext.createBufferSource()
+    this.dataArray = new Uint8Array(this.bufferLength)
+    this.analyser.getByteTimeDomainData(this.dataArray)
+    this.streamSource.connect(this.analyser)
+  }
+
+  draw(): void {
+    const { analyser, dataArray, bufferLength } = this
+    const ctx = this.canvasContext
+    const w = this.width
+    const h = this.height
+
+    if (analyser) {
+      analyser.getByteTimeDomainData(dataArray)
+    }
+
+    ctx.fillRect(0, 0, w, h)
+    ctx.beginPath()
+
+    const sliceWidth = (w * 1.0) / bufferLength
+    let x = 0
+
+    if (!bufferLength) {
+      ctx.moveTo(0, this.height / 2)
+    }
+
+    for (let i = 0; i < bufferLength; i++) {
+      const v = dataArray[i] / 128.0
+      const y = v * (h / 2)
+
+      if (i === 0) {
+        ctx.moveTo(x, y)
+      } else {
+        ctx.lineTo(x, y)
+      }
+
+      x += sliceWidth
+    }
+
+    ctx.lineTo(w, h / 2)
+    ctx.stroke()
+
+    this.onDrawFrame(this)
+    requestAnimationFrame(this.#draw)
+  }
+
+  #draw = () => this.draw()
+}

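The oscilloscope draws a time-domain waveform from an AnalyserNode onto the canvas and keeps itself running through requestAnimationFrame. A minimal standalone usage sketch, assuming a microphone MediaStream obtained via getUserMedia (RecordingScreen.tsx wires it up in a similar way); this is not part of the commit:

import AudioOscilloscope from './audio-oscilloscope/index.ts'

const canvas = document.querySelector<HTMLCanvasElement>('canvas')!
const scope = new AudioOscilloscope(canvas, {
  canvas: { width: 600, height: 600 },
  canvasContext: { lineWidth: 2, fillStyle: 'rgb(0, 0, 0)', strokeStyle: 'rgb(0, 255, 0)' },
})

const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
const audioContext = new AudioContext()
// addSource() creates the AnalyserNode and connects the stream source to it.
scope.addSource(audioContext.createMediaStreamSource(stream))
scope.draw() // reschedules itself every animation frame
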
+ 0 - 12
packages/@uppy/audio/src/formatSeconds.js

@@ -1,12 +0,0 @@
-/**
- * Takes an Integer value of seconds (e.g. 83) and converts it into a human-readable formatted string (e.g. '1:23').
- *
- * @param {Integer} seconds
- * @returns {string} the formatted seconds (e.g. '1:23' for 1 minute and 23 seconds)
- *
- */
-export default function formatSeconds (seconds) {
-  return `${Math.floor(
-    seconds / 60,
-  )}:${String(seconds % 60).padStart(2, 0)}`
-}

+ 0 - 12
packages/@uppy/audio/src/formatSeconds.test.js

@@ -1,12 +0,0 @@
-import { describe, expect, it } from 'vitest'
-import formatSeconds from './formatSeconds.js'
-
-describe('formatSeconds', () => {
-  it('should return a value of \'0:43\' when an argument of 43 seconds is supplied', () => {
-    expect(formatSeconds(43)).toEqual('0:43')
-  })
-
-  it('should return a value of \'1:43\' when an argument of 103 seconds is supplied', () => {
-    expect(formatSeconds(103)).toEqual('1:43')
-  })
-})

+ 12 - 0
packages/@uppy/audio/src/formatSeconds.test.ts

@@ -0,0 +1,12 @@
+import { describe, expect, it } from 'vitest'
+import formatSeconds from './formatSeconds.ts'
+
+describe('formatSeconds', () => {
+  it("should return a value of '0:43' when an argument of 43 seconds is supplied", () => {
+    expect(formatSeconds(43)).toEqual('0:43')
+  })
+
+  it("should return a value of '1:43' when an argument of 103 seconds is supplied", () => {
+    expect(formatSeconds(103)).toEqual('1:43')
+  })
+})

+ 7 - 0
packages/@uppy/audio/src/formatSeconds.ts

@@ -0,0 +1,7 @@
+/**
+ * Takes an Integer value of seconds (e.g. 83) and converts it into a
+ * human-readable formatted string (e.g. '1:23').
+ */
+export default function formatSeconds(seconds: number): string {
+  return `${Math.floor(seconds / 60)}:${String(seconds % 60).padStart(2, '0')}`
+}

+ 0 - 1
packages/@uppy/audio/src/index.js

@@ -1 +0,0 @@
-export { default } from './Audio.jsx'

+ 1 - 0
packages/@uppy/audio/src/index.ts

@@ -0,0 +1 @@
+export { default } from './Audio.tsx'

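With the entry point re-exporting the TypeScript class, consumer code gets its options checked against AudioOptions and the Uppy instance's Meta/Body generics. A minimal consumer-side sketch (standard Uppy setup; the option value is only an example):

import Uppy from '@uppy/core'
import Audio from '@uppy/audio'

const uppy = new Uppy().use(Audio, { showAudioSourceDropdown: true })
// A misspelled option or a wrongly typed value now fails to compile
// instead of being silently ignored at runtime.
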
+ 10 - 5
packages/@uppy/audio/src/locale.js → packages/@uppy/audio/src/locale.ts

@@ -1,3 +1,5 @@
+import type { Locale } from '@uppy/utils/lib/Translator'
+
 export default {
   strings: {
     pluginNameAudio: 'Audio',
@@ -10,13 +12,16 @@ export default {
     // Title on the “allow access” screen
     allowAudioAccessTitle: 'Please allow access to your microphone',
     // Description on the “allow access” screen
-    allowAudioAccessDescription: 'In order to record audio, please allow microphone access for this site.',
+    allowAudioAccessDescription:
+      'In order to record audio, please allow microphone access for this site.',
     // Title on the “device not available” screen
     noAudioTitle: 'Microphone Not Available',
     // Description on the “device not available” screen
-    noAudioDescription: 'In order to record audio, please connect a microphone or another audio input device',
+    noAudioDescription:
+      'In order to record audio, please connect a microphone or another audio input device',
     // Message about file size will be shown in an Informer bubble
-    recordingStoppedMaxSize: 'Recording stopped because the file size is about to exceed the limit',
+    recordingStoppedMaxSize:
+      'Recording stopped because the file size is about to exceed the limit',
     // Used as the label for the counter that shows recording length (`1:25`).
     // This is not visibly rendered but is picked up by screen readers.
     recordingLength: 'Recording length %{recording_length}',
@@ -26,5 +31,5 @@ export default {
     // Used as the label for the discard cross button.
     // This is not visibly rendered but is picked up by screen readers.
     discardRecordedFile: 'Discard recorded file',
-  },
-}
+  } as Locale<0>['strings'],
+} as any as Locale

+ 0 - 6
packages/@uppy/audio/src/supportsMediaRecorder.js

@@ -1,6 +0,0 @@
-export default function supportsMediaRecorder () {
-  /* eslint-disable compat/compat */
-  return typeof MediaRecorder === 'function'
-    && typeof MediaRecorder.prototype?.start === 'function'
-  /* eslint-enable compat/compat */
-}

+ 8 - 4
packages/@uppy/audio/src/supportsMediaRecorder.test.js → packages/@uppy/audio/src/supportsMediaRecorder.test.ts

@@ -1,24 +1,28 @@
-/* eslint-disable max-classes-per-file */
+/* eslint-disable max-classes-per-file, compat/compat */
 import { describe, expect, it } from 'vitest'
-import supportsMediaRecorder from './supportsMediaRecorder.js'
+import supportsMediaRecorder from './supportsMediaRecorder.ts'
 
 describe('supportsMediaRecorder', () => {
   it('should return true if MediaRecorder is supported', () => {
+    // @ts-expect-error just a test
     globalThis.MediaRecorder = class MediaRecorder {
-      start () {} // eslint-disable-line
+      start() {} // eslint-disable-line
     }
     expect(supportsMediaRecorder()).toEqual(true)
   })
 
   it('should return false if MediaRecorder is not supported', () => {
+    // @ts-expect-error just a test
     globalThis.MediaRecorder = undefined
     expect(supportsMediaRecorder()).toEqual(false)
 
+    // @ts-expect-error just a test
     globalThis.MediaRecorder = class MediaRecorder {}
     expect(supportsMediaRecorder()).toEqual(false)
 
+    // @ts-expect-error just a test
     globalThis.MediaRecorder = class MediaRecorder {
-      foo () {} // eslint-disable-line
+      foo() {} // eslint-disable-line
     }
     expect(supportsMediaRecorder()).toEqual(false)
   })

+ 8 - 0
packages/@uppy/audio/src/supportsMediaRecorder.ts

@@ -0,0 +1,8 @@
+export default function supportsMediaRecorder(): boolean {
+  /* eslint-disable compat/compat */
+  return (
+    typeof MediaRecorder === 'function' &&
+    typeof MediaRecorder.prototype?.start === 'function'
+  )
+  /* eslint-enable compat/compat */
+}

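supportsMediaRecorder() checks for both the MediaRecorder constructor and a start method on its prototype, which filters out incomplete or stubbed implementations. A hedged sketch of the kind of guard a caller can place in front of recording (not the plugin's actual code path):

import supportsMediaRecorder from './supportsMediaRecorder.ts'

function createRecorder(stream: MediaStream): MediaRecorder | null {
  if (!supportsMediaRecorder()) {
    // e.g. surface an "unsupported browser" state instead of throwing
    return null
  }
  // eslint-disable-next-line compat/compat
  return new MediaRecorder(stream)
}
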
+ 25 - 0
packages/@uppy/audio/tsconfig.build.json

@@ -0,0 +1,25 @@
+{
+  "extends": "../../../tsconfig.shared",
+  "compilerOptions": {
+    "noImplicitAny": false,
+    "outDir": "./lib",
+    "paths": {
+      "@uppy/utils/lib/*": ["../utils/src/*"],
+      "@uppy/core": ["../core/src/index.js"],
+      "@uppy/core/lib/*": ["../core/src/*"]
+    },
+    "resolveJsonModule": false,
+    "rootDir": "./src",
+    "skipLibCheck": true
+  },
+  "include": ["./src/**/*.*"],
+  "exclude": ["./src/**/*.test.ts"],
+  "references": [
+    {
+      "path": "../utils/tsconfig.build.json"
+    },
+    {
+      "path": "../core/tsconfig.build.json"
+    }
+  ]
+}

+ 21 - 0
packages/@uppy/audio/tsconfig.json

@@ -0,0 +1,21 @@
+{
+  "extends": "../../../tsconfig.shared",
+  "compilerOptions": {
+    "emitDeclarationOnly": false,
+    "noEmit": true,
+    "paths": {
+      "@uppy/utils/lib/*": ["../utils/src/*"],
+      "@uppy/core": ["../core/src/index.js"],
+      "@uppy/core/lib/*": ["../core/src/*"]
+    }
+  },
+  "include": ["./package.json", "./src/**/*.*"],
+  "references": [
+    {
+      "path": "../utils/tsconfig.build.json"
+    },
+    {
+      "path": "../core/tsconfig.build.json"
+    }
+  ]
+}