diff --git a/examples/minimal-react/package-lock.json b/examples/minimal-react/package-lock.json index 4a32596..fdcacc0 100644 --- a/examples/minimal-react/package-lock.json +++ b/examples/minimal-react/package-lock.json @@ -29,7 +29,7 @@ "version": "0.4.0", "license": "Apache-2.0", "dependencies": { - "@fishjam-dev/ts-client": "^0.5.0", + "@fishjam-dev/ts-client": "file:../ts-client-sdk", "events": "3.3.0", "lodash.isequal": "4.5.0" }, diff --git a/examples/minimal-react/src/components/App.tsx b/examples/minimal-react/src/components/App.tsx index 49b5982..839f397 100644 --- a/examples/minimal-react/src/components/App.tsx +++ b/examples/minimal-react/src/components/App.tsx @@ -59,7 +59,7 @@ export const App = () => { // Get screen sharing MediaStream navigator.mediaDevices.getDisplayMedia(SCREEN_SHARING_MEDIA_CONSTRAINTS).then((screenStream) => { // Add local MediaStream to webrtc - screenStream.getTracks().forEach((track) => client.addTrack(track, screenStream, { type: "screen" })); + screenStream.getTracks().forEach((track) => client.addTrack(track, { type: "screen" })); }); }} > diff --git a/examples/use-camera-and-microphone-example/package-lock.json b/examples/use-camera-and-microphone-example/package-lock.json index b90db8d..0dc39e1 100644 --- a/examples/use-camera-and-microphone-example/package-lock.json +++ b/examples/use-camera-and-microphone-example/package-lock.json @@ -33,7 +33,7 @@ "version": "0.4.0", "license": "Apache-2.0", "dependencies": { - "@fishjam-dev/ts-client": "^0.5.0", + "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk", "events": "3.3.0", "lodash.isequal": "4.5.0" }, @@ -49,7 +49,7 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.6", - "prettier": "3.2.5", + "prettier": "3.3.2", "prettier-plugin-tailwindcss": "0.5.12", "react": "^18.2.0", "testcontainers": "^10.7.2", diff --git a/examples/use-camera-and-microphone-example/src/DeviceControls.tsx b/examples/use-camera-and-microphone-example/src/DeviceControls.tsx index 81286e4..2bab0e4 100644 --- a/examples/use-camera-and-microphone-example/src/DeviceControls.tsx +++ b/examples/use-camera-and-microphone-example/src/DeviceControls.tsx @@ -1,4 +1,4 @@ -import type { PeerStatus, UseMicrophoneResult, UseCameraResult, UseScreenShareResult } from "@fishjam-dev/react-client"; +import type { PeerStatus, CameraAPI, MicrophoneAPI, ScreenShareAPI } from "@fishjam-dev/react-client"; import type { TrackMetadata } from "./fishjamSetup"; type DeviceControlsProps = { @@ -6,15 +6,15 @@ type DeviceControlsProps = { metadata: TrackMetadata; } & ( | { - device: UseMicrophoneResult; + device: MicrophoneAPI; type: "audio"; } | { - device: UseCameraResult; + device: CameraAPI; type: "video"; } | { - device: UseScreenShareResult; + device: ScreenShareAPI; type: "screenshare"; } ); diff --git a/examples/use-camera-and-microphone-example/src/DeviceSelector.tsx b/examples/use-camera-and-microphone-example/src/DeviceSelector.tsx index 900259e..83be0a8 100644 --- a/examples/use-camera-and-microphone-example/src/DeviceSelector.tsx +++ b/examples/use-camera-and-microphone-example/src/DeviceSelector.tsx @@ -5,11 +5,12 @@ type Props = { name: string; defaultOptionText: string; devices: MediaDeviceInfo[] | null; + stop: () => void; setInput: (value: string | null) => void; activeDevice: string | null; }; -export const DeviceSelector = ({ name, devices, setInput, defaultOptionText, activeDevice }: Props) => { +export const DeviceSelector = ({ name, devices, setInput, 
defaultOptionText, activeDevice, stop }: Props) => { const [selectedDevice, setSelectedDevice] = useState(null); const onOptionChangeHandler = (event: ChangeEvent) => { @@ -32,13 +33,22 @@ export const DeviceSelector = ({ name, devices, setInput, defaultOptionText, act ))} + diff --git a/examples/use-camera-and-microphone-example/src/MainControls.tsx b/examples/use-camera-and-microphone-example/src/MainControls.tsx index cbcabf9..b7036a0 100644 --- a/examples/use-camera-and-microphone-example/src/MainControls.tsx +++ b/examples/use-camera-and-microphone-example/src/MainControls.tsx @@ -27,21 +27,26 @@ import { Badge } from "./Badge"; import { DeviceControls } from "./DeviceControls"; import { Radio } from "./Radio"; -type RestartChange = "stop" | "replace" | undefined; +type OnDeviceChange = "remove" | "replace" | undefined; +type OnDeviceStop = "remove" | "mute" | undefined; -const isRestartChange = (e: string | undefined): e is RestartChange => { - return e === undefined || e === "stop" || e === "replace"; -}; +const isDeviceChangeValue = (e: string | undefined): e is OnDeviceChange => + e === undefined || e === "remove" || e === "replace"; + +const isDeviceStopValue = (e: string | undefined): e is OnDeviceStop => + e === undefined || e === "remove" || e === "mute"; const tokenAtom = atomWithStorage("token", ""); const broadcastVideoOnConnectAtom = atomWithStorage("broadcastVideoOnConnect", undefined); const broadcastVideoOnDeviceStartAtom = atomWithStorage("broadcastVideoOnDeviceStart", undefined); -const broadcastVideoOnDeviceChangeAtom = atomWithStorage("broadcastVideoOnDeviceChange", undefined); +const videoOnDeviceChangeAtom = atomWithStorage("videoOnDeviceChange", undefined); +const videoOnDeviceStopAtom = atomWithStorage("videoOnDeviceStop", undefined); const broadcastAudioOnConnectAtom = atomWithStorage("broadcastAudioOnConnect", undefined); const broadcastAudioOnDeviceStartAtom = atomWithStorage("broadcastAudioOnDeviceStart", undefined); -const broadcastAudioOnDeviceChangeAtom = atomWithStorage("broadcastAudioOnDeviceChange", undefined); +const audioOnDeviceChangeAtom = atomWithStorage("audioOnDeviceChange", undefined); +const audioOnDeviceStopAtom = atomWithStorage("audioOnDeviceStop", undefined); const broadcastScreenShareOnConnectAtom = atomWithStorage( "broadcastScreenShareOnConnect", @@ -67,11 +72,13 @@ export const MainControls = () => { const [broadcastVideoOnConnect, setBroadcastVideoOnConnect] = useAtom(broadcastVideoOnConnectAtom); const [broadcastVideoOnDeviceStart, setBroadcastVideoOnDeviceStart] = useAtom(broadcastVideoOnDeviceStartAtom); - const [broadcastVideoOnDeviceChange, setBroadcastVideoOnDeviceChange] = useAtom(broadcastVideoOnDeviceChangeAtom); + const [broadcastVideoOnDeviceChange, setBroadcastVideoOnDeviceChange] = useAtom(videoOnDeviceChangeAtom); + const [broadcastVideoOnDeviceStop, setBroadcastVideoOnDeviceStop] = useAtom(videoOnDeviceStopAtom); const [broadcastAudioOnConnect, setBroadcastAudioOnConnect] = useAtom(broadcastAudioOnConnectAtom); const [broadcastAudioOnDeviceStart, setBroadcastAudioOnDeviceStart] = useAtom(broadcastAudioOnDeviceStartAtom); - const [broadcastAudioOnDeviceChange, setBroadcastAudioOnDeviceChange] = useAtom(broadcastAudioOnDeviceChangeAtom); + const [broadcastAudioOnDeviceChange, setBroadcastAudioOnDeviceChange] = useAtom(audioOnDeviceChangeAtom); + const [broadcastAudioOnDeviceStop, setBroadcastAudioOnDeviceStop] = useAtom(audioOnDeviceStopAtom); const [broadcastScreenShareOnConnect, setBroadcastScreenShareOnConnect] = 
useAtom(broadcastScreenShareOnConnectAtom); const [broadcastScreenShareOnDeviceStart, setBroadcastScreenShareOnDeviceStart] = useAtom( @@ -85,7 +92,8 @@ export const MainControls = () => { trackConstraints: VIDEO_TRACK_CONSTRAINTS, broadcastOnConnect: broadcastVideoOnConnect, broadcastOnDeviceStart: broadcastVideoOnDeviceStart, - broadcastOnDeviceChange: broadcastVideoOnDeviceChange, + onDeviceChange: broadcastVideoOnDeviceChange, + onDeviceStop: broadcastVideoOnDeviceStop, defaultTrackMetadata: DEFAULT_VIDEO_TRACK_METADATA, defaultSimulcastConfig: { enabled: true, @@ -97,7 +105,8 @@ export const MainControls = () => { trackConstraints: AUDIO_TRACK_CONSTRAINTS, broadcastOnConnect: broadcastAudioOnConnect, broadcastOnDeviceStart: broadcastAudioOnDeviceStart, - broadcastOnDeviceChange: broadcastAudioOnDeviceChange, + onDeviceChange: broadcastAudioOnDeviceChange, + onDeviceStop: broadcastAudioOnDeviceStop, defaultTrackMetadata: DEFAULT_AUDIO_TRACK_METADATA, }, screenShare: { @@ -221,15 +230,28 @@ export const MainControls = () => { name='Broadcast video on device change (default "replace")' value={broadcastVideoOnDeviceChange} set={(value) => { - if (isRestartChange(value)) setBroadcastVideoOnDeviceChange(value); + if (isDeviceChangeValue(value)) setBroadcastVideoOnDeviceChange(value); }} radioClass="radio-primary" options={[ { value: undefined, key: "undefined" }, - { value: "stop", key: "stop" }, + { value: "remove", key: "remove" }, { value: "replace", key: "replace" }, ]} /> + { + if (isDeviceStopValue(value)) setBroadcastVideoOnDeviceStop(value); + }} + radioClass="radio-primary" + options={[ + { value: undefined, key: "undefined" }, + { value: "remove", key: "remove" }, + { value: "mute", key: "mute" }, + ]} + /> { name='Broadcast audio on device change (default "replace")' value={broadcastAudioOnDeviceChange} set={(value) => { - if (isRestartChange(value)) setBroadcastAudioOnDeviceChange(value); + if (isDeviceChangeValue(value)) setBroadcastAudioOnDeviceChange(value); }} radioClass="radio-secondary" options={[ { value: undefined, key: "undefined" }, - { value: "stop", key: "stop" }, + { value: "remove", key: "remove" }, { value: "replace", key: "replace" }, ]} /> + { + if (isDeviceStopValue(value)) setBroadcastAudioOnDeviceStop(value); + }} + radioClass="radio-secondary" + options={[ + { value: undefined, key: "undefined" }, + { value: "remove", key: "remove" }, + { value: "mute", key: "mute" }, + ]} + /> { video.start(id); }} defaultOptionText="Select video device" + stop={() => { + video.stop(); + }} /> { audio.start(id); }} defaultOptionText="Select audio device" + stop={() => { + audio.stop(); + }} />
@@ -318,12 +359,15 @@ export const MainControls = () => {

Streaming:

-          {local.map(({ trackId, stream, track }) => (
-
-            {track?.kind === "video" && }
-            {track?.kind === "audio" && }
-
-          ))}
+
+          {local.map(({ trackId, stream, track }) => (
+            trackId: {trackId}
+            {track?.kind === "audio" && }
+            {track?.kind === "video" && }
+          ))}
+
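The MainControls changes above wire the renamed options into `useSetupMedia`: the old `broadcastOnDeviceChange: "replace" | "stop"` becomes `onDeviceChange: "replace" | "remove"`, and the new `onDeviceStop: "remove" | "mute"` decides what happens to an already-broadcast track when its device is stopped (the hooks fall back to `"mute"` when it is unset). A minimal sketch of the new config shape follows; it assumes a `useSetupMedia` hook exported from the app's own fishjam setup module, uses illustrative constraint and metadata values, and omits the `screenShare` section that the example app also passes.

```tsx
// Sketch under assumptions: `useSetupMedia` comes from the app's own setup module
// (created via `create<PeerMetadata, TrackMetadata>()`); the constraint and metadata
// values are illustrative, and the screenShare section is omitted for brevity.
import { useSetupMedia } from "./fishjamSetup";

export const useExampleMediaSetup = () =>
  useSetupMedia({
    camera: {
      trackConstraints: { width: 1280, height: 720 },
      broadcastOnConnect: true,
      broadcastOnDeviceStart: true,
      onDeviceChange: "replace", // switch devices in place; "remove" drops the broadcast track instead
      onDeviceStop: "mute",      // keep the broadcast track but stop sending media; "remove" tears it down
      defaultTrackMetadata: { type: "camera", active: true },
    },
    microphone: {
      trackConstraints: true,
      onDeviceChange: "replace",
      onDeviceStop: "remove",
      defaultTrackMetadata: { type: "microphone", active: true },
    },
  });
```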
diff --git a/package-lock.json b/package-lock.json index 445d8db..97198be 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "0.4.0", "license": "Apache-2.0", "dependencies": { - "@fishjam-dev/ts-client": "^0.5.0", + "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk", "events": "3.3.0", "lodash.isequal": "4.5.0" }, @@ -36,13 +36,13 @@ } }, "../ts-client-sdk": { - "name": "@jellyfish-dev/ts-client-sdk", - "version": "0.4.0", + "name": "@fishjam-dev/ts-client", + "version": "0.5.0", "extraneous": true, "license": "Apache-2.0", "dependencies": { "events": "^3.3.0", - "ts-proto": "^1.165.0", + "protobufjs": "^7.3.0", "typed-emitter": "^2.1.0", "uuid": "^9.0.1" }, @@ -58,15 +58,18 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-react-hooks": "^4.6.0", "fake-mediastreamtrack": "^1.2.0", + "husky": "^9.0.11", + "lint-staged": "^15.2.5", "prettier": "^3.1.0", "prettier-plugin-tailwindcss": "^0.5.7", "react": "^18.2.0", "testcontainers": "^10.3.2", + "ts-proto": "^1.176.0", "typed-emitter": "^2.1.0", - "typedoc": "^0.25.12", + "typedoc": "^0.25.13", "typedoc-plugin-external-resolver": "^1.0.3", "typedoc-plugin-mdn-links": "^3.1.6", - "typescript": "^4.9.5", + "typescript": "^5.4.5", "vitest": "^1.6.0", "zod": "^3.23.6" } @@ -144,11 +147,10 @@ }, "node_modules/@fishjam-dev/ts-client": { "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@fishjam-dev/ts-client/-/ts-client-0.5.0.tgz", - "integrity": "sha512-Hbcm0hcjovj8zEu9R6A5uFAVpZNLrO/ycV6W+wQyzIkJn6/sUgunqdqyRHYgsnNz2WMM1J4SI0P5tonMAvTViQ==", + "resolved": "git+ssh://git@github.com/fishjam-dev/ts-client-sdk.git#d254b040a184b9631c64e36bb80a453e0757005c", "dependencies": { "events": "^3.3.0", - "ts-proto": "^1.165.0", + "protobufjs": "^7.3.0", "typed-emitter": "^2.1.0", "uuid": "^9.0.1" } @@ -1185,17 +1187,6 @@ "node": ">=6" } }, - "node_modules/case-anything": { - "version": "2.1.13", - "resolved": "https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz", - "integrity": "sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==", - "engines": { - "node": ">=12.13" - }, - "funding": { - "url": "https://github.com/sponsors/mesqueeb" - } - }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -1346,17 +1337,6 @@ "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, - "node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -1434,14 +1414,6 @@ "node": ">=6.0.0" } }, - "node_modules/dprint-node": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/dprint-node/-/dprint-node-1.0.8.tgz", - "integrity": "sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==", - "dependencies": { - "detect-libc": "^1.0.3" - } - }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -2584,9 +2556,9 @@ } }, "node_modules/protobufjs": { - "version": "7.3.0", - "resolved": 
"https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.0.tgz", - "integrity": "sha512-YWD03n3shzV9ImZRX3ccbjqLxj7NokGN0V/ESiBV5xWqrommYHYiihuIyavq03pWSGqlyvYUFmfoMKd+1rPA/g==", + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.2.tgz", + "integrity": "sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg==", "hasInstallScript": true, "dependencies": { "@protobufjs/aspromise": "^1.1.2", @@ -3081,37 +3053,6 @@ "typescript": ">=4.2.0" } }, - "node_modules/ts-poet": { - "version": "6.9.0", - "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-6.9.0.tgz", - "integrity": "sha512-roe6W6MeZmCjRmppyfOURklO5tQFQ6Sg7swURKkwYJvV7dbGCrK28um5+51iW3twdPRKtwarqFAVMU6G1mvnuQ==", - "dependencies": { - "dprint-node": "^1.0.8" - } - }, - "node_modules/ts-proto": { - "version": "1.176.0", - "resolved": "https://registry.npmjs.org/ts-proto/-/ts-proto-1.176.0.tgz", - "integrity": "sha512-OWVrVUpNfZVfvKJZlSjRmCPkcNcox6IPRw3RIeyK2Z4d0Uoy1/gkumMwCVOAMznZcv80D1r9GyYHgYVdp6Cj+g==", - "dependencies": { - "case-anything": "^2.1.13", - "protobufjs": "^7.2.4", - "ts-poet": "^6.7.0", - "ts-proto-descriptors": "1.16.0" - }, - "bin": { - "protoc-gen-ts_proto": "protoc-gen-ts_proto" - } - }, - "node_modules/ts-proto-descriptors": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-1.16.0.tgz", - "integrity": "sha512-3yKuzMLpltdpcyQji1PJZRfoo4OJjNieKTYkQY8pF7xGKsYz/RHe3aEe4KiRxcinoBmnEhmuI+yJTxLb922ULA==", - "dependencies": { - "long": "^5.2.3", - "protobufjs": "^7.2.4" - } - }, "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", diff --git a/package.json b/package.json index e7cceab..7d51028 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "typescript": "5.4.2" }, "dependencies": { - "@fishjam-dev/ts-client": "^0.5.0", + "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk", "events": "3.3.0", "lodash.isequal": "4.5.0" }, diff --git a/readme.md b/readme.md index 3d0f0a0..f4c487f 100644 --- a/readme.md +++ b/readme.md @@ -112,7 +112,7 @@ export const App = () => { // Get screen sharing MediaStream navigator.mediaDevices.getDisplayMedia(SCREEN_SHARING_MEDIA_CONSTRAINTS).then((screenStream) => { // Add local MediaStream to webrtc - screenStream.getTracks().forEach((track) => api.addTrack(track, screenStream, { type: "screen" })); + screenStream.getTracks().forEach((track) => api.addTrack(track, { type: "screen" })); }); }} > diff --git a/src/Client.ts b/src/Client.ts index 2d8c2e9..e86e2fc 100644 --- a/src/Client.ts +++ b/src/Client.ts @@ -19,13 +19,7 @@ import type { DeviceManagerEvents } from "./DeviceManager"; import { DeviceManager } from "./DeviceManager"; import type { MediaDeviceType, ScreenShareManagerConfig } from "./ScreenShareManager"; import { ScreenShareManager } from "./ScreenShareManager"; -import type { - DeviceManagerConfig, - DeviceState, - InitMediaConfig, - UseCameraAndMicrophoneResult, - UseUserMediaState, -} from "./types"; +import type { DeviceManagerConfig, DeviceManagerInitConfig, Devices, DeviceState, MediaState } from "./types"; export type ClientApi = { local: PeerState | null; @@ -38,8 +32,8 @@ export type ClientApi = { bandwidthEstimation: bigint; status: PeerStatus; - media: UseUserMediaState | null; - devices: UseCameraAndMicrophoneResult; + media: MediaState | null; + devices: Devices; deviceManager: DeviceManager; screenShareManager: ScreenShareManager; }; @@ 
-249,6 +243,14 @@ export interface ClientEvents { event: Parameters["localTrackReplaced"]>[0], client: ClientApi, ) => void; + localTrackMuted: ( + event: Parameters["localTrackMuted"]>[0], + client: ClientApi, + ) => void; + localTrackUnmuted: ( + event: Parameters["localTrackUnmuted"]>[0], + client: ClientApi, + ) => void; localTrackBandwidthSet: ( event: Parameters["localTrackBandwidthSet"]>[0], client: ClientApi, @@ -304,8 +306,8 @@ export class Client extends (EventEmitter as { public bandwidthEstimation: bigint = BigInt(0); public status: PeerStatus = null; - public media: UseUserMediaState | null = null; - public devices: UseCameraAndMicrophoneResult; + public media: MediaState | null = null; + public devices: Devices; private currentMicrophoneTrackId: string | null = null; private currentCameraTrackId: string | null = null; @@ -332,6 +334,9 @@ export class Client extends (EventEmitter as { ) => Promise.reject(), removeTrack: () => Promise.reject(), replaceTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + muteTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + unmuteTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + updateTrackMetadata: NOOP, broadcast: null, status: null, stream: null, @@ -349,6 +354,9 @@ export class Client extends (EventEmitter as { addTrack: (_trackMetadata?: TrackMetadata, _maxBandwidth?: TrackBandwidthLimit) => Promise.reject(), removeTrack: () => Promise.reject(), replaceTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + muteTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + unmuteTrack: (_newTrackMetadata?: TrackMetadata) => Promise.reject(), + updateTrackMetadata: NOOP, broadcast: null, status: null, stream: null, @@ -620,6 +628,18 @@ export class Client extends (EventEmitter as { this.emit("localTrackReplaced", event, this); }); + this.tsClient?.on("localTrackMuted", (event) => { + this.stateToSnapshot(); + + this.emit("localTrackMuted", event, this); + }); + + this.tsClient?.on("localTrackUnmuted", (event) => { + this.stateToSnapshot(); + + this.emit("localTrackUnmuted", event, this); + }); + this.tsClient?.on("localTrackBandwidthSet", (event) => { this.stateToSnapshot(); @@ -697,21 +717,24 @@ export class Client extends (EventEmitter as { public addTrack( track: MediaStreamTrack, - stream: MediaStream, trackMetadata?: TrackMetadata, simulcastConfig: SimulcastConfig = { enabled: false, activeEncodings: [], disabledEncodings: [] }, maxBandwidth: TrackBandwidthLimit = 0, // unlimited bandwidth ): Promise { if (!this.tsClient) throw Error("Client not initialized"); - return this.tsClient.addTrack(track, stream, trackMetadata, simulcastConfig, maxBandwidth); + return this.tsClient.addTrack(track, trackMetadata, simulcastConfig, maxBandwidth); } public removeTrack(trackId: string): Promise { return this.tsClient.removeTrack(trackId); } - public replaceTrack(trackId: string, newTrack: MediaStreamTrack, newTrackMetadata?: TrackMetadata): Promise { + public replaceTrack( + trackId: string, + newTrack: MediaStreamTrack | null, + newTrackMetadata?: TrackMetadata, + ): Promise { return this.tsClient.replaceTrack(trackId, newTrack, newTrackMetadata); } @@ -751,6 +774,27 @@ export class Client extends (EventEmitter as { this.tsClient.updateTrackMetadata(trackId, trackMetadata); }; + // In most cases, the track is identified by its remote track ID. + // This ID comes from the ts-client `addTrack` method. + // However, we don't have that ID before the `addTrack` method returns it. 
+ // + // The `addTrack` method emits the `localTrackAdded` event. + // This event will refresh the internal state of this object. + // However, in that event handler, we don't yet have the remote track ID. + // Therefore, for that brief moment, we will use the local track ID from the MediaStreamTrack object to identify the track. + private getRemoteTrack = (remoteOrLocalTrackId: string | null): Track | null => { + if (!remoteOrLocalTrackId) return null; + + const tracks = this.tsClient?.getLocalEndpoint()?.tracks; + if (!tracks) return null; + + const trackByRemoteId = tracks?.get(remoteOrLocalTrackId); + if (trackByRemoteId) return this.trackContextToTrack(trackByRemoteId); + + const trackByLocalId = [...tracks.values()].find((track) => track.track?.id === remoteOrLocalTrackId); + return trackByLocalId ? this.trackContextToTrack(trackByLocalId) : null; + }; + private stateToSnapshot() { if (!this.deviceManager) Error("Device manager is null"); @@ -767,20 +811,12 @@ export class Client extends (EventEmitter as { localTracks[track.trackId] = this.trackContextToTrack(track); }); - const broadcastedVideoTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentCameraTrackId, - ); - - const broadcastedAudioTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentMicrophoneTrackId, - ); + const broadcastedVideoTrack = this.getRemoteTrack(this.currentCameraTrackId); + const broadcastedAudioTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); + const screenShareVideoTrack = this.getRemoteTrack(this.currentScreenShareTrackId); - const screenShareVideoTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentScreenShareTrackId, - ); - - const devices: UseCameraAndMicrophoneResult = { - init: (config?: InitMediaConfig) => { + const devices: Devices = { + init: (config?: DeviceManagerInitConfig) => { this?.deviceManager?.init(config); }, start: (config) => this?.deviceManager?.start(config), @@ -792,29 +828,30 @@ export class Client extends (EventEmitter as { start: (deviceId?: string) => { this?.deviceManager?.start({ videoDeviceId: deviceId ?? 
true }); }, - addTrack: ( + addTrack: async ( trackMetadata?: TrackMetadata, simulcastConfig?: SimulcastConfig, maxBandwidth?: TrackBandwidthLimit, ) => { + if (this.currentCameraTrackId) throw Error("Track already added"); + const media = this.deviceManager?.video.media; if (!media || !media.stream || !media.track) throw Error("Device is unavailable"); - const { track } = media; - - const prevTrack = Object.values(localTracks).find((track) => track.track?.id === this.currentCameraTrackId); - if (prevTrack) throw Error("Track already added"); + // see `getRemoteTrack()` explanation + this.currentCameraTrackId = media.track.id; - this.currentCameraTrackId = track?.id; + const remoteTrackId = await this.tsClient.addTrack(media.track, trackMetadata, simulcastConfig, maxBandwidth); - const stream = new MediaStream(); - stream.addTrack(track); + this.currentCameraTrackId = remoteTrackId; - return this.tsClient.addTrack(track, stream, trackMetadata, simulcastConfig, maxBandwidth); + return remoteTrackId; }, removeTrack: () => { - const prevTrack = Object.values(localTracks).find((track) => track.track?.id === this.currentCameraTrackId); + if (!this.currentCameraTrackId) throw Error("There is no video track id"); + + const prevTrack = this.getRemoteTrack(this.currentCameraTrackId); if (!prevTrack) throw Error("There is no video track"); @@ -823,7 +860,9 @@ export class Client extends (EventEmitter as { return this.tsClient.removeTrack(prevTrack.trackId); }, replaceTrack: async (newTrackMetadata?: TrackMetadata) => { - const prevTrack = Object.values(localTracks).find((track) => track.track?.id === this.currentCameraTrackId); + if (!this.currentCameraTrackId) throw Error("There is no track id"); + + const prevTrack = this.getRemoteTrack(this.currentCameraTrackId); if (!prevTrack) throw Error("There is no video track"); @@ -831,18 +870,38 @@ export class Client extends (EventEmitter as { if (!track) throw Error("New track is empty"); - this.currentCameraTrackId = track.id; + await this.tsClient.replaceTrack(prevTrack.trackId, track, newTrackMetadata); + }, + muteTrack: async (newTrackMetadata?: TrackMetadata) => { + if (!this.currentCameraTrackId) throw Error("There is no video track id"); - // todo This is a temporary solution to address an issue with ts-client - // Currently, ts-client does not update the track in the stream during the execution of the replaceTrack method - if (!this.devices.camera.broadcast?.stream) throw Error("New stream is empty"); + const prevTrack = this.getRemoteTrack(this.currentCameraTrackId); - this.devices.camera.broadcast?.stream?.removeTrack( - this.devices.camera.broadcast?.stream?.getVideoTracks()[0], - ); - this.devices.camera.broadcast?.stream.addTrack(track); + if (!prevTrack) throw Error("There is no video track"); - await this.tsClient.replaceTrack(prevTrack.trackId, track, newTrackMetadata); + await this.tsClient.replaceTrack(prevTrack.trackId, null, newTrackMetadata); + }, + unmuteTrack: async (newTrackMetadata?: TrackMetadata) => { + if (!this.currentCameraTrackId) throw Error("There is no video track id"); + + const prevTrack = this.getRemoteTrack(this.currentCameraTrackId); + + if (!prevTrack) throw Error("There is no video track"); + + const media = this.deviceManager?.video.media; + + if (!media || !media.stream || !media.track) throw Error("Device is unavailable"); + + await this.tsClient.replaceTrack(prevTrack.trackId, media.track, newTrackMetadata); + }, + updateTrackMetadata: (newTrackMetadata: TrackMetadata) => { + if (!this.currentCameraTrackId) 
throw Error("There is no video track id"); + + const prevTrack = this.getRemoteTrack(this.currentCameraTrackId); + + if (!prevTrack) throw Error("There is no video track"); + + this.tsClient.updateTrackMetadata(this.currentCameraTrackId, newTrackMetadata); }, broadcast: broadcastedVideoTrack ?? null, status: deviceManagerSnapshot?.video?.devicesStatus || null, @@ -860,29 +919,26 @@ export class Client extends (EventEmitter as { start: (deviceId?: string) => { this?.deviceManager?.start({ audioDeviceId: deviceId ?? true }); }, - addTrack: (trackMetadata?: TrackMetadata, maxBandwidth?: TrackBandwidthLimit) => { + addTrack: async (trackMetadata?: TrackMetadata, maxBandwidth?: TrackBandwidthLimit) => { const media = this.deviceManager?.audio.media; if (!media || !media.stream || !media.track) throw Error("Device is unavailable"); - const { track } = media; - const prevTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentMicrophoneTrackId, - ); + if (this.currentMicrophoneTrackId) throw Error("Track already added"); - if (prevTrack) throw Error("Track already added"); + // see `getRemoteTrack()` explanation + this.currentMicrophoneTrackId = media.track.id; - this.currentMicrophoneTrackId = track.id; + const remoteTrackId = await this.tsClient.addTrack(media.track, trackMetadata, undefined, maxBandwidth); - const stream = new MediaStream(); - stream.addTrack(track); + this.currentMicrophoneTrackId = remoteTrackId; - return this.tsClient.addTrack(track, stream, trackMetadata, undefined, maxBandwidth); + return remoteTrackId; }, removeTrack: () => { - const prevTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentMicrophoneTrackId, - ); + if (!this.currentMicrophoneTrackId) throw Error("There is no audio track id"); + + const prevTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); if (!prevTrack) throw Error("There is no audio track"); @@ -891,9 +947,9 @@ export class Client extends (EventEmitter as { return this.tsClient.removeTrack(prevTrack.trackId); }, replaceTrack: async (newTrackMetadata?: TrackMetadata) => { - const prevTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentMicrophoneTrackId, - ); + if (!this.currentMicrophoneTrackId) throw Error("There is no audio track id"); + + const prevTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); if (!prevTrack) throw Error("There is no audio track"); @@ -901,19 +957,38 @@ export class Client extends (EventEmitter as { if (!track) throw Error("New track is empty"); - this.currentMicrophoneTrackId = track.id; + await this.tsClient.replaceTrack(prevTrack.trackId, track, newTrackMetadata); + }, + muteTrack: async (newTrackMetadata?: TrackMetadata) => { + if (!this.currentMicrophoneTrackId) throw Error("There is no audio track id"); + + const prevTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); + + if (!prevTrack) throw Error("There is no audio track"); - // todo This is a temporary solution to address an issue with ts-client - // Currently, ts-client does not update the track in the stream during the execution of the replaceTrack method - if (!this.devices.microphone.broadcast?.stream) throw Error("New stream is empty"); + await this.tsClient.replaceTrack(prevTrack.trackId, null, newTrackMetadata); + }, + unmuteTrack: async (newTrackMetadata?: TrackMetadata) => { + if (!this.currentMicrophoneTrackId) throw Error("There is no audio track id"); - this.devices.microphone.broadcast?.stream?.removeTrack( - 
this.devices.microphone.broadcast?.stream?.getAudioTracks()[0], - ); + const prevTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); - this.devices.microphone.broadcast?.stream.addTrack(track); + if (!prevTrack) throw Error("There is no audio track"); - await this.tsClient.replaceTrack(prevTrack.trackId, track, newTrackMetadata); + const media = this.deviceManager?.audio.media; + + if (!media || !media.stream || !media.track) throw Error("Device is unavailable"); + + await this.tsClient.replaceTrack(prevTrack.trackId, media.track, newTrackMetadata); + }, + updateTrackMetadata: (newTrackMetadata: TrackMetadata) => { + if (!this.currentMicrophoneTrackId) throw Error("There is no audio track id"); + + const prevTrack = this.getRemoteTrack(this.currentMicrophoneTrackId); + + if (!prevTrack) throw Error("There is no audio track"); + + this.tsClient.updateTrackMetadata(this.currentMicrophoneTrackId, newTrackMetadata); }, broadcast: broadcastedAudioTrack ?? null, status: deviceManagerSnapshot?.audio?.devicesStatus || null, @@ -933,29 +1008,28 @@ export class Client extends (EventEmitter as { start: (config?: ScreenShareManagerConfig) => { this.screenShareManager?.start(config); }, - addTrack: (trackMetadata?: TrackMetadata, maxBandwidth?: TrackBandwidthLimit) => { + addTrack: async (trackMetadata?: TrackMetadata, maxBandwidth?: TrackBandwidthLimit) => { const media = this.screenShareManager?.getSnapshot().videoMedia; if (!media || !media.stream || !media.track) throw Error("Device is unavailable"); - const { stream, track } = media; + if (this.currentScreenShareTrackId) throw Error("Screen share track already added"); - const prevTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentScreenShareTrackId, - ); + // see `getRemoteTrack()` explanation + this.currentScreenShareTrackId = media.track.id; - if (prevTrack) throw Error("Track already added"); + const trackId = await this.tsClient.addTrack(media.track, trackMetadata, undefined, maxBandwidth); - this.currentScreenShareTrackId = track?.id; + this.currentScreenShareTrackId = trackId; - return this.tsClient.addTrack(track, stream, trackMetadata, undefined, maxBandwidth); + return trackId; }, removeTrack: () => { - const prevTrack = Object.values(localTracks).find( - (track) => track.track?.id === this.currentScreenShareTrackId, - ); + if (!this.currentScreenShareTrackId) throw Error("There is no screen share track id"); - if (!prevTrack) throw Error("There is no video track"); + const prevTrack = this.getRemoteTrack(this.currentScreenShareTrackId); + + if (!prevTrack) throw Error("There is no screen share video track"); this.currentScreenShareTrackId = null; diff --git a/src/DeviceManager.ts b/src/DeviceManager.ts index eaa63c1..478c5b4 100644 --- a/src/DeviceManager.ts +++ b/src/DeviceManager.ts @@ -7,10 +7,10 @@ import type { DeviceState, Errors, GetMedia, - InitMediaConfig, + DeviceManagerInitConfig, Media, StorageConfig, - UseUserMediaStartConfig, + DeviceManagerStartConfig, } from "./types"; import { NOT_FOUND_ERROR, OVERCONSTRAINED_ERROR, parseError, PERMISSION_DENIED, UNHANDLED_ERROR } from "./types"; @@ -336,7 +336,7 @@ export class DeviceManager extends (EventEmitter as new () => TypedEmitter { + public async init(config?: DeviceManagerInitConfig): Promise<"initialized" | "error"> { if (this.status !== "uninitialized") { return Promise.reject("Device manager already initialized"); } @@ -508,7 +508,7 @@ export class DeviceManager extends (EventEmitter as new () => TypedEmitter = { useSelector: 
(selector: Selector) => Result; useTracks: () => Record>; useSetupMedia: (config: UseSetupMediaConfig) => UseSetupMediaResult; - useCamera: () => UseCameraAndMicrophoneResult["camera"]; - useMicrophone: () => UseCameraAndMicrophoneResult["microphone"]; - useScreenShare: () => UseScreenShareResult; + useCamera: () => Devices["camera"]; + useMicrophone: () => Devices["microphone"]; + useScreenShare: () => ScreenShareAPI; useClient: () => Client; }; @@ -115,6 +115,8 @@ export const create = ( client.on("localTrackAdded", callback); client.on("localTrackRemoved", callback); client.on("localTrackReplaced", callback); + client.on("localTrackMuted", callback); + client.on("localTrackUnmuted", callback); client.on("localTrackBandwidthSet", callback); client.on("localTrackEncodingBandwidthSet", callback); client.on("localTrackEncodingEnabled", callback); @@ -164,6 +166,9 @@ export const create = ( client.removeListener("localTrackAdded", callback); client.removeListener("localTrackRemoved", callback); client.removeListener("localTrackReplaced", callback); + client.removeListener("localTrackMuted", callback); + client.removeListener("localTrackUnmuted", callback); + client.removeListener("localTrackBandwidthSet", callback); client.removeListener("localTrackEncodingBandwidthSet", callback); client.removeListener("localTrackEncodingEnabled", callback); @@ -241,13 +246,13 @@ export const create = ( const useTracks = () => useSelector((s) => s.tracks); const useClient = () => useSelector((s) => s.client); - const useCamera = (): UseCameraResult => { + const useCamera = (): CameraAPI => { const { state } = useFishjamContext(); return state.devices.camera; }; - const useMicrophone = (): UseMicrophoneResult => { + const useMicrophone = (): MicrophoneAPI => { const { state } = useFishjamContext(); return state.devices.microphone; @@ -286,7 +291,7 @@ export const create = ( event: { mediaDeviceType: MediaDeviceType }, client: ClientApi, ) => { - const broadcastOnDeviceChange = configRef.current.camera.broadcastOnDeviceChange ?? "replace"; + const broadcastOnDeviceChange = configRef.current.camera.onDeviceChange ?? "replace"; if (client.status === "joined" && event.mediaDeviceType === "userMedia" && !pending) { if (!client.devices.camera.broadcast?.stream && configRef.current.camera.broadcastOnDeviceStart) { @@ -307,7 +312,7 @@ export const create = ( await client.devices.camera.replaceTrack().finally(() => { pending = false; }); - } else if (client.devices.camera.broadcast?.stream && broadcastOnDeviceChange === "stop") { + } else if (client.devices.camera.broadcast?.stream && broadcastOnDeviceChange === "remove") { pending = true; await client.devices.camera.removeTrack().finally(() => { @@ -360,7 +365,13 @@ export const create = ( event.trackType === "video" && client.devices.camera.broadcast?.stream ) { - await client.devices.camera.removeTrack(); + const onDeviceStop = configRef.current.camera.onDeviceStop ?? "mute"; + + if (onDeviceStop === "mute") { + await client.devices.camera.muteTrack(); + } else { + await client.devices.camera.removeTrack(); + } } }; @@ -396,7 +407,7 @@ export const create = ( event: { mediaDeviceType: MediaDeviceType }, client: ClientApi, ) => { - const broadcastOnDeviceChange = configRef.current.microphone.broadcastOnDeviceChange ?? "replace"; + const broadcastOnDeviceChange = configRef.current.microphone.onDeviceChange ?? 
"replace"; if (client.status === "joined" && event.mediaDeviceType === "userMedia" && !pending) { if (!client.devices.microphone.broadcast?.stream && configRef.current.microphone.broadcastOnDeviceStart) { @@ -416,7 +427,7 @@ export const create = ( await client.devices.microphone.replaceTrack().finally(() => { pending = false; }); - } else if (client.devices.microphone.broadcast?.stream && broadcastOnDeviceChange === "stop") { + } else if (client.devices.microphone.broadcast?.stream && broadcastOnDeviceChange === "remove") { pending = true; await client.devices.microphone.removeTrack().finally(() => { @@ -459,24 +470,27 @@ export const create = ( }, [state.client]); useEffect(() => { - const removeOnMicrophoneStopped: ClientEvents["deviceStopped"] = async ( - event, - client, - ) => { + const onMicrophoneStopped: ClientEvents["deviceStopped"] = async (event, client) => { if ( client.status === "joined" && event.mediaDeviceType === "userMedia" && event.trackType === "audio" && client.devices.microphone.broadcast?.stream ) { - await client.devices.microphone.removeTrack(); + const onDeviceStop = configRef.current.microphone.onDeviceStop ?? "mute"; + + if (onDeviceStop === "mute") { + await client.devices.microphone.muteTrack(); + } else { + await client.devices.microphone.removeTrack(); + } } }; - state.client.on("deviceStopped", removeOnMicrophoneStopped); + state.client.on("deviceStopped", onMicrophoneStopped); return () => { - state.client.removeListener("deviceStopped", removeOnMicrophoneStopped); + state.client.removeListener("deviceStopped", onMicrophoneStopped); }; }, [state.client]); @@ -532,10 +546,7 @@ export const create = ( }, [state.client]); useEffect(() => { - const removeOnScreenShareStopped: ClientEvents["deviceStopped"] = async ( - event, - client, - ) => { + const onScreenShareStop: ClientEvents["deviceStopped"] = async (event, client) => { if ( client.status === "joined" && event.mediaDeviceType === "displayMedia" && @@ -545,10 +556,10 @@ export const create = ( } }; - state.client.on("deviceStopped", removeOnScreenShareStopped); + state.client.on("deviceStopped", onScreenShareStop); return () => { - state.client.removeListener("deviceStopped", removeOnScreenShareStopped); + state.client.removeListener("deviceStopped", onScreenShareStop); }; }, [state.client]); @@ -581,7 +592,7 @@ export const create = ( ); }; - const useScreenShare = (): UseScreenShareResult => { + const useScreenShare = (): ScreenShareAPI => { const { state } = useFishjamContext(); return state.devices.screenShare; }; diff --git a/src/index.ts b/src/index.ts index fbac0de..bef4957 100644 --- a/src/index.ts +++ b/src/index.ts @@ -20,10 +20,10 @@ export type { export type { DeviceManagerConfig, StorageConfig, - UseCameraAndMicrophoneResult, - UseCameraResult, - UseScreenShareResult, - UseMicrophoneResult, + Devices, + CameraAPI, + ScreenShareAPI, + MicrophoneAPI, UseSetupMediaResult, UseSetupMediaConfig, } from "./types"; diff --git a/src/state.types.ts b/src/state.types.ts index 29077a7..d02b357 100644 --- a/src/state.types.ts +++ b/src/state.types.ts @@ -1,6 +1,6 @@ import type { TrackEncoding, VadStatus, SimulcastConfig } from "@fishjam-dev/ts-client"; -import type { UseUserMediaState } from "./types"; -import type { UseCameraAndMicrophoneResult } from "./types"; +import type { MediaState } from "./types"; +import type { Devices } from "./types"; import type { Client } from "./Client"; import type { DeviceManager } from "./DeviceManager"; import type { ScreenShareManager } from "./ScreenShareManager"; 
@@ -48,8 +48,8 @@ export type State = { tracks: Record>; bandwidthEstimation: bigint; status: PeerStatus; - media: UseUserMediaState | null; - devices: UseCameraAndMicrophoneResult; + media: MediaState | null; + devices: Devices; client: Client; deviceManager: DeviceManager; screenShareManager: ScreenShareManager; diff --git a/src/types.ts b/src/types.ts index 4b65e13..9e7b5ad 100644 --- a/src/types.ts +++ b/src/types.ts @@ -22,12 +22,12 @@ export type DeviceState = { error: DeviceError | null; }; -export type UseUserMediaState = { +export type MediaState = { video: DeviceState; audio: DeviceState; }; -export type InitMediaConfig = { +export type DeviceManagerInitConfig = { videoTrackConstraints?: boolean | MediaTrackConstraints; audioTrackConstraints?: boolean | MediaTrackConstraints; }; @@ -46,7 +46,7 @@ export type StorageConfig = { saveLastVideoDevice: (info: MediaDeviceInfo) => void; }; -export type UseUserMediaStartConfig = { +export type DeviceManagerStartConfig = { audioDeviceId?: string | boolean; videoDeviceId?: string | boolean; }; @@ -82,7 +82,13 @@ export type UseSetupMediaConfig = { * Determines whether track should be replaced when the user requests a device. * default: replace */ - broadcastOnDeviceChange?: "replace" | "stop"; + onDeviceChange?: "replace" | "remove"; + /** + * Determines whether currently broadcasted track should be removed or muted + * when the user stopped a device. + * default: replace + */ + onDeviceStop?: "remove" | "mute"; trackConstraints: boolean | MediaTrackConstraints; defaultTrackMetadata?: TrackMetadata; @@ -99,10 +105,18 @@ export type UseSetupMediaConfig = { */ broadcastOnDeviceStart?: boolean; /** - * Determines whether track should be replaced when the user requests a device. + * Determines whether currently broadcasted track should be replaced or stopped + * when the user changed a device. + * default: replace + */ + onDeviceChange?: "replace" | "remove"; + + /** + * Determines whether currently broadcasted track should be removed or muted + * when the user stopped a device. 
* default: replace */ - broadcastOnDeviceChange?: "replace" | "stop"; + onDeviceStop?: "remove" | "mute"; trackConstraints: boolean | MediaTrackConstraints; defaultTrackMetadata?: TrackMetadata; @@ -131,7 +145,7 @@ export type UseSetupMediaResult = { init: () => void; }; -export type UseCameraResult = { +export type CameraAPI = { stop: () => void; setEnable: (value: boolean) => void; start: (deviceId?: string) => void; @@ -142,6 +156,9 @@ export type UseCameraResult = { ) => Promise; removeTrack: () => Promise; replaceTrack: (newTrackMetadata?: TrackMetadata) => Promise; + muteTrack: (newTrackMetadata?: TrackMetadata) => Promise; + unmuteTrack: (newTrackMetadata?: TrackMetadata) => Promise; + updateTrackMetadata: (newTrackMetadata: TrackMetadata) => void; broadcast: Track | null; status: DevicesStatus | null; // todo how to remove null stream: MediaStream | null; @@ -153,13 +170,16 @@ export type UseCameraResult = { devices: MediaDeviceInfo[] | null; }; -export type UseMicrophoneResult = { +export type MicrophoneAPI = { stop: () => void; setEnable: (value: boolean) => void; start: (deviceId?: string) => void; addTrack: (trackMetadata?: TrackMetadata, maxBandwidth?: TrackBandwidthLimit) => Promise; removeTrack: () => Promise; replaceTrack: (newTrackMetadata?: TrackMetadata) => Promise; + muteTrack: (newTrackMetadata?: TrackMetadata) => Promise; + unmuteTrack: (newTrackMetadata?: TrackMetadata) => Promise; + updateTrackMetadata: (newTrackMetadata: TrackMetadata) => void; broadcast: Track | null; status: DevicesStatus | null; stream: MediaStream | null; @@ -171,7 +191,7 @@ export type UseMicrophoneResult = { devices: MediaDeviceInfo[] | null; }; -export type UseScreenShareResult = { +export type ScreenShareAPI = { stop: () => void; setEnable: (value: boolean) => void; start: (config?: ScreenShareManagerConfig) => void; @@ -187,12 +207,12 @@ export type UseScreenShareResult = { error: DeviceError | null; }; -export type UseCameraAndMicrophoneResult = { - camera: UseCameraResult; - microphone: UseMicrophoneResult; - screenShare: UseScreenShareResult; +export type Devices = { + camera: CameraAPI; + microphone: MicrophoneAPI; + screenShare: ScreenShareAPI; init: (config?: DeviceManagerConfig) => void; - start: (config: UseUserMediaStartConfig) => void; + start: (config: DeviceManagerStartConfig) => void; }; export const PERMISSION_DENIED: DeviceError = { name: "NotAllowedError" };
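The renamed `CameraAPI` and `MicrophoneAPI` now expose `muteTrack`, `unmuteTrack`, and `updateTrackMetadata` alongside the existing add/remove/replace methods. As the Client.ts changes above show, mute is implemented as `replaceTrack(trackId, null, metadata)` (the broadcast slot stays allocated but no media is sent) and unmute re-attaches the device's current `MediaStreamTrack`. A minimal consumer sketch, assuming a `useCamera` hook exported from the app's own setup module and an illustrative `TrackMetadata` shape:

```tsx
// Sketch under assumptions: `useCamera` comes from the app's own setup module
// (created via `create<PeerMetadata, TrackMetadata>()`); the metadata shape is illustrative.
import { useState } from "react";
import { useCamera } from "./fishjamSetup";

export const CameraMuteToggle = () => {
  const camera = useCamera();
  const [muted, setMuted] = useState(false);

  const toggle = async () => {
    if (!camera.broadcast) return; // nothing is being broadcast yet

    if (muted) {
      // Re-attaches the current camera MediaStreamTrack to the existing broadcast
      // (internally replaceTrack(trackId, track, metadata)).
      await camera.unmuteTrack({ type: "camera", active: true });
    } else {
      // Keeps the broadcast slot but stops sending media
      // (internally replaceTrack(trackId, null, metadata)).
      await camera.muteTrack({ type: "camera", active: false });
    }
    setMuted(!muted);
  };

  return (
    <button type="button" onClick={toggle} disabled={!camera.broadcast}>
      {muted ? "Unmute camera" : "Mute camera"}
    </button>
  );
};
```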