diff --git a/examples/use-camera-and-microphone-example/package-lock.json b/examples/use-camera-and-microphone-example/package-lock.json
index 0dc39e1..14590b2 100644
--- a/examples/use-camera-and-microphone-example/package-lock.json
+++ b/examples/use-camera-and-microphone-example/package-lock.json
@@ -33,7 +33,7 @@
"version": "0.4.0",
"license": "Apache-2.0",
"dependencies": {
- "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk",
+ "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk#reconnect-on-network-change",
"events": "3.3.0",
"lodash.isequal": "4.5.0"
},
@@ -42,7 +42,7 @@
"@types/events": "^3.0.3",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.11.27",
- "@types/react": "18.2.65",
+ "@types/react": "18.3.3",
"@typescript-eslint/eslint-plugin": "^7.2.0",
"@typescript-eslint/parser": "^7.8.0",
"eslint": "^8.57.0",
@@ -50,13 +50,13 @@
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.6",
"prettier": "3.3.2",
- "prettier-plugin-tailwindcss": "0.5.12",
+ "prettier-plugin-tailwindcss": "0.6.4",
"react": "^18.2.0",
"testcontainers": "^10.7.2",
"typed-emitter": "^2.1.0",
"typedoc": "^0.25.12",
"typedoc-plugin-mdn-links": "^3.1.18",
- "typescript": "5.4.2"
+ "typescript": "5.4.5"
}
},
"node_modules/@alloc/quick-lru": {
diff --git a/examples/use-camera-and-microphone-example/src/Badge.tsx b/examples/use-camera-and-microphone-example/src/Badge.tsx
index 0c64366..9d0721a 100644
--- a/examples/use-camera-and-microphone-example/src/Badge.tsx
+++ b/examples/use-camera-and-microphone-example/src/Badge.tsx
@@ -1,26 +1,12 @@
-import type { PeerStatus } from "@fishjam-dev/react-client";
-
type Props = {
- status: PeerStatus;
-};
-
-const getBadgeColor = (status: PeerStatus) => {
- switch (status) {
- case "joined":
- return "badge-success";
- case "error":
- return "badge-error";
- case "authenticated":
- case "connected":
- return "badge-info";
- case "connecting":
- return "badge-warning";
- }
+ name: string;
+ status: string | null;
+ className: string;
};
-export const Badge = ({ status }: Props) => (
+export const Badge = ({ name, status, className }: Props) => (
- Status:
- {status}
+ {name}
+ {status}
);
diff --git a/examples/use-camera-and-microphone-example/src/MainControls.tsx b/examples/use-camera-and-microphone-example/src/MainControls.tsx
index b7036a0..d02a127 100644
--- a/examples/use-camera-and-microphone-example/src/MainControls.tsx
+++ b/examples/use-camera-and-microphone-example/src/MainControls.tsx
@@ -11,6 +11,7 @@ import {
useConnect,
useDisconnect,
useMicrophone,
+ useReconnection,
useScreenShare,
useSelector,
useSetupMedia,
@@ -26,6 +27,8 @@ import { AUDIO_TRACK_CONSTRAINTS, VIDEO_TRACK_CONSTRAINTS } from "@fishjam-dev/r
import { Badge } from "./Badge";
import { DeviceControls } from "./DeviceControls";
import { Radio } from "./Radio";
+import { useReconnectLogs } from "./utils/useReconnectLogs";
+import { getPeerStatusBadgeColor, getReconnectionStatusBadgeColor } from "./utils/BadgeUtils";
type OnDeviceChange = "remove" | "replace" | undefined;
type OnDeviceStop = "remove" | "mute" | undefined;
@@ -62,8 +65,12 @@ const autostartAtom = atomWithStorage("autostart", false, undefined, {
export const MainControls = () => {
const [token, setToken] = useAtom(tokenAtom);
+ // for debugging
+ useReconnectLogs();
+
const connect = useConnect();
const disconnect = useDisconnect();
+ const reconnection = useReconnection();
const local = useSelector((s) => Object.values(s.local?.tracks || {}));
const client = useClient();
@@ -203,7 +210,13 @@ export const MainControls = () => {
-
+
+
+
{authError && (
diff --git a/examples/use-camera-and-microphone-example/src/fishjamSetup.tsx b/examples/use-camera-and-microphone-example/src/fishjamSetup.tsx
index 7f41ef5..fb40d24 100644
--- a/examples/use-camera-and-microphone-example/src/fishjamSetup.tsx
+++ b/examples/use-camera-and-microphone-example/src/fishjamSetup.tsx
@@ -61,9 +61,15 @@ export const {
useScreenShare,
useSelector,
useClient,
+ useReconnection,
} = create<PeerMetadata, TrackMetadata>({
peerMetadataParser: (obj) => peerMetadataSchema.parse(obj),
trackMetadataParser: (obj) => trackMetadataSchema.passthrough().parse(obj),
+ reconnect: {
+ delay: 1000,
+ initialDelay: 500,
+ maxAttempts: 1000,
+ },
});
export const useAuthErrorReason = () => {
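
The reconnect options above are what drive the new retry loop. A back-of-the-envelope sketch of what they add up to, assuming `initialDelay` is waited once before the first retry and `delay` between every following attempt (the exact backoff semantics are an assumption, not taken from the ts-client docs):

```ts
// Assumed semantics: wait `initialDelay` once, then `delay` between retries,
// up to `maxAttempts` attempts before "reconnectionRetriesLimitReached" fires.
const reconnect = { delay: 1000, initialDelay: 500, maxAttempts: 1000 };

const worstCaseMs = reconnect.initialDelay + (reconnect.maxAttempts - 1) * reconnect.delay;
console.log(`~${(worstCaseMs / 60_000).toFixed(1)} min of retrying`); // ~16.7 min
```
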
diff --git a/examples/use-camera-and-microphone-example/src/utils/BadgeUtils.ts b/examples/use-camera-and-microphone-example/src/utils/BadgeUtils.ts
new file mode 100644
index 0000000..89ff7fd
--- /dev/null
+++ b/examples/use-camera-and-microphone-example/src/utils/BadgeUtils.ts
@@ -0,0 +1,28 @@
+import type { PeerStatus } from "@fishjam-dev/react-client";
+import type { ReconnectionStatus } from "@fishjam-dev/ts-client";
+
+export const getPeerStatusBadgeColor = (status: PeerStatus): string => {
+ switch (status) {
+ case "joined":
+ return "badge-success";
+ case "error":
+ return "badge-error";
+ case "authenticated":
+ case "connected":
+ return "badge-info";
+ case "connecting":
+ return "badge-warning";
+ }
+ return "";
+};
+
+export const getReconnectionStatusBadgeColor = (status: ReconnectionStatus) => {
+ switch (status) {
+ case "idle":
+ return "badge-info";
+ case "error":
+ return "badge-error";
+ case "reconnecting":
+ return "badge-warning";
+ }
+};
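
A hypothetical sketch of wiring these helpers into the Badge component; the labels and placement are assumptions, the signatures come from Badge.tsx and BadgeUtils.ts above:

```tsx
import type { PeerStatus } from "@fishjam-dev/react-client";
import type { ReconnectionStatus } from "@fishjam-dev/ts-client";
import { Badge } from "../Badge";
import { getPeerStatusBadgeColor, getReconnectionStatusBadgeColor } from "./BadgeUtils";

type Props = { peerStatus: PeerStatus; reconnectionStatus: ReconnectionStatus };

// Renders one badge for the peer status and one for the reconnection status.
export const StatusBadges = ({ peerStatus, reconnectionStatus }: Props) => (
  <>
    <Badge name="Status:" status={peerStatus} className={getPeerStatusBadgeColor(peerStatus)} />
    <Badge
      name="Reconnection:"
      status={reconnectionStatus}
      className={getReconnectionStatusBadgeColor(reconnectionStatus) ?? ""}
    />
  </>
);
```
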
diff --git a/examples/use-camera-and-microphone-example/src/utils/useReconnectLogs.tsx b/examples/use-camera-and-microphone-example/src/utils/useReconnectLogs.tsx
new file mode 100644
index 0000000..e7c7b15
--- /dev/null
+++ b/examples/use-camera-and-microphone-example/src/utils/useReconnectLogs.tsx
@@ -0,0 +1,85 @@
+import type { ClientEvents } from "@fishjam-dev/react-client";
+import type { PeerMetadata, TrackMetadata } from "../fishjamSetup";
+import { useClient } from "../fishjamSetup";
+import { useEffect } from "react";
+
+/* eslint-disable no-console */
+export const useReconnectLogs = () => {
+ const client = useClient();
+
+ useEffect(() => {
+ if (!client) return;
+
+    const onReconnectionStarted: ClientEvents<PeerMetadata, TrackMetadata>["reconnectionStarted"] = () => {
+ console.log("%c" + "reconnectionStarted", "color:green");
+ };
+
+    const onReconnected: ClientEvents<PeerMetadata, TrackMetadata>["reconnected"] = () => {
+ console.log("%cReconnected", "color:green");
+ };
+
+ const onReconnectionRetriesLimitReached: ClientEvents<
+ PeerMetadata,
+ TrackMetadata
+ >["reconnectionRetriesLimitReached"] = () => {
+ console.log("%cReconnectionRetriesLimitReached", "color:red");
+ };
+
+    const onSocketError: ClientEvents<PeerMetadata, TrackMetadata>["socketError"] = (error: Event) => {
+ console.warn(error);
+ };
+
+    const onConnectionError: ClientEvents<PeerMetadata, TrackMetadata>["connectionError"] = (error, client) => {
+ if (client.isReconnecting()) {
+ console.log("%c" + "During reconnection: connectionError %o", "color:gray", {
+ error,
+ // @ts-expect-error
+ iceConnectionState: error?.event?.target?.["iceConnectionState"],
+ });
+ } else {
+ // @ts-expect-error
+ console.warn({ error, state: error?.event?.target?.["iceConnectionState"] });
+ }
+ };
+
+    const onJoinError: ClientEvents<PeerMetadata, TrackMetadata>["joinError"] = (event) => {
+ console.log(event);
+ };
+
+    const onAuthError: ClientEvents<PeerMetadata, TrackMetadata>["authError"] = (reason) => {
+ if (client.isReconnecting()) {
+ console.log("%c" + "During reconnection: authError: " + reason, "color:gray");
+ }
+ };
+
+    const onSocketClose: ClientEvents<PeerMetadata, TrackMetadata>["socketClose"] = (event) => {
+ if (client.isReconnecting()) {
+ console.log("%c" + "During reconnection: Signaling socket closed", "color:gray");
+ } else {
+ console.warn(event);
+ }
+ };
+
+ client.on("reconnectionStarted", onReconnectionStarted);
+ client.on("reconnected", onReconnected);
+ client.on("reconnectionRetriesLimitReached", onReconnectionRetriesLimitReached);
+
+ client.on("socketError", onSocketError);
+ client.on("connectionError", onConnectionError);
+ client.on("joinError", onJoinError);
+ client.on("authError", onAuthError);
+ client.on("socketClose", onSocketClose);
+
+ return () => {
+ client.off("reconnectionStarted", onReconnectionStarted);
+ client.off("reconnected", onReconnected);
+ client.off("reconnectionRetriesLimitReached", onReconnectionRetriesLimitReached);
+
+ client.off("socketError", onSocketError);
+ client.off("connectionError", onConnectionError);
+ client.off("joinError", onJoinError);
+ client.off("authError", onAuthError);
+ client.off("socketClose", onSocketClose);
+ };
+ }, [client]);
+};
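
A hedged usage sketch for the logger: it only needs to be rendered once anywhere under the context provider. FishjamContextProvider is assumed to be re-exported from fishjamSetup, and the wrapper component is made up (MainControls already calls the hook itself, as shown above):

```tsx
import { FishjamContextProvider } from "./fishjamSetup";
import { MainControls } from "./MainControls";
import { useReconnectLogs } from "./utils/useReconnectLogs";

// Renders nothing; mounting it just attaches the console listeners above.
const ReconnectDebugLogs = () => {
  useReconnectLogs();
  return null;
};

export const App = () => (
  <FishjamContextProvider>
    <ReconnectDebugLogs />
    <MainControls />
  </FishjamContextProvider>
);
```
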
diff --git a/package-lock.json b/package-lock.json
index fe476c4..7259a62 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,7 @@
"version": "0.4.0",
"license": "Apache-2.0",
"dependencies": {
- "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk",
+ "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk#main",
"events": "3.3.0",
"lodash.isequal": "4.5.0"
},
@@ -35,45 +35,6 @@
"typescript": "5.4.5"
}
},
- "../ts-client-sdk": {
- "name": "@fishjam-dev/ts-client",
- "version": "0.5.0",
- "extraneous": true,
- "license": "Apache-2.0",
- "dependencies": {
- "events": "^3.3.0",
- "protobufjs": "^7.3.0",
- "typed-emitter": "^2.1.0",
- "uuid": "^9.0.1"
- },
- "devDependencies": {
- "@playwright/test": "^1.40.1",
- "@types/events": "^3.0.3",
- "@types/node": "^20.10.3",
- "@types/uuid": "^9.0.8",
- "@typescript-eslint/eslint-plugin": "^7.8.0",
- "@typescript-eslint/parser": "^7.8.0",
- "@vitest/coverage-v8": "^1.6.0",
- "eslint": "^8.55.0",
- "eslint-config-prettier": "^9.1.0",
- "eslint-plugin-react-hooks": "^4.6.0",
- "fake-mediastreamtrack": "^1.2.0",
- "husky": "^9.0.11",
- "lint-staged": "^15.2.5",
- "prettier": "^3.1.0",
- "prettier-plugin-tailwindcss": "^0.5.7",
- "react": "^18.2.0",
- "testcontainers": "^10.3.2",
- "ts-proto": "^1.176.0",
- "typed-emitter": "^2.1.0",
- "typedoc": "^0.25.13",
- "typedoc-plugin-external-resolver": "^1.0.3",
- "typedoc-plugin-mdn-links": "^3.1.6",
- "typescript": "5.4.5",
- "vitest": "^1.6.0",
- "zod": "^3.23.6"
- }
- },
"node_modules/@aashutoshrathi/word-wrap": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
@@ -147,7 +108,7 @@
},
"node_modules/@fishjam-dev/ts-client": {
"version": "0.5.0",
- "resolved": "git+ssh://git@github.com/fishjam-dev/ts-client-sdk.git#d254b040a184b9631c64e36bb80a453e0757005c",
+ "resolved": "git+ssh://git@github.com/fishjam-dev/ts-client-sdk.git#e71e738d410ebc359c01edbbd9b77844ec8e25d2",
"dependencies": {
"events": "^3.3.0",
"protobufjs": "^7.3.0",
diff --git a/package.json b/package.json
index e38a693..de82bf1 100644
--- a/package.json
+++ b/package.json
@@ -65,7 +65,7 @@
"typescript": "5.4.5"
},
"dependencies": {
- "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk",
+ "@fishjam-dev/ts-client": "github:fishjam-dev/ts-client-sdk#main",
"events": "3.3.0",
"lodash.isequal": "4.5.0"
},
diff --git a/src/Client.ts b/src/Client.ts
index e86e2fc..eabeb1f 100644
--- a/src/Client.ts
+++ b/src/Client.ts
@@ -8,6 +8,7 @@ import type {
CreateConfig,
MessageEvents,
Peer,
+ ReconnectionStatus,
SimulcastConfig,
TrackBandwidthLimit,
TrackContext,
@@ -36,6 +37,8 @@ export type ClientApi = {
devices: Devices;
deviceManager: DeviceManager;
screenShareManager: ScreenShareManager;
+
+ isReconnecting: () => boolean;
};
export interface ClientEvents<PeerMetadata, TrackMetadata> {
@@ -69,6 +72,15 @@ export interface ClientEvents {
/** Emitted when the connection is closed */
  disconnected: (client: ClientApi<PeerMetadata, TrackMetadata>) => void;
+  /** Emitted on successful reconnection */
+  reconnected: (client: ClientApi<PeerMetadata, TrackMetadata>) => void;
+
+  /** Emitted when the process of reconnection starts */
+  reconnectionStarted: (client: ClientApi<PeerMetadata, TrackMetadata>) => void;
+
+  /** Emitted when the maximum number of reconnection retries is reached */
+  reconnectionRetriesLimitReached: (client: ClientApi<PeerMetadata, TrackMetadata>) => void;
+
/**
* Called when peer was accepted.
*/
@@ -161,7 +173,10 @@ export interface ClientEvents {
/**
* Called in case of errors related to multimedia session e.g. ICE connection.
*/
-  connectionError: (message: string, client: ClientApi<PeerMetadata, TrackMetadata>) => void;
+  connectionError: (
+    error: Parameters<MessageEvents<PeerMetadata, TrackMetadata>["connectionError"]>[0],
+    client: ClientApi<PeerMetadata, TrackMetadata>,
+  ) => void;
/**
* Called every time the server estimates client's bandiwdth.
@@ -309,6 +324,8 @@ export class Client extends (EventEmitter as {
public media: MediaState | null = null;
public devices: Devices;
+ public reconnectionStatus: ReconnectionStatus = "idle";
+
private currentMicrophoneTrackId: string | null = null;
private currentCameraTrackId: string | null = null;
private currentScreenShareTrackId: string | null = null;
@@ -420,6 +437,9 @@ export class Client extends (EventEmitter as {
this.tsClient.on("disconnected", () => {
this.status = null;
+ this.currentCameraTrackId = null;
+ this.currentMicrophoneTrackId = null;
+ this.currentScreenShareTrackId = null;
this.stateToSnapshot();
this.emit("disconnected", this);
@@ -438,6 +458,35 @@ export class Client extends (EventEmitter as {
this.emit("joinError", metadata, this);
});
+
+ this.tsClient.on("reconnectionStarted", () => {
+ this.reconnectionStatus = "reconnecting";
+ this.stateToSnapshot();
+
+ this.emit("reconnectionStarted", this);
+ });
+
+ this.tsClient.on("reconnected", () => {
+ this.reconnectionStatus = "idle";
+ this.stateToSnapshot();
+
+ this.emit("reconnected", this);
+ });
+
+ this.tsClient.on("reconnectionRetriesLimitReached", () => {
+ this.reconnectionStatus = "error";
+ this.stateToSnapshot();
+
+ this.emit("reconnectionRetriesLimitReached", this);
+ });
+
+ this.tsClient.on("connectionError", (metadata) => {
+ this.status = "error";
+ this.stateToSnapshot();
+
+ this.emit("connectionError", metadata, this);
+ });
+
this.tsClient.on("peerJoined", (peer) => {
this.stateToSnapshot();
@@ -774,6 +823,10 @@ export class Client extends (EventEmitter as {
this.tsClient.updateTrackMetadata(trackId, trackMetadata);
};
+ public isReconnecting = () => {
+ return this.tsClient.isReconnecting();
+ };
+
// In most cases, the track is identified by its remote track ID.
// This ID comes from the ts-client `addTrack` method.
// However, we don't have that ID before the `addTrack` method returns it.
@@ -839,6 +892,10 @@ export class Client extends (EventEmitter as {
if (!media || !media.stream || !media.track) throw Error("Device is unavailable");
+ const track = this.getRemoteTrack(media.track.id);
+
+ if (track) return track.trackId;
+
// see `getRemoteTrack()` explanation
this.currentCameraTrackId = media.track.id;
@@ -926,6 +983,10 @@ export class Client extends (EventEmitter as {
if (this.currentMicrophoneTrackId) throw Error("Track already added");
+ const track = this.getRemoteTrack(media.track.id);
+
+ if (track) return track.trackId;
+
// see `getRemoteTrack()` explanation
this.currentMicrophoneTrackId = media.track.id;
@@ -1015,6 +1076,10 @@ export class Client extends (EventEmitter as {
if (this.currentScreenShareTrackId) throw Error("Screen share track already added");
+ const track = this.getRemoteTrack(media.track.id);
+
+ if (track) return track.trackId;
+
// see `getRemoteTrack()` explanation
this.currentScreenShareTrackId = media.track.id;
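
Outside the patch itself, a minimal sketch of how application code could consume the three new events; the toast callback and the hook name are hypothetical, the event names and useClient come from the code above:

```tsx
import { useEffect } from "react";
import { useClient } from "./fishjamSetup"; // app-level hooks produced by create()

export const useReconnectionToasts = (showToast: (message: string) => void) => {
  const client = useClient();

  useEffect(() => {
    if (!client) return;

    const onStarted = () => showToast("Connection lost, reconnecting…");
    const onReconnected = () => showToast("Reconnected");
    const onLimitReached = () => showToast("Could not reconnect, please rejoin");

    client.on("reconnectionStarted", onStarted);
    client.on("reconnected", onReconnected);
    client.on("reconnectionRetriesLimitReached", onLimitReached);

    return () => {
      client.off("reconnectionStarted", onStarted);
      client.off("reconnected", onReconnected);
      client.off("reconnectionRetriesLimitReached", onLimitReached);
    };
  }, [client, showToast]);
};
```
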
diff --git a/src/create.tsx b/src/create.tsx
index 0748b22..57ec17f 100644
--- a/src/create.tsx
+++ b/src/create.tsx
@@ -1,44 +1,20 @@
-import type { JSX, ReactNode } from "react";
-import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useSyncExternalStore } from "react";
-import type { Selector, State } from "./state.types";
-import type { PeerStatus, TrackId, TrackWithOrigin } from "./state.types";
+import type { JSX } from "react";
+import { createContext, useCallback, useContext, useMemo, useRef, useSyncExternalStore } from "react";
+import type { Selector, State, UseReconnection } from "./state.types";
import type { ConnectConfig, CreateConfig } from "@fishjam-dev/ts-client";
import type {
DeviceManagerConfig,
- Devices,
CameraAPI,
MicrophoneAPI,
ScreenShareAPI,
- UseSetupMediaConfig,
- UseSetupMediaResult,
+ CreateFishjamClient,
+ FishjamContextType,
+ FishjamContextProviderProps,
+ UseConnect,
} from "./types";
-import type { ClientApi, ClientEvents } from "./Client";
import { Client } from "./Client";
-import type { MediaDeviceType, ScreenShareManagerConfig } from "./ScreenShareManager";
-
-export type FishjamContextProviderProps = {
- children: ReactNode;
-};
-
-type FishjamContextType<PeerMetadata, TrackMetadata> = {
-  state: State<PeerMetadata, TrackMetadata>;
-};
-
-export type UseConnect<PeerMetadata> = (config: ConnectConfig<PeerMetadata>) => () => void;
-
-export type CreateFishjamClient<PeerMetadata, TrackMetadata> = {
-  FishjamContextProvider: ({ children }: FishjamContextProviderProps) => JSX.Element;
-  useConnect: () => (config: ConnectConfig<PeerMetadata>) => () => void;
-  useDisconnect: () => () => void;
-  useStatus: () => PeerStatus;
-  useSelector: <Result>(selector: Selector<PeerMetadata, TrackMetadata, Result>) => Result;
-  useTracks: () => Record<TrackId, TrackWithOrigin<PeerMetadata, TrackMetadata>>;
-  useSetupMedia: (config: UseSetupMediaConfig<TrackMetadata>) => UseSetupMediaResult;
-  useCamera: () => Devices<TrackMetadata>["camera"];
-  useMicrophone: () => Devices<TrackMetadata>["microphone"];
-  useScreenShare: () => ScreenShareAPI<TrackMetadata>;
-  useClient: () => Client<PeerMetadata, TrackMetadata>;
-};
+import type { ScreenShareManagerConfig } from "./ScreenShareManager";
+import { createUseSetupMediaHook } from "./useSetupMedia";
/**
* Create a client that can be used with a context.
@@ -87,6 +63,10 @@ export const create = (
client.on("peerUpdated", callback);
client.on("peerLeft", callback);
+ client.on("reconnected", callback);
+ client.on("reconnectionRetriesLimitReached", callback);
+ client.on("reconnectionStarted", callback);
+
client.on("componentAdded", callback);
client.on("componentUpdated", callback);
client.on("componentRemoved", callback);
@@ -139,6 +119,10 @@ export const create = (
client.removeListener("peerUpdated", callback);
client.removeListener("peerLeft", callback);
+ client.removeListener("reconnected", callback);
+ client.removeListener("reconnectionRetriesLimitReached", callback);
+ client.removeListener("reconnectionStarted", callback);
+
client.removeListener("componentAdded", callback);
client.removeListener("componentUpdated", callback);
client.removeListener("componentRemoved", callback);
@@ -195,6 +179,7 @@ export const create = (
devices: clientRef.current.devices,
deviceManager: clientRef.current.deviceManager,
client: clientRef.current,
+ reconnectionStatus: clientRef.current.reconnectionStatus,
};
lastSnapshotRef.current = state;
@@ -258,343 +243,20 @@ export const create = (
return state.devices.microphone;
};
-  const useSetupMedia = (config: UseSetupMediaConfig<TrackMetadata>): UseSetupMediaResult => {
+  const useScreenShare = (): ScreenShareAPI<TrackMetadata> => {
const { state } = useFishjamContext();
- const configRef = useRef(config);
-
- useEffect(() => {
- configRef.current = config;
-
- if (config.screenShare.streamConfig) {
- state.client.setScreenManagerConfig(config.screenShare.streamConfig);
- }
-
- state.client.setDeviceManagerConfig({
- storage: config.storage,
- });
- }, [config, state.client]);
-
- useEffect(() => {
- if (configRef.current.startOnMount && state.deviceManager.getStatus() === "uninitialized") {
- state.devices.init({
- audioTrackConstraints: configRef.current?.microphone?.trackConstraints,
- videoTrackConstraints: configRef.current?.camera?.trackConstraints,
- });
- }
- // eslint-disable-next-line
- }, []);
-
- useEffect(() => {
- let pending = false;
-
- const broadcastOnCameraStart = async (
- event: { mediaDeviceType: MediaDeviceType },
- client: ClientApi,
- ) => {
- const broadcastOnDeviceChange = configRef.current.camera.onDeviceChange ?? "replace";
-
- if (client.status === "joined" && event.mediaDeviceType === "userMedia" && !pending) {
- if (!client.devices.camera.broadcast?.stream && configRef.current.camera.broadcastOnDeviceStart) {
- pending = true;
-
- await client.devices.camera
- .addTrack(
- configRef.current.camera.defaultTrackMetadata,
- configRef.current.camera.defaultSimulcastConfig,
- configRef.current.camera.defaultMaxBandwidth,
- )
- .finally(() => {
- pending = false;
- });
- } else if (client.devices.camera.broadcast?.stream && broadcastOnDeviceChange === "replace") {
- pending = true;
-
- await client.devices.camera.replaceTrack().finally(() => {
- pending = false;
- });
- } else if (client.devices.camera.broadcast?.stream && broadcastOnDeviceChange === "remove") {
- pending = true;
-
- await client.devices.camera.removeTrack().finally(() => {
- pending = false;
- });
- }
- }
- };
-
- const managerInitialized: ClientEvents["managerInitialized"] = async (
- event,
- client,
- ) => {
- if (event.video?.media?.stream) {
- await broadcastOnCameraStart(event, client);
- }
- };
-
- const devicesReady: ClientEvents["devicesReady"] = async (event, client) => {
- if (event.video.restarted && event.video?.media?.stream) {
- await broadcastOnCameraStart(event, client);
- }
- };
-
- const deviceReady: ClientEvents["deviceReady"] = async (event, client) => {
- if (event.trackType === "video") {
- await broadcastOnCameraStart(event, client);
- }
- };
-
- state.client.on("managerInitialized", managerInitialized);
- state.client.on("devicesReady", devicesReady);
- state.client.on("deviceReady", deviceReady);
-
- return () => {
- state.client.removeListener("managerInitialized", managerInitialized);
- state.client.removeListener("devicesReady", devicesReady);
- state.client.removeListener("deviceReady", deviceReady);
- };
- }, [state.client]);
-
- useEffect(() => {
- const removeOnCameraStopped: ClientEvents["deviceStopped"] = async (
- event,
- client,
- ) => {
- if (
- client.status === "joined" &&
- event.mediaDeviceType === "userMedia" &&
- event.trackType === "video" &&
- client.devices.camera.broadcast?.stream
- ) {
- const onDeviceStop = configRef.current.camera.onDeviceStop ?? "mute";
-
- if (onDeviceStop === "mute") {
- await client.devices.camera.muteTrack();
- } else {
- await client.devices.camera.removeTrack();
- }
- }
- };
-
- state.client.on("deviceStopped", removeOnCameraStopped);
-
- return () => {
- state.client.removeListener("deviceStopped", removeOnCameraStopped);
- };
- }, [state.client]);
-
- useEffect(() => {
- const broadcastCameraOnConnect: ClientEvents["joined"] = async (_, client) => {
- if (client.devices.camera.stream && configRef.current.camera.broadcastOnConnect) {
- await client.devices.camera.addTrack(
- configRef.current.camera.defaultTrackMetadata,
- configRef.current.camera.defaultSimulcastConfig,
- configRef.current.camera.defaultMaxBandwidth,
- );
- }
- };
-
- state.client.on("joined", broadcastCameraOnConnect);
-
- return () => {
- state.client.removeListener("joined", broadcastCameraOnConnect);
- };
- }, [state.client]);
-
- useEffect(() => {
- let pending = false;
-
- const broadcastOnMicrophoneStart = async (
- event: { mediaDeviceType: MediaDeviceType },
- client: ClientApi,
- ) => {
- const broadcastOnDeviceChange = configRef.current.microphone.onDeviceChange ?? "replace";
-
- if (client.status === "joined" && event.mediaDeviceType === "userMedia" && !pending) {
- if (!client.devices.microphone.broadcast?.stream && configRef.current.microphone.broadcastOnDeviceStart) {
- pending = true;
-
- await client.devices.microphone
- .addTrack(
- configRef.current.microphone.defaultTrackMetadata,
- configRef.current.microphone.defaultMaxBandwidth,
- )
- .finally(() => {
- pending = false;
- });
- } else if (client.devices.microphone.broadcast?.stream && broadcastOnDeviceChange === "replace") {
- pending = true;
-
- await client.devices.microphone.replaceTrack().finally(() => {
- pending = false;
- });
- } else if (client.devices.microphone.broadcast?.stream && broadcastOnDeviceChange === "remove") {
- pending = true;
-
- await client.devices.microphone.removeTrack().finally(() => {
- pending = false;
- });
- }
- }
- };
-
- const managerInitialized: ClientEvents["managerInitialized"] = async (
- event,
- client,
- ) => {
- if (event.audio?.media?.stream) {
- await broadcastOnMicrophoneStart(event, client);
- }
- };
-
- const devicesReady: ClientEvents["devicesReady"] = async (event, client) => {
- if (event.audio.restarted && event.audio?.media?.stream) {
- await broadcastOnMicrophoneStart(event, client);
- }
- };
-
- const deviceReady: ClientEvents["deviceReady"] = async (event, client) => {
- if (event.trackType === "audio") {
- await broadcastOnMicrophoneStart(event, client);
- }
- };
-
- state.client.on("managerInitialized", managerInitialized);
- state.client.on("deviceReady", deviceReady);
- state.client.on("devicesReady", devicesReady);
-
- return () => {
- state.client.removeListener("managerInitialized", managerInitialized);
- state.client.removeListener("deviceReady", deviceReady);
- state.client.removeListener("devicesReady", devicesReady);
- };
- }, [state.client]);
-
- useEffect(() => {
- const onMicrophoneStopped: ClientEvents["deviceStopped"] = async (event, client) => {
- if (
- client.status === "joined" &&
- event.mediaDeviceType === "userMedia" &&
- event.trackType === "audio" &&
- client.devices.microphone.broadcast?.stream
- ) {
- const onDeviceStop = configRef.current.microphone.onDeviceStop ?? "mute";
-
- if (onDeviceStop === "mute") {
- await client.devices.microphone.muteTrack();
- } else {
- await client.devices.microphone.removeTrack();
- }
- }
- };
-
- state.client.on("deviceStopped", onMicrophoneStopped);
-
- return () => {
- state.client.removeListener("deviceStopped", onMicrophoneStopped);
- };
- }, [state.client]);
-
- useEffect(() => {
- const broadcastMicrophoneOnConnect: ClientEvents["joined"] = async (_, client) => {
- if (client.devices.microphone.stream && configRef.current.microphone.broadcastOnConnect) {
- await client.devices.microphone.addTrack(
- configRef.current.microphone.defaultTrackMetadata,
- configRef.current.microphone.defaultMaxBandwidth,
- );
- }
- };
-
- state.client.on("joined", broadcastMicrophoneOnConnect);
-
- return () => {
- state.client.removeListener("joined", broadcastMicrophoneOnConnect);
- };
- }, [state.client]);
-
- useEffect(() => {
- let adding = false;
-
- const broadcastOnScreenShareStart: ClientEvents["deviceReady"] = async (
- event: { mediaDeviceType: MediaDeviceType },
- client,
- ) => {
- if (
- client.status === "joined" &&
- event.mediaDeviceType === "displayMedia" &&
- !adding &&
- !client.devices.screenShare.broadcast?.stream &&
- configRef.current.screenShare.broadcastOnDeviceStart
- ) {
- adding = true;
-
- await client.devices.screenShare
- .addTrack(
- configRef.current.screenShare.defaultTrackMetadata,
- configRef.current.screenShare.defaultMaxBandwidth,
- )
- .finally(() => {
- adding = false;
- });
- }
- };
-
- state.client.on("deviceReady", broadcastOnScreenShareStart);
-
- return () => {
- state.client.removeListener("deviceReady", broadcastOnScreenShareStart);
- };
- }, [state.client]);
-
- useEffect(() => {
- const onScreenShareStop: ClientEvents["deviceStopped"] = async (event, client) => {
- if (
- client.status === "joined" &&
- event.mediaDeviceType === "displayMedia" &&
- client.devices.screenShare.broadcast?.stream
- ) {
- await client.devices.screenShare.removeTrack();
- }
- };
-
- state.client.on("deviceStopped", onScreenShareStop);
-
- return () => {
- state.client.removeListener("deviceStopped", onScreenShareStop);
- };
- }, [state.client]);
-
- useEffect(() => {
- const broadcastScreenShareOnConnect: ClientEvents["joined"] = async (_, client) => {
- if (client.devices.screenShare.stream && configRef.current.screenShare.broadcastOnConnect) {
- await client.devices.screenShare.addTrack(
- configRef.current.screenShare.defaultTrackMetadata,
- configRef.current.screenShare.defaultMaxBandwidth,
- );
- }
- };
-
- state.client.on("joined", broadcastScreenShareOnConnect);
-
- return () => {
- state.client.removeListener("joined", broadcastScreenShareOnConnect);
- };
- }, [state.client]);
-
- return useMemo(
- () => ({
- init: () =>
- state.devices.init({
- audioTrackConstraints: configRef.current?.microphone?.trackConstraints,
- videoTrackConstraints: configRef.current?.camera?.trackConstraints,
- }),
- }),
- [state.devices],
- );
+ return state.devices.screenShare;
};
-  const useScreenShare = (): ScreenShareAPI<TrackMetadata> => {
+ const useReconnection = (): UseReconnection => {
const { state } = useFishjamContext();
- return state.devices.screenShare;
+
+ return {
+ status: state.reconnectionStatus,
+ isReconnecting: state.reconnectionStatus === "reconnecting",
+ isError: state.reconnectionStatus === "error",
+ isIdle: state.reconnectionStatus === "idle",
+ };
};
return {
@@ -604,10 +266,11 @@ export const create = (
useDisconnect,
useStatus,
useTracks,
- useSetupMedia,
+ useSetupMedia: createUseSetupMediaHook(useFishjamContext),
useCamera,
useMicrophone,
useScreenShare,
useClient,
+ useReconnection,
};
};
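
A minimal sketch of consuming the new useReconnection() hook exported above; the component and the daisyUI badge classes (borrowed from BadgeUtils.ts) are assumptions:

```tsx
import { useReconnection } from "./fishjamSetup";

export const ReconnectionIndicator = () => {
  const { status, isReconnecting, isError } = useReconnection();

  if (isReconnecting) return <span className="badge badge-warning">Reconnecting…</span>;
  if (isError) return <span className="badge badge-error">Reconnection failed</span>;
  return <span className="badge badge-info">{status}</span>; // "idle" while nothing is in flight
};
```
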
diff --git a/src/index.ts b/src/index.ts
index bef4957..fc7ff33 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,5 +1,4 @@
export { create } from "./create";
-export type { CreateFishjamClient, UseConnect } from "./create";
export { Client } from "./Client";
export type { ClientEvents } from "./Client";
@@ -26,6 +25,8 @@ export type {
MicrophoneAPI,
UseSetupMediaResult,
UseSetupMediaConfig,
+ CreateFishjamClient,
+ UseConnect,
} from "./types";
export type { ScreenShareManagerConfig } from "./ScreenShareManager";
diff --git a/src/state.types.ts b/src/state.types.ts
index d02b357..479be50 100644
--- a/src/state.types.ts
+++ b/src/state.types.ts
@@ -1,4 +1,4 @@
-import type { TrackEncoding, VadStatus, SimulcastConfig } from "@fishjam-dev/ts-client";
+import type { TrackEncoding, VadStatus, SimulcastConfig, ReconnectionStatus } from "@fishjam-dev/ts-client";
import type { MediaState } from "./types";
import type { Devices } from "./types";
import type { Client } from "./Client";
@@ -42,6 +42,13 @@ export type PeerState = {
export type PeerStatus = "connecting" | "connected" | "authenticated" | "joined" | "error" | "closed" | null;
+export type UseReconnection = {
+ status: ReconnectionStatus;
+ isReconnecting: boolean;
+ isError: boolean;
+ isIdle: boolean;
+};
+
export type State<PeerMetadata, TrackMetadata> = {
  local: PeerState<PeerMetadata, TrackMetadata> | null;
  remote: Record<PeerId, PeerState<PeerMetadata, TrackMetadata>>;
@@ -53,6 +60,7 @@ export type State = {
client: Client;
deviceManager: DeviceManager;
screenShareManager: ScreenShareManager;
+ reconnectionStatus: ReconnectionStatus;
};
export type SetStore = (
diff --git a/src/types.ts b/src/types.ts
index 9e7b5ad..13c357c 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,6 +1,8 @@
-import type { SimulcastConfig, TrackBandwidthLimit } from "@fishjam-dev/ts-client";
+import type { ConnectConfig, SimulcastConfig, TrackBandwidthLimit } from "@fishjam-dev/ts-client";
import type { ScreenShareManagerConfig } from "./ScreenShareManager";
-import type { Track } from "./state.types";
+import type { PeerStatus, Selector, State, Track, TrackId, TrackWithOrigin, UseReconnection } from "./state.types";
+import type { JSX, ReactNode } from "react";
+import type { Client } from "./Client";
export type AudioOrVideoType = "audio" | "video";
@@ -236,3 +238,28 @@ export const parseError = (error: unknown): DeviceError | null => {
console.warn({ name: "Unhandled getUserMedia error", error });
return null;
};
+
+export type FishjamContextProviderProps = {
+ children: ReactNode;
+};
+
+export type FishjamContextType<PeerMetadata, TrackMetadata> = {
+  state: State<PeerMetadata, TrackMetadata>;
+};
+
+export type UseConnect<PeerMetadata> = (config: ConnectConfig<PeerMetadata>) => () => void;
+
+export type CreateFishjamClient<PeerMetadata, TrackMetadata> = {
+  FishjamContextProvider: ({ children }: FishjamContextProviderProps) => JSX.Element;
+  useConnect: () => (config: ConnectConfig<PeerMetadata>) => () => void;
+  useDisconnect: () => () => void;
+  useStatus: () => PeerStatus;
+  useSelector: <Result>(selector: Selector<PeerMetadata, TrackMetadata, Result>) => Result;
+  useTracks: () => Record<TrackId, TrackWithOrigin<PeerMetadata, TrackMetadata>>;
+  useSetupMedia: (config: UseSetupMediaConfig<TrackMetadata>) => UseSetupMediaResult;
+  useCamera: () => Devices<TrackMetadata>["camera"];
+  useMicrophone: () => Devices<TrackMetadata>["microphone"];
+  useScreenShare: () => ScreenShareAPI<TrackMetadata>;
+  useClient: () => Client<PeerMetadata, TrackMetadata>;
+  useReconnection: () => UseReconnection;
+};
diff --git a/src/useSetupMedia.tsx b/src/useSetupMedia.tsx
new file mode 100644
index 0000000..3b05878
--- /dev/null
+++ b/src/useSetupMedia.tsx
@@ -0,0 +1,348 @@
+import type { FishjamContextType, UseSetupMediaConfig, UseSetupMediaResult } from "./types";
+import { useEffect, useMemo, useRef } from "react";
+import type { MediaDeviceType, TrackType } from "./ScreenShareManager";
+import type { ClientApi, ClientEvents } from "./Client";
+import type { PeerStatus } from "./state.types";
+
+export const createUseSetupMediaHook = <PeerMetadata, TrackMetadata>(
+  useFishjamContext: () => FishjamContextType<PeerMetadata, TrackMetadata>,
+) => {
+ const isBroadcastedTrackChanged = (
+ expectedMediaDeviceType: MediaDeviceType,
+    client: ClientApi<PeerMetadata, TrackMetadata>,
+ pending: boolean,
+ mediaDeviceType: MediaDeviceType,
+ ) =>
+ client.status === "joined" && mediaDeviceType === expectedMediaDeviceType && !pending && !client.isReconnecting();
+
+ const isBroadcastedTrackStopped = (
+ expectedMediaDeviceType: MediaDeviceType,
+ expectedTrackType: TrackType,
+ status: PeerStatus,
+    event: Parameters<ClientEvents<PeerMetadata, TrackMetadata>["deviceStopped"]>[0],
+ stream: MediaStream | undefined | null,
+ ) =>
+ status === "joined" &&
+ event.mediaDeviceType === expectedMediaDeviceType &&
+ event.trackType === expectedTrackType &&
+ stream;
+
+  return (config: UseSetupMediaConfig<TrackMetadata>): UseSetupMediaResult => {
+ const { state } = useFishjamContext();
+ const configRef = useRef(config);
+
+ useEffect(() => {
+ configRef.current = config;
+
+ if (config.screenShare.streamConfig) {
+ state.client.setScreenManagerConfig(config.screenShare.streamConfig);
+ }
+
+ state.client.setDeviceManagerConfig({
+ storage: config.storage,
+ });
+ }, [config, state.client]);
+
+ useEffect(() => {
+ if (configRef.current.startOnMount && state.deviceManager.getStatus() === "uninitialized") {
+ state.devices.init({
+ audioTrackConstraints: configRef.current?.microphone?.trackConstraints,
+ videoTrackConstraints: configRef.current?.camera?.trackConstraints,
+ });
+ }
+ // eslint-disable-next-line
+ }, []);
+
+ useEffect(() => {
+ let pending = false;
+
+ const broadcastOnCameraStart = async (
+ event: { mediaDeviceType: MediaDeviceType },
+        client: ClientApi<PeerMetadata, TrackMetadata>,
+ ) => {
+ const config = configRef.current.camera;
+ const onDeviceChange = config.onDeviceChange ?? "replace";
+ const camera = client.devices.camera;
+ const stream = camera.broadcast?.stream;
+
+ if (isBroadcastedTrackChanged("userMedia", client, pending, event.mediaDeviceType)) {
+ if (!stream && config.broadcastOnDeviceStart) {
+ pending = true;
+
+ await camera
+ .addTrack(config.defaultTrackMetadata, config.defaultSimulcastConfig, config.defaultMaxBandwidth)
+ .finally(() => {
+ pending = false;
+ });
+ } else if (stream && onDeviceChange === "replace") {
+ pending = true;
+
+ await camera.replaceTrack().finally(() => {
+ pending = false;
+ });
+ } else if (stream && onDeviceChange === "remove") {
+ pending = true;
+
+ await camera.removeTrack().finally(() => {
+ pending = false;
+ });
+ }
+ }
+ };
+
+      const managerInitialized: ClientEvents<PeerMetadata, TrackMetadata>["managerInitialized"] = async (
+ event,
+ client,
+ ) => {
+ if (event.video?.media?.stream) {
+ await broadcastOnCameraStart(event, client);
+ }
+ };
+
+      const devicesReady: ClientEvents<PeerMetadata, TrackMetadata>["devicesReady"] = async (event, client) => {
+ if (event.video.restarted && event.video?.media?.stream) {
+ await broadcastOnCameraStart(event, client);
+ }
+ };
+
+      const deviceReady: ClientEvents<PeerMetadata, TrackMetadata>["deviceReady"] = async (event, client) => {
+ if (event.trackType === "video") {
+ await broadcastOnCameraStart(event, client);
+ }
+ };
+
+ state.client.on("managerInitialized", managerInitialized);
+ state.client.on("devicesReady", devicesReady);
+ state.client.on("deviceReady", deviceReady);
+
+ return () => {
+ state.client.removeListener("managerInitialized", managerInitialized);
+ state.client.removeListener("devicesReady", devicesReady);
+ state.client.removeListener("deviceReady", deviceReady);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const removeOnCameraStopped: ClientEvents<PeerMetadata, TrackMetadata>["deviceStopped"] = async (
+ event,
+ client,
+ ) => {
+ const camera = client.devices.camera;
+ const stream = camera.broadcast?.stream;
+ const onDeviceStop = configRef.current.camera.onDeviceStop ?? "mute";
+
+ if (isBroadcastedTrackStopped("userMedia", "video", client.status, event, stream)) {
+ if (onDeviceStop === "mute") {
+ await camera.muteTrack();
+ } else {
+ await camera.removeTrack();
+ }
+ }
+ };
+
+ state.client.on("deviceStopped", removeOnCameraStopped);
+
+ return () => {
+ state.client.removeListener("deviceStopped", removeOnCameraStopped);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const broadcastCameraOnConnect: ClientEvents<PeerMetadata, TrackMetadata>["joined"] = async (_, client) => {
+ const camera = client.devices.camera;
+ const stream = camera.stream;
+ const config = configRef.current.camera;
+
+ if (stream && config.broadcastOnConnect) {
+ await camera.addTrack(config.defaultTrackMetadata, config.defaultSimulcastConfig, config.defaultMaxBandwidth);
+ }
+ };
+
+ state.client.on("joined", broadcastCameraOnConnect);
+
+ return () => {
+ state.client.removeListener("joined", broadcastCameraOnConnect);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+ let pending = false;
+
+ const broadcastOnMicrophoneStart = async (
+ event: { mediaDeviceType: MediaDeviceType },
+        client: ClientApi<PeerMetadata, TrackMetadata>,
+ ) => {
+ const microphone = client.devices.microphone;
+ const stream = microphone.broadcast?.stream;
+ const config = configRef.current.microphone;
+ const onDeviceChange = config.onDeviceChange ?? "replace";
+
+ if (isBroadcastedTrackChanged("userMedia", client, pending, event.mediaDeviceType)) {
+ if (!stream && config.broadcastOnDeviceStart) {
+ pending = true;
+
+ await microphone.addTrack(config.defaultTrackMetadata, config.defaultMaxBandwidth).finally(() => {
+ pending = false;
+ });
+ } else if (stream && onDeviceChange === "replace") {
+ pending = true;
+
+ await microphone.replaceTrack().finally(() => {
+ pending = false;
+ });
+ } else if (stream && onDeviceChange === "remove") {
+ pending = true;
+
+ await microphone.removeTrack().finally(() => {
+ pending = false;
+ });
+ }
+ }
+ };
+
+      const managerInitialized: ClientEvents<PeerMetadata, TrackMetadata>["managerInitialized"] = async (
+ event,
+ client,
+ ) => {
+ if (event.audio?.media?.stream) {
+ await broadcastOnMicrophoneStart(event, client);
+ }
+ };
+
+      const devicesReady: ClientEvents<PeerMetadata, TrackMetadata>["devicesReady"] = async (event, client) => {
+ if (event.audio.restarted && event.audio?.media?.stream) {
+ await broadcastOnMicrophoneStart(event, client);
+ }
+ };
+
+      const deviceReady: ClientEvents<PeerMetadata, TrackMetadata>["deviceReady"] = async (event, client) => {
+ if (event.trackType === "audio") {
+ await broadcastOnMicrophoneStart(event, client);
+ }
+ };
+
+ state.client.on("managerInitialized", managerInitialized);
+ state.client.on("deviceReady", deviceReady);
+ state.client.on("devicesReady", devicesReady);
+
+ return () => {
+ state.client.removeListener("managerInitialized", managerInitialized);
+ state.client.removeListener("deviceReady", deviceReady);
+ state.client.removeListener("devicesReady", devicesReady);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const onMicrophoneStopped: ClientEvents<PeerMetadata, TrackMetadata>["deviceStopped"] = async (event, client) => {
+ const microphone = client.devices.microphone;
+ const stream = microphone.broadcast?.stream;
+ const onDeviceStop = configRef.current.microphone.onDeviceStop ?? "mute";
+
+ if (isBroadcastedTrackStopped("userMedia", "audio", client.status, event, stream)) {
+ if (onDeviceStop === "mute") {
+ await microphone.muteTrack();
+ } else {
+ await microphone.removeTrack();
+ }
+ }
+ };
+
+ state.client.on("deviceStopped", onMicrophoneStopped);
+
+ return () => {
+ state.client.removeListener("deviceStopped", onMicrophoneStopped);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const broadcastMicrophoneOnConnect: ClientEvents<PeerMetadata, TrackMetadata>["joined"] = async (_, client) => {
+ const config = configRef.current.microphone;
+ const microphone = client.devices.microphone;
+
+ if (microphone.stream && config.broadcastOnConnect) {
+ await microphone.addTrack(config.defaultTrackMetadata, config.defaultMaxBandwidth);
+ }
+ };
+
+ state.client.on("joined", broadcastMicrophoneOnConnect);
+
+ return () => {
+ state.client.removeListener("joined", broadcastMicrophoneOnConnect);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+ let pending = false;
+
+      const broadcastOnScreenShareStart: ClientEvents<PeerMetadata, TrackMetadata>["deviceReady"] = async (
+ event: { mediaDeviceType: MediaDeviceType },
+ client,
+ ) => {
+ const screenShare = client.devices.screenShare;
+ const stream = screenShare.broadcast?.stream;
+ const { broadcastOnDeviceStart, defaultTrackMetadata, defaultMaxBandwidth } = configRef.current.screenShare;
+
+ if (
+ isBroadcastedTrackChanged("displayMedia", client, pending, event.mediaDeviceType) &&
+ !stream &&
+ broadcastOnDeviceStart
+ ) {
+ pending = true;
+
+ await screenShare.addTrack(defaultTrackMetadata, defaultMaxBandwidth).finally(() => {
+ pending = false;
+ });
+ }
+ };
+
+ state.client.on("deviceReady", broadcastOnScreenShareStart);
+
+ return () => {
+ state.client.removeListener("deviceReady", broadcastOnScreenShareStart);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const onScreenShareStop: ClientEvents<PeerMetadata, TrackMetadata>["deviceStopped"] = async (event, client) => {
+ const stream = client.devices.screenShare.broadcast?.stream;
+ if (isBroadcastedTrackStopped("displayMedia", "video", client.status, event, stream)) {
+ await client.devices.screenShare.removeTrack();
+ }
+ };
+
+ state.client.on("deviceStopped", onScreenShareStop);
+
+ return () => {
+ state.client.removeListener("deviceStopped", onScreenShareStop);
+ };
+ }, [state.client]);
+
+ useEffect(() => {
+      const broadcastScreenShareOnConnect: ClientEvents<PeerMetadata, TrackMetadata>["joined"] = async (_, client) => {
+ if (client.devices.screenShare.stream && configRef.current.screenShare.broadcastOnConnect) {
+ await client.devices.screenShare.addTrack(
+ configRef.current.screenShare.defaultTrackMetadata,
+ configRef.current.screenShare.defaultMaxBandwidth,
+ );
+ }
+ };
+
+ state.client.on("joined", broadcastScreenShareOnConnect);
+
+ return () => {
+ state.client.removeListener("joined", broadcastScreenShareOnConnect);
+ };
+ }, [state.client]);
+
+ return useMemo(
+ () => ({
+ init: () =>
+ state.devices.init({
+ audioTrackConstraints: configRef.current?.microphone?.trackConstraints,
+ videoTrackConstraints: configRef.current?.camera?.trackConstraints,
+ }),
+ }),
+ [state.devices],
+ );
+ };
+};
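
For reference, a hedged sketch of how the extracted hook is driven from application code. The constraint constants match the example app's imports in MainControls.tsx; whether each field is required, and the metadata values themselves, are assumptions about the app's trackMetadataParser:

```tsx
import { AUDIO_TRACK_CONSTRAINTS, VIDEO_TRACK_CONSTRAINTS } from "@fishjam-dev/react-client";
import { useSetupMedia } from "./fishjamSetup"; // assumed to be re-exported like the other hooks

export const useAppMediaSetup = () =>
  useSetupMedia({
    startOnMount: true,
    camera: {
      trackConstraints: VIDEO_TRACK_CONSTRAINTS,
      broadcastOnConnect: true,
      broadcastOnDeviceStart: true,
      onDeviceChange: "replace", // drives the replaceTrack() branch above
      onDeviceStop: "mute", // drives the muteTrack() branch above
      defaultTrackMetadata: { type: "camera" },
    },
    microphone: {
      trackConstraints: AUDIO_TRACK_CONSTRAINTS,
      broadcastOnConnect: true,
      broadcastOnDeviceStart: true,
      defaultTrackMetadata: { type: "microphone" },
    },
    screenShare: {
      broadcastOnConnect: false,
      broadcastOnDeviceStart: true,
      defaultTrackMetadata: { type: "screenshare" },
    },
  });
```
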