This commit is contained in:
2025-09-09 11:09:43 +04:00
parent 458486486d
commit 0145de19a3
54 changed files with 11278 additions and 1 deletions

176
lib/CameraSettings.tsx Normal file
View File

@@ -0,0 +1,176 @@
'use client';
import React from 'react';
import {
MediaDeviceMenu,
TrackReference,
TrackToggle,
useLocalParticipant,
VideoTrack,
} from '@livekit/components-react';
import { isLocalTrack, LocalTrackPublication, Track } from 'livekit-client';
// Background image paths
// Selectable virtual-background images shown in the settings panel.
// `path.src` must resolve to an asset served from the app's public directory.
const BACKGROUND_IMAGES = [
{ name: 'Desk', path: { src: '/background-images/samantha-gades-BlIhVfXbi9s-unsplash.jpg' } },
{ name: 'Nature', path: { src: '/background-images/ali-kazal-tbw_KQE3Cbg-unsplash.jpg' } },
];
// Background options
// 'none' = raw camera feed, 'blur' = background-blur processor, 'image' = virtual background.
type BackgroundType = 'none' | 'blur' | 'image';
/**
 * Camera settings panel: live mirrored preview, camera toggle, input-device
 * menu, and background effects (none / blur / virtual background image)
 * applied through LiveKit track processors.
 */
export function CameraSettings() {
  const { cameraTrack, localParticipant } = useLocalParticipant();

  // Derive the initial selection from any processor already attached to the
  // track, so reopening the panel reflects the currently active effect.
  // Lazy initializer avoids calling getProcessor() twice on first render.
  const [backgroundType, setBackgroundType] = React.useState<BackgroundType>(() => {
    const processorName = (cameraTrack as LocalTrackPublication)?.track?.getProcessor()?.name;
    if (processorName === 'background-blur') return 'blur';
    if (processorName === 'virtual-background') return 'image';
    return 'none';
  });
  const [virtualBackgroundImagePath, setVirtualBackgroundImagePath] = React.useState<string | null>(
    null,
  );

  // TrackReference used to render the local camera preview.
  const camTrackRef: TrackReference | undefined = React.useMemo(() => {
    return cameraTrack
      ? { participant: localParticipant, publication: cameraTrack, source: Track.Source.Camera }
      : undefined;
  }, [localParticipant, cameraTrack]);

  const selectBackground = (type: BackgroundType, imagePath?: string) => {
    setBackgroundType(type);
    if (type === 'image' && imagePath) {
      setVirtualBackgroundImagePath(imagePath);
    } else if (type !== 'image') {
      setVirtualBackgroundImagePath(null);
    }
  };

  // Apply or remove the selected processor whenever the selection or track
  // changes. The processor modules are imported lazily; the `cancelled` flag
  // prevents a slow import from applying an effect the user has already
  // switched away from (the original had no such guard, so rapid clicks could
  // leave a stale processor applied).
  React.useEffect(() => {
    let cancelled = false;
    if (isLocalTrack(cameraTrack?.track)) {
      if (backgroundType === 'blur') {
        import('@livekit/track-processors').then(({ BackgroundBlur }) => {
          if (!cancelled) {
            cameraTrack.track
              ?.setProcessor(BackgroundBlur())
              .catch((e) => console.error('failed to set blur processor', e));
          }
        });
      } else if (backgroundType === 'image' && virtualBackgroundImagePath) {
        import('@livekit/track-processors').then(({ VirtualBackground }) => {
          if (!cancelled) {
            cameraTrack.track
              ?.setProcessor(VirtualBackground(virtualBackgroundImagePath))
              .catch((e) => console.error('failed to set virtual background processor', e));
          }
        });
      } else {
        cameraTrack.track?.stopProcessor();
      }
    }
    return () => {
      cancelled = true;
    };
  }, [cameraTrack, backgroundType, virtualBackgroundImagePath]);

  return (
    <div style={{ display: 'flex', flexDirection: 'column', gap: '10px' }}>
      {camTrackRef && (
        <VideoTrack
          style={{
            maxHeight: '280px',
            objectFit: 'contain',
            objectPosition: 'right',
            // mirror the self-view, matching user expectations
            transform: 'scaleX(-1)',
          }}
          trackRef={camTrackRef}
        />
      )}
      <section className="lk-button-group">
        <TrackToggle source={Track.Source.Camera}>Camera</TrackToggle>
        <div className="lk-button-group-menu">
          <MediaDeviceMenu kind="videoinput" />
        </div>
      </section>
      <div style={{ marginTop: '10px' }}>
        <div style={{ marginBottom: '8px' }}>Background Effects</div>
        <div style={{ display: 'flex', gap: '10px', flexWrap: 'wrap' }}>
          <button
            onClick={() => selectBackground('none')}
            className="lk-button"
            aria-pressed={backgroundType === 'none'}
            style={{
              border: backgroundType === 'none' ? '2px solid #0090ff' : '1px solid #d1d1d1',
              minWidth: '80px',
            }}
          >
            None
          </button>
          <button
            onClick={() => selectBackground('blur')}
            className="lk-button"
            aria-pressed={backgroundType === 'blur'}
            style={{
              border: backgroundType === 'blur' ? '2px solid #0090ff' : '1px solid #d1d1d1',
              minWidth: '80px',
              backgroundColor: '#f0f0f0',
              position: 'relative',
              overflow: 'hidden',
              height: '60px',
            }}
          >
            {/* blurred backdrop to visually hint at the effect */}
            <div
              style={{
                position: 'absolute',
                top: 0,
                left: 0,
                right: 0,
                bottom: 0,
                backgroundColor: '#e0e0e0',
                filter: 'blur(8px)',
                zIndex: 0,
              }}
            />
            <span
              style={{
                position: 'relative',
                zIndex: 1,
                backgroundColor: 'rgba(0,0,0,0.6)',
                padding: '2px 5px',
                borderRadius: '4px',
                fontSize: '12px',
              }}
            >
              Blur
            </span>
          </button>
          {BACKGROUND_IMAGES.map((image) => (
            <button
              key={image.path.src}
              onClick={() => selectBackground('image', image.path.src)}
              className="lk-button"
              aria-pressed={
                backgroundType === 'image' && virtualBackgroundImagePath === image.path.src
              }
              style={{
                backgroundImage: `url(${image.path.src})`,
                backgroundSize: 'cover',
                backgroundPosition: 'center',
                width: '80px',
                height: '60px',
                border:
                  backgroundType === 'image' && virtualBackgroundImagePath === image.path.src
                    ? '2px solid #0090ff'
                    : '1px solid #d1d1d1',
              }}
            >
              <span
                style={{
                  backgroundColor: 'rgba(0,0,0,0.6)',
                  padding: '2px 5px',
                  borderRadius: '4px',
                  fontSize: '12px',
                }}
              >
                {image.name}
              </span>
            </button>
          ))}
        </div>
      </div>
    </div>
  );
}

251
lib/Debug.tsx Normal file
View File

@@ -0,0 +1,251 @@
import * as React from 'react';
import { useRoomContext } from '@livekit/components-react';
import { setLogLevel, LogLevel, RemoteTrackPublication, setLogExtension } from 'livekit-client';
// @ts-ignore
import { tinykeys } from 'tinykeys';
import { datadogLogs } from '@datadog/browser-logs';
import styles from '../styles/Debug.module.css';
/**
 * Enables verbose livekit-client logging and exposes the current room on
 * `window.__lk_room` for interactive console debugging. When the Datadog env
 * vars are present, also forwards livekit logs to Datadog.
 */
export const useDebugMode = ({ logLevel }: { logLevel?: LogLevel }) => {
  const room = useRoomContext();
  React.useEffect(() => {
    setLogLevel(logLevel ?? 'debug');

    const clientToken = process.env.NEXT_PUBLIC_DATADOG_CLIENT_TOKEN;
    const site = process.env.NEXT_PUBLIC_DATADOG_SITE;
    if (clientToken && site) {
      console.log('setting up datadog logs');
      datadogLogs.init({
        clientToken,
        site,
        forwardErrorsToLogs: true,
        sessionSampleRate: 100,
      });
      // Route each livekit log level to the matching Datadog logger method;
      // unrecognized levels are intentionally dropped.
      setLogExtension((level, msg, context) => {
        if (level === LogLevel.debug) {
          datadogLogs.logger.debug(msg, context);
        } else if (level === LogLevel.info) {
          datadogLogs.logger.info(msg, context);
        } else if (level === LogLevel.warn) {
          datadogLogs.logger.warn(msg, context);
        } else if (level === LogLevel.error) {
          datadogLogs.logger.error(msg, context);
        }
      });
    }

    // @ts-expect-error
    window.__lk_room = room;
    return () => {
      // @ts-expect-error
      window.__lk_room = undefined;
    };
  }, [room, logLevel]);
};
/**
 * On-screen debug overlay, toggled with Shift+D. Shows the room name/SID, the
 * local participant's published tracks and permissions, and each remote
 * participant's tracks with live bitrate/dimension stats (re-rendered once per
 * second while open).
 */
export const DebugMode = ({ logLevel }: { logLevel?: LogLevel }) => {
  const room = useRoomContext();
  const [isOpen, setIsOpen] = React.useState(false);
  const [, setRender] = React.useState({});
  const [roomSid, setRoomSid] = React.useState('');

  React.useEffect(() => {
    room.getSid().then(setRoomSid);
  }, [room]);

  useDebugMode({ logLevel });

  React.useEffect(() => {
    if (window) {
      const unsubscribe = tinykeys(window, {
        'Shift+D': () => {
          console.log('setting open');
          setIsOpen((open) => !open);
        },
      });
      // timer to re-render so live stats (bitrate, dimensions) stay fresh
      const interval = setInterval(() => {
        setRender({});
      }, 1000);
      return () => {
        unsubscribe();
        clearInterval(interval);
      };
    }
  }, [isOpen]);

  // Never render during SSR or while hidden. (The original also had a second,
  // unreachable `!isOpen` check and an unused simulate-scenario handler; both
  // removed as dead code.)
  if (typeof window === 'undefined' || !isOpen) {
    return null;
  }

  const lp = room.localParticipant;
  return (
    <div className={styles.overlay}>
      <section id="room-info">
        <h3>
          Room Info {room.name}: {roomSid}
        </h3>
      </section>
      <details open>
        <summary>
          <b>Local Participant: {lp.identity}</b>
        </summary>
        <details open className={styles.detailsSection}>
          <summary>
            <b>Published tracks</b>
          </summary>
          <div>
            {Array.from(lp.trackPublications.values()).map((t) => (
              // keyed fragment so React can reconcile the list without warnings
              <React.Fragment key={t.trackSid}>
                <div>
                  <i>
                    {t.source.toString()}
                    &nbsp;<span>{t.trackSid}</span>
                  </i>
                </div>
                <table>
                  <tbody>
                    <tr>
                      <td>Kind</td>
                      <td>
                        {t.kind}&nbsp;
                        {t.kind === 'video' && (
                          <span>
                            {t.track?.dimensions?.width}x{t.track?.dimensions?.height}
                          </span>
                        )}
                      </td>
                    </tr>
                    {/* guard instead of `t.track!`: the publication may have no attached track */}
                    {t.track && (
                      <tr>
                        <td>Bitrate</td>
                        <td>{Math.ceil(t.track.currentBitrate / 1000)} kbps</td>
                      </tr>
                    )}
                  </tbody>
                </table>
              </React.Fragment>
            ))}
          </div>
        </details>
        <details open className={styles.detailsSection}>
          <summary>
            <b>Permissions</b>
          </summary>
          <div>
            <table>
              <tbody>
                {lp.permissions &&
                  Object.entries(lp.permissions).map(([key, val]) => (
                    <tr key={key}>
                      <td>{key}</td>
                      {key !== 'canPublishSources' ? (
                        <td>{val.toString()}</td>
                      ) : (
                        <td> {val.join(', ')} </td>
                      )}
                    </tr>
                  ))}
              </tbody>
            </table>
          </div>
        </details>
      </details>
      <details>
        <summary>
          <b>Remote Participants</b>
        </summary>
        {Array.from(room.remoteParticipants.values()).map((p) => (
          <details key={p.sid} className={styles.detailsSection}>
            <summary>
              <b>
                {p.identity}
                <span></span>
              </b>
            </summary>
            <div>
              {Array.from(p.trackPublications.values()).map((t) => (
                <React.Fragment key={t.trackSid}>
                  <div>
                    <i>
                      {t.source.toString()}
                      &nbsp;<span>{t.trackSid}</span>
                    </i>
                  </div>
                  <table>
                    <tbody>
                      <tr>
                        <td>Kind</td>
                        <td>
                          {t.kind}&nbsp;
                          {t.kind === 'video' && (
                            <span>
                              {t.dimensions?.width}x{t.dimensions?.height}
                            </span>
                          )}
                        </td>
                      </tr>
                      <tr>
                        <td>Status</td>
                        <td>{trackStatus(t)}</td>
                      </tr>
                      {t.track && (
                        <tr>
                          <td>Bitrate</td>
                          <td>{Math.ceil(t.track.currentBitrate / 1000)} kbps</td>
                        </tr>
                      )}
                    </tbody>
                  </table>
                </React.Fragment>
              ))}
            </div>
          </details>
        ))}
      </details>
    </div>
  );
};
/**
 * Human-readable subscription state for a remote track publication:
 * 'unsubscribed' until subscribed, then 'enabled' or 'disabled'.
 */
function trackStatus(t: RemoteTrackPublication): string {
  if (!t.isSubscribed) {
    return 'unsubscribed';
  }
  return t.isEnabled ? 'enabled' : 'disabled';
}

31
lib/KeyboardShortcuts.tsx Normal file
View File

@@ -0,0 +1,31 @@
'use client';
import React from 'react';
import { Track } from 'livekit-client';
import { useTrackToggle } from '@livekit/components-react';
/**
 * Registers global keyboard shortcuts for media controls:
 * - Cmd/Ctrl-Shift-A toggles the microphone
 * - Cmd/Ctrl-Shift-V toggles the camera
 * Renders nothing.
 */
export function KeyboardShortcuts() {
  const { toggle: toggleMic } = useTrackToggle({ source: Track.Source.Microphone });
  const { toggle: toggleCamera } = useTrackToggle({ source: Track.Source.Camera });
  React.useEffect(() => {
    function handleShortcut(event: KeyboardEvent) {
      // Require the primary modifier (Cmd on macOS, Ctrl elsewhere); the
      // uppercase key check below implies Shift is held.
      if (!(event.ctrlKey || event.metaKey)) {
        return;
      }
      // Toggle microphone: Cmd/Ctrl-Shift-A
      if (toggleMic && event.key === 'A') {
        event.preventDefault();
        toggleMic();
      }
      // Toggle camera: Cmd/Ctrl-Shift-V. Guard toggleCamera the same way the
      // mic branch guards toggleMic (the original called it unguarded).
      if (toggleCamera && event.key === 'V') {
        event.preventDefault();
        toggleCamera();
      }
    }
    window.addEventListener('keydown', handleShortcut);
    return () => window.removeEventListener('keydown', handleShortcut);
  }, [toggleMic, toggleCamera]);
  return null;
}

View File

@@ -0,0 +1,55 @@
import React from 'react';
import { useKrispNoiseFilter } from '@livekit/components-react/krisp';
import { TrackToggle } from '@livekit/components-react';
import { MediaDeviceMenu } from '@livekit/components-react';
import { Track } from 'livekit-client';
import { isLowPowerDevice } from './client-utils';
/**
 * Microphone settings row: track toggle, input-device menu, and a button for
 * Krisp enhanced noise cancellation (turned on by default on capable devices).
 */
export function MicrophoneSettings() {
  const { isNoiseFilterEnabled, setNoiseFilterEnabled, isNoiseFilterPending } = useKrispNoiseFilter({
    filterOptions: {
      bufferOverflowMs: 100,
      bufferDropMs: 200,
      // lower processing quality on weak hardware to keep audio smooth
      quality: isLowPowerDevice() ? 'low' : 'medium',
      onBufferDrop: () => {
        console.warn(
          'krisp buffer dropped, noise filter versions >= 0.3.2 will automatically disable the filter',
        );
      },
    },
  });

  React.useEffect(() => {
    // enable Krisp by default on non-low power devices
    setNoiseFilterEnabled(!isLowPowerDevice());
  }, []);

  const rowStyle: React.CSSProperties = {
    display: 'flex',
    flexDirection: 'row',
    gap: '10px',
    alignItems: 'center',
    justifyContent: 'space-between',
  };

  return (
    <div style={rowStyle}>
      <section className="lk-button-group">
        <TrackToggle source={Track.Source.Microphone}>Microphone</TrackToggle>
        <div className="lk-button-group-menu">
          <MediaDeviceMenu kind="audioinput" />
        </div>
      </section>
      <button
        className="lk-button"
        onClick={() => setNoiseFilterEnabled(!isNoiseFilterEnabled)}
        disabled={isNoiseFilterPending}
        aria-pressed={isNoiseFilterEnabled}
      >
        {isNoiseFilterEnabled ? 'Disable' : 'Enable'} Enhanced Noise Cancellation
      </button>
    </div>
  );
}

View File

@@ -0,0 +1,40 @@
import { useIsRecording } from '@livekit/components-react';
import * as React from 'react';
import toast from 'react-hot-toast';
/**
 * Full-bleed overlay that draws a red inset border while the room is being
 * recorded and shows a one-time toast when recording starts. Pointer events
 * pass through, so it never blocks interaction.
 */
export function RecordingIndicator() {
  const isRecording = useIsRecording();
  // Tracks the previous value so the toast fires only on the off→on transition.
  const [wasRecording, setWasRecording] = React.useState(false);
  React.useEffect(() => {
    if (isRecording !== wasRecording) {
      setWasRecording(isRecording);
      if (isRecording) {
        toast('This meeting is being recorded', {
          duration: 3000,
          icon: '🎥',
          position: 'top-center',
          className: 'lk-button',
          style: {
            backgroundColor: 'var(--lk-danger3)',
            color: 'var(--lk-fg)',
          },
        });
      }
    }
    // include wasRecording (the original omitted it) so the comparison never
    // reads a stale closure value; the inequality guard keeps it a no-op.
  }, [isRecording, wasRecording]);
  return (
    <div
      style={{
        position: 'absolute',
        top: '0',
        left: '0',
        width: '100%',
        height: '100%',
        boxShadow: isRecording ? 'var(--lk-danger3) 0px 0px 0px 3px inset' : 'none',
        pointerEvents: 'none',
      }}
    ></div>
  );
}

154
lib/SettingsMenu.tsx Normal file
View File

@@ -0,0 +1,154 @@
'use client';
import * as React from 'react';
import { Track } from 'livekit-client';
import {
useMaybeLayoutContext,
MediaDeviceMenu,
TrackToggle,
useRoomContext,
useIsRecording,
} from '@livekit/components-react';
import styles from '../styles/SettingsMenu.module.css';
import { CameraSettings } from './CameraSettings';
import { MicrophoneSettings } from './MicrophoneSettings';
/**
 * Props for the SettingsMenu panel; accepts standard `div` attributes which
 * are spread onto the root element.
 * @alpha
 */
export interface SettingsMenuProps extends React.HTMLAttributes<HTMLDivElement> {}
/**
* @alpha
*/
/**
 * Settings panel with tabs for media devices and — when
 * NEXT_PUBLIC_LK_RECORD_ENDPOINT is configured — server-side room recording.
 * @alpha
 */
export function SettingsMenu(props: SettingsMenuProps) {
  const layoutContext = useMaybeLayoutContext();
  const room = useRoomContext();
  const recordingEndpoint = process.env.NEXT_PUBLIC_LK_RECORD_ENDPOINT;

  const settings = React.useMemo(() => {
    return {
      media: { camera: true, microphone: true, label: 'Media Devices', speaker: true },
      recording: recordingEndpoint ? { label: 'Recording' } : undefined,
    };
  }, [recordingEndpoint]); // was [], added the value the memo reads

  // Only offer tabs whose settings entry is defined. The original filtered the
  // key strings themselves (`t !== undefined`), which are never undefined, so
  // disabled tabs leaked into the list.
  const tabs = React.useMemo(
    () =>
      (Object.keys(settings) as Array<keyof typeof settings>).filter(
        (t) => settings[t] !== undefined,
      ),
    [settings],
  );
  const [activeTab, setActiveTab] = React.useState(tabs[0]);

  const isRecording = useIsRecording();
  const [initialRecStatus, setInitialRecStatus] = React.useState(isRecording);
  const [processingRecRequest, setProcessingRecRequest] = React.useState(false);

  React.useEffect(() => {
    // The recording request has taken effect once the server-side status flips.
    if (initialRecStatus !== isRecording) {
      setProcessingRecRequest(false);
    }
  }, [isRecording, initialRecStatus]);

  /**
   * Starts or stops server-side recording via the configured endpoint.
   * Keeps the button disabled until the room's recording status changes.
   */
  const toggleRoomRecording = async () => {
    if (!recordingEndpoint) {
      throw TypeError('No recording endpoint specified');
    }
    if (room.isE2EEEnabled) {
      throw Error('Recording of encrypted meetings is currently not supported');
    }
    setProcessingRecRequest(true);
    setInitialRecStatus(isRecording);
    const action = isRecording ? 'stop' : 'start';
    const response = await fetch(recordingEndpoint + `/${action}?roomName=${room.name}`);
    if (!response.ok) {
      console.error(
        'Error handling recording request, check server logs:',
        response.status,
        response.statusText,
      );
      setProcessingRecRequest(false);
    }
  };

  return (
    <div className="settings-menu" style={{ width: '100%', position: 'relative' }} {...props}>
      <div className={styles.tabs}>
        {tabs.map(
          (tab) =>
            settings[tab] && (
              <button
                className={`${styles.tab} lk-button`}
                key={tab}
                onClick={() => setActiveTab(tab)}
                aria-pressed={tab === activeTab}
              >
                {
                  // @ts-ignore
                  settings[tab].label
                }
              </button>
            ),
        )}
      </div>
      <div className="tab-content">
        {activeTab === 'media' && (
          <>
            {settings.media && settings.media.camera && (
              <>
                <h3>Camera</h3>
                <section>
                  <CameraSettings />
                </section>
              </>
            )}
            {settings.media && settings.media.microphone && (
              <>
                <h3>Microphone</h3>
                <section>
                  <MicrophoneSettings />
                </section>
              </>
            )}
            {settings.media && settings.media.speaker && (
              <>
                <h3>Speaker &amp; Headphones</h3>
                <section className="lk-button-group">
                  <span className="lk-button">Audio Output</span>
                  <div className="lk-button-group-menu">
                    <MediaDeviceMenu kind="audiooutput"></MediaDeviceMenu>
                  </div>
                </section>
              </>
            )}
          </>
        )}
        {activeTab === 'recording' && (
          <>
            <h3>Record Meeting</h3>
            <section>
              <p>
                {isRecording
                  ? 'Meeting is currently being recorded'
                  : 'No active recordings for this meeting'}
              </p>
              <button disabled={processingRecRequest} onClick={() => toggleRoomRecording()}>
                {isRecording ? 'Stop' : 'Start'} Recording
              </button>
            </section>
          </>
        )}
      </div>
      <div style={{ display: 'flex', justifyContent: 'flex-end', width: '100%' }}>
        <button
          className={`lk-button`}
          onClick={() => layoutContext?.widget.dispatch?.({ msg: 'toggle_settings' })}
        >
          Close
        </button>
      </div>
    </div>
  );
}

25
lib/client-utils.ts Normal file
View File

@@ -0,0 +1,25 @@
/** URI-encodes an E2EE passphrase so it can be carried safely in a URL hash. */
export function encodePassphrase(passphrase: string) {
  const encoded = encodeURIComponent(passphrase);
  return encoded;
}
// Inverse of encodePassphrase. NOTE(review): despite the parameter name, the
// input is URI-encoded (not base64) — confirm with callers before renaming.
export function decodePassphrase(base64String: string) {
  const decoded = decodeURIComponent(base64String);
  return decoded;
}
/** Generates a random room id of the form "xxxx-xxxx" (lowercase alphanumeric). */
export function generateRoomId(): string {
  const half = () => randomString(4);
  return [half(), half()].join('-');
}
/**
 * Returns a random string of the given length drawn from [a-z0-9].
 * Uses Math.random, so it is NOT cryptographically secure.
 */
export function randomString(length: number): string {
  const alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789';
  const chars: string[] = [];
  for (let i = 0; i < length; i++) {
    chars.push(alphabet.charAt(Math.floor(Math.random() * alphabet.length)));
  }
  return chars.join('');
}
/**
 * Heuristic: treat machines reporting fewer than 6 logical cores as low-power.
 * Browser-only — reads navigator.hardwareConcurrency.
 */
export function isLowPowerDevice() {
  const cores = navigator.hardwareConcurrency;
  return cores < 6;
}

35
lib/getLiveKitURL.test.ts Normal file
View File

@@ -0,0 +1,35 @@
import { describe, it, expect } from 'vitest';
import { getLiveKitURL } from './getLiveKitURL';
// Unit tests for getLiveKitURL region routing (run with vitest).
describe('getLiveKitURL', () => {
  it('returns the original URL if no region is provided', () => {
    const projectUrl = 'https://myproject.livekit.cloud';
    expect(getLiveKitURL(projectUrl, null)).toBe(`${projectUrl}/`);
  });
  it('inserts the region into livekit.cloud URLs', () => {
    expect(getLiveKitURL('https://myproject.livekit.cloud', 'eu')).toBe(
      'https://myproject.eu.production.livekit.cloud/',
    );
  });
  it('inserts the region into livekit.cloud URLs and preserves the staging environment', () => {
    expect(getLiveKitURL('https://myproject.staging.livekit.cloud', 'eu')).toBe(
      'https://myproject.eu.staging.livekit.cloud/',
    );
  });
  it('returns the original URL for non-livekit.cloud hosts, even with region', () => {
    const projectUrl = 'https://example.com';
    expect(getLiveKitURL(projectUrl, 'us')).toBe(`${projectUrl}/`);
  });
  it('handles URLs with paths and query params', () => {
    expect(getLiveKitURL('https://myproject.livekit.cloud/room?foo=bar', 'ap')).toBe(
      'https://myproject.ap.production.livekit.cloud/room?foo=bar',
    );
  });
});

12
lib/getLiveKitURL.ts Normal file
View File

@@ -0,0 +1,12 @@
/**
 * Builds the region-specific LiveKit Cloud URL for a project.
 *
 * Hosts that are not on livekit.cloud (self-hosted servers) are returned
 * unchanged. For cloud hosts, the region label is inserted after the project
 * id; an existing 'staging' environment label is preserved, otherwise
 * 'production' is assumed. Path and query are untouched.
 */
export function getLiveKitURL(projectUrl: string, region: string | null): string {
  const url = new URL(projectUrl);
  const isCloudHost = url.hostname.includes('livekit.cloud');
  if (region && isCloudHost) {
    const [projectId, ...rest] = url.hostname.split('.');
    const envParts = rest[0] === 'staging' ? rest : ['production', ...rest];
    url.hostname = [projectId, region, ...envParts].join('.');
  }
  return url.toString();
}

28
lib/types.ts Normal file
View File

@@ -0,0 +1,28 @@
import { LocalAudioTrack, LocalVideoTrack, videoCodecs } from 'livekit-client';
import { VideoCodec } from 'livekit-client';
/**
 * Parameters describing a participant's session when joining a room.
 */
export interface SessionProps {
  roomName: string;
  identity: string;
  // Pre-acquired local tracks — presumably from a pre-join screen; confirm with callers.
  audioTrack?: LocalAudioTrack;
  videoTrack?: LocalVideoTrack;
  // Preferred server region — assumed to be a LiveKit Cloud region code; TODO confirm.
  region?: string;
  turnServer?: RTCIceServer;
  // NOTE(review): looks like this forces media through the TURN relay — verify against caller.
  forceRelay?: boolean;
}
/**
 * Result of token issuance: the resolved identity and its access token.
 */
export interface TokenResult {
  identity: string;
  accessToken: string;
}
/** Type guard: narrows an arbitrary codec string to the supported VideoCodec union. */
export function isVideoCodec(codec: string): codec is VideoCodec {
  return videoCodecs.some((c) => c === codec);
}
/**
 * Everything a client needs to connect to a LiveKit room.
 */
export type ConnectionDetails = {
  serverUrl: string;
  roomName: string;
  participantName: string;
  participantToken: string;
};

View File

@@ -0,0 +1,71 @@
import {
Room,
ParticipantEvent,
RoomEvent,
RemoteTrack,
RemoteTrackPublication,
VideoQuality,
LocalVideoTrack,
isVideoTrack,
} from 'livekit-client';
import * as React from 'react';
export type LowCPUOptimizerOptions = {
  // Lower the quality of the locally published video when CPU constrained.
  reducePublisherVideoQuality: boolean;
  // Request low quality for all subscribed remote videos when CPU constrained.
  reduceSubscriberVideoQuality: boolean;
  // Stop any active processor on the constrained video track.
  disableVideoProcessing: boolean;
};
// Defaults: degrade both directions' quality, but keep processors running.
const defaultOptions: LowCPUOptimizerOptions = {
  reducePublisherVideoQuality: true,
  reduceSubscriberVideoQuality: true,
  disableVideoProcessing: false,
} as const;
/**
 * This hook ensures that on devices with low CPU, the performance is optimised when needed.
 * This is done by primarily reducing the video quality to low when the CPU is constrained.
 * Returns `true` once the local participant has reported a CPU-constrained track.
 */
export function useLowCPUOptimizer(room: Room, options: Partial<LowCPUOptimizerOptions> = {}) {
  const [lowPowerMode, setLowPowerMode] = React.useState(false);
  const opts = React.useMemo(() => ({ ...defaultOptions, ...options }), [options]);

  // React to the local track reporting CPU constraint.
  React.useEffect(() => {
    const handleCpuConstrained = async (track: LocalVideoTrack) => {
      setLowPowerMode(true);
      console.warn('Local track CPU constrained', track);
      if (opts.reducePublisherVideoQuality) {
        track.prioritizePerformance();
      }
      if (opts.disableVideoProcessing && isVideoTrack(track)) {
        track.stopProcessor();
      }
      if (opts.reduceSubscriberVideoQuality) {
        room.remoteParticipants.forEach((participant) => {
          participant.videoTrackPublications.forEach((publication) => {
            publication.setVideoQuality(VideoQuality.LOW);
          });
        });
      }
    };
    room.localParticipant.on(ParticipantEvent.LocalTrackCpuConstrained, handleCpuConstrained);
    return () => {
      room.localParticipant.off(ParticipantEvent.LocalTrackCpuConstrained, handleCpuConstrained);
    };
  }, [
    room,
    opts.reducePublisherVideoQuality,
    opts.reduceSubscriberVideoQuality,
    // was missing from the dep array even though the handler reads it, so a
    // changed value would never re-register the handler
    opts.disableVideoProcessing,
  ]);

  // Once in low-power mode, also downgrade any tracks subscribed later.
  React.useEffect(() => {
    const lowerQuality = (_: RemoteTrack, publication: RemoteTrackPublication) => {
      publication.setVideoQuality(VideoQuality.LOW);
    };
    if (lowPowerMode && opts.reduceSubscriberVideoQuality) {
      room.on(RoomEvent.TrackSubscribed, lowerQuality);
    }
    return () => {
      room.off(RoomEvent.TrackSubscribed, lowerQuality);
    };
  }, [lowPowerMode, room, opts.reduceSubscriberVideoQuality]);

  return lowPowerMode;
}

15
lib/useSetupE2EE.ts Normal file
View File

@@ -0,0 +1,15 @@
import React from 'react';
import { ExternalE2EEKeyProvider } from 'livekit-client';
import { decodePassphrase } from './client-utils';
export function useSetupE2EE() {
const e2eePassphrase =
typeof window !== 'undefined' ? decodePassphrase(location.hash.substring(1)) : undefined;
const worker: Worker | undefined =
typeof window !== 'undefined' && e2eePassphrase
? new Worker(new URL('livekit-client/e2ee-worker', import.meta.url))
: undefined;
return { worker, e2eePassphrase };
}