27 Commits

Author SHA1 Message Date
Valere Fedronic
40fdef89eb Merge pull request #3780 from JephDiel/Download-Avatars-with-MC4039
Some checks failed
Build / build_full_element_call (push) Has been cancelled
Build / build_embedded_element_call (push) Has been cancelled
Build / build_sdk_element_call (push) Has been cancelled
Build & publish embedded packages for releases / Versioning (push) Has been cancelled
Test / Run unit tests (push) Has been cancelled
Test / Run end-to-end tests (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
Build / deploy_develop (push) Has been cancelled
Build / docker_for_develop (push) Has been cancelled
Build & publish embedded packages for releases / build_element_call (push) Has been cancelled
Build & publish embedded packages for releases / Publish tarball (push) Has been cancelled
Build & publish embedded packages for releases / Publish NPM (push) Has been cancelled
Build & publish embedded packages for releases / Publish Android AAR (push) Has been cancelled
Build & publish embedded packages for releases / Publish SwiftPM Library (push) Has been cancelled
Build & publish embedded packages for releases / Update release notes (push) Has been cancelled
Download avatars using the MC4039 Widget API
2026-03-26 09:03:15 +01:00
Valere Fedronic
2ab5e07f73 Merge pull request #3801 from element-hq/valere/default_route
Audio Control | Default to using earpiece when doing audio vs speaker for video call (depending on presence of headset or not)
2026-03-25 13:16:53 +01:00
Valere
09092e97c8 fix error in test 2026-03-25 12:15:12 +01:00
Valere
cb6bef3849 try extend timeout? 2026-03-25 10:10:29 +01:00
Valere
4d491be4a9 post merge fix 2026-03-25 08:21:32 +01:00
Valere
aad160a715 Merge branch 'livekit' into valere/default_route 2026-03-24 18:28:54 +01:00
Valere
fc61a36d4a review: improve comments 2026-03-24 18:25:09 +01:00
Valere
1bc2abb84f rename ControlledAudioOutput to iOSControlledAudioOutput 2026-03-24 18:03:33 +01:00
Valere
4f518819d3 review: extract ControlledAudioOutput in its own file 2026-03-24 18:02:27 +01:00
Robin
b40feae7da Merge pull request #3810 from element-hq/robin/button-accessibility
Improve accessibility of microphone, camera, and screen share buttons
2026-03-24 17:36:36 +01:00
Robin
e15761d8ee Merge pull request #3703 from element-hq/renovate/compound
Update Compound
2026-03-24 17:20:01 +01:00
Robin
c60ed50a9d Attempt to fix end-to-end widget tests 2026-03-24 17:04:28 +01:00
Robin
6dcd883fc3 Update test snapshots 2026-03-24 16:10:19 +01:00
Robin
c0d60b2c29 Improve accessibility of microphone, camera, and screen share buttons
Taking Valere's suggestion of giving them the 'switch' role. Also, the aria-label attributes were redundant (having tooltips already gives the buttons aria-labelledby).
2026-03-21 02:53:48 +01:00
Valere
c8b8d350d5 Merge branch 'livekit' into valere/default_route 2026-03-19 18:41:36 +01:00
Valere
9c2b2d4780 fix: playwright, new web popup interfering with test to witch room 2026-03-19 10:44:38 +01:00
Valere
396225a900 fix playwright earpiece mode 2026-03-19 10:44:38 +01:00
Valere
c4ec52ae15 add some test for normal AudiOutput 2026-03-19 10:44:38 +01:00
Valere
4be2bc7560 android: Select default output device based on callIntent
Add comments on existing code
Extracted a specific android controller for isolation and better testing

lint fixes

Fix device update logic and more tests

better typescript
2026-03-19 10:44:38 +01:00
renovate[bot]
32ea8f522c Update Compound 2026-03-15 01:47:49 +00:00
JephDiel
997cc9bb27 Lint fixes v2 2026-03-13 11:41:58 -05:00
JephDiel
fa74aaa81e Import order lint fixes
Co-authored-by: Timo <16718859+toger5@users.noreply.github.com>
2026-03-13 11:33:07 -05:00
JephDiel
af807489f9 Import order lint fixes
Co-authored-by: Timo <16718859+toger5@users.noreply.github.com>
2026-03-13 11:31:55 -05:00
JephDiel
b198721969 Ignore stale downloads
If src or sizePx changes while we're downloading,
discard the now-stale fetch result so we don't
override the fresh one.
2026-03-12 22:20:19 -05:00
JephDiel
8ecb1b3dbf Simplify widget detection
Use the exists check for the widget API directly
instead of a feature flag.
2026-03-12 22:18:38 -05:00
JephDiel
699e31f59a Download Avatar from relevent source
Instead of relying on failures directly use the
available method to download the avatar.
2026-03-09 23:16:12 -05:00
JephDiel
005b965fba Download avatars using the Widget API
If we can't authenticate media because we're
running as a widget, use the MC4039 widget API
instead of a direct fetch to download the avatar.
2026-03-07 19:53:57 -06:00
25 changed files with 1657 additions and 335 deletions

View File

@@ -22,8 +22,8 @@ test("Start a new call then leave and show the feedback screen", async ({
await expect(page.getByTestId("lobby_joinCall")).toBeVisible();
// Check the button toolbar
// await expect(page.getByRole('button', { name: 'Mute microphone' })).toBeVisible();
// await expect(page.getByRole('button', { name: 'Stop video' })).toBeVisible();
// await expect(page.getByRole('switch', { name: 'Mute microphone' })).toBeVisible();
// await expect(page.getByRole('switch', { name: 'Stop video' })).toBeVisible();
await expect(page.getByRole("button", { name: "Settings" })).toBeVisible();
await expect(page.getByRole("button", { name: "End call" })).toBeVisible();

View File

@@ -100,8 +100,16 @@ mobileTest(
{ id: "earpiece", name: "Handset", isEarpiece: true },
{ id: "headphones", name: "Headphones" },
]);
window.controls.setAudioDevice("earpiece");
});
// Open settings to select earpiece
await guestPage.getByRole("button", { name: "Settings" }).click();
await guestPage.getByText("Handset", { exact: true }).click();
// dismiss settings
await guestPage.locator("#root").getByLabel("Settings").press("Escape");
await guestPage.pause();
await expect(
guestPage.getByRole("heading", { name: "Handset Mode" }),
).toBeVisible();

View File

@@ -49,12 +49,12 @@ test("can only interact with header and footer while reconnecting", async ({
).toBeVisible();
// Tab order should jump directly from header to footer, skipping media tiles
await page.getByRole("button", { name: "Mute microphone" }).focus();
await page.getByRole("switch", { name: "Mute microphone" }).focus();
await expect(
page.getByRole("button", { name: "Mute microphone" }),
page.getByRole("switch", { name: "Mute microphone" }),
).toBeFocused();
await page.keyboard.press("Tab");
await expect(page.getByRole("button", { name: "Stop video" })).toBeFocused();
await expect(page.getByRole("switch", { name: "Stop video" })).toBeFocused();
// Most critically, we should be able to press the hangup button
await page.getByRole("button", { name: "End call" }).click();
});

View File

@@ -55,13 +55,10 @@ widgetTest("Create and join a group call", async ({ addUser, browserName }) => {
const frame = user.page
.locator('iframe[title="Element Call"]')
.contentFrame();
// No lobby, should start with video on
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = frame.getByTestId("incall_videomute");
await expect(videoButton).toBeVisible();
// video should be on
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
await expect(
frame.getByRole("switch", { name: "Stop video", checked: true }),
).toBeVisible();
}
// We should see 5 video tiles everywhere now
@@ -101,13 +98,15 @@ widgetTest("Create and join a group call", async ({ addUser, browserName }) => {
const florianFrame = florian.page
.locator('iframe[title="Element Call"]')
.contentFrame();
const florianMuteButton = florianFrame.getByTestId("incall_videomute");
await florianMuteButton.click();
const florianVideoButton = florianFrame.getByRole("switch", {
name: /video/,
});
await expect(florianVideoButton).toHaveAccessibleName("Stop video");
await expect(florianVideoButton).toBeChecked();
await florianVideoButton.click();
// Now the button should indicate we can start video
await expect(florianMuteButton).toHaveAttribute(
"aria-label",
/^Start video$/,
);
await expect(florianVideoButton).toHaveAccessibleName("Start video");
await expect(florianVideoButton).not.toBeChecked();
// wait a bit for the state to propagate
await valere.page.waitForTimeout(3000);

View File

@@ -47,14 +47,17 @@ widgetTest("Footer interaction in PiP", async ({ addUser, browserName }) => {
{
// Check for a bug where the video had the wrong fit in PIP
const hangupBtn = iFrame.getByRole("button", { name: "End call" });
const audioBtn = iFrame.getByTestId("incall_mute");
const videoBtn = iFrame.getByTestId("incall_videomute");
await expect(hangupBtn).toBeVisible();
const audioBtn = iFrame.getByRole("switch", { name: /microphone/ });
const videoBtn = iFrame.getByRole("switch", { name: /video/ });
await expect(
iFrame.getByRole("button", { name: "End call" }),
).toBeVisible();
await expect(audioBtn).toBeVisible();
await expect(videoBtn).toBeVisible();
await expect(audioBtn).toHaveAttribute("aria-label", /^Mute microphone$/);
await expect(videoBtn).toHaveAttribute("aria-label", /^Stop video$/);
await expect(audioBtn).toHaveAccessibleName("Mute microphone");
await expect(audioBtn).toBeChecked();
await expect(videoBtn).toHaveAccessibleName("Stop video");
await expect(videoBtn).toBeChecked();
await videoBtn.click();
await audioBtn.click();
@@ -62,7 +65,9 @@ widgetTest("Footer interaction in PiP", async ({ addUser, browserName }) => {
// stop hovering on any of the buttons
await iFrame.getByTestId("videoTile").hover();
await expect(audioBtn).toHaveAttribute("aria-label", /^Unmute microphone$/);
await expect(videoBtn).toHaveAttribute("aria-label", /^Start video$/);
await expect(audioBtn).toHaveAccessibleName("Unmute microphone");
await expect(audioBtn).not.toBeChecked();
await expect(videoBtn).toHaveAccessibleName("Start video");
await expect(videoBtn).not.toBeChecked();
}
});

View File

@@ -40,16 +40,14 @@ widgetTest("Put call in PIP", async ({ addUser, browserName }) => {
await TestHelpers.joinCallInCurrentRoom(timo.page);
{
const frame = timo.page
.locator('iframe[title="Element Call"]')
.contentFrame();
const videoButton = frame.getByTestId("incall_videomute");
await expect(videoButton).toBeVisible();
// check that the video is on
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
}
await expect(
frame.getByRole("switch", { name: "Stop video", checked: true }),
).toBeVisible();
// Switch to the other room, the call should go to PIP
await TestHelpers.switchToRoomNamed(valere.page, "DoubleTask");

View File

@@ -152,6 +152,22 @@ export class TestHelpers {
}
}
public static async maybeDismissKeyBackupToast(page: Page): Promise<void> {
const toast = page
.locator(".mx_Toast_toast")
.getByText("Back up your chats");
try {
await expect(toast).toBeVisible({ timeout: 700 });
await page
.locator(".mx_Toast_toast")
.getByRole("button", { name: "Dismiss" })
.click();
} catch {
// toast not visible, continue as normal
}
}
public static async createRoom(
name: string,
page: Page,
@@ -167,6 +183,7 @@ export class TestHelpers {
await page.getByRole("button", { name: "Create room" }).click();
await expect(page.getByText("You created this room.")).toBeVisible();
await expect(page.getByText("Encryption enabled")).toBeVisible();
await TestHelpers.maybeDismissKeyBackupToast(page);
// Invite users if any
if (andInvite.length > 0) {
@@ -201,6 +218,7 @@ export class TestHelpers {
await expect(
page.getByRole("main").getByRole("heading", { name: roomName }),
).toBeVisible();
await TestHelpers.maybeDismissKeyBackupToast(page);
}
/**

View File

@@ -54,34 +54,36 @@ widgetTest(
.contentFrame();
// ASSERT the button states for whistler (the callee)
{
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = whistlerFrame.getByTestId("incall_videomute");
// video should be off by default in a voice call
await expect(videoButton).toHaveAttribute("aria-label", /^Start video$/);
const audioButton = whistlerFrame.getByTestId("incall_mute");
await expect(
whistlerFrame.getByRole("switch", {
name: "Start video",
checked: false,
}),
).toBeVisible();
// audio should be on for the voice call
await expect(audioButton).toHaveAttribute(
"aria-label",
/^Mute microphone$/,
);
}
await expect(
whistlerFrame.getByRole("switch", {
name: "Mute microphone",
checked: true,
}),
).toBeVisible();
// ASSERT the button states for brools (the caller)
{
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = brooksFrame.getByTestId("incall_videomute");
// video should be off by default in a voice call
await expect(videoButton).toHaveAttribute("aria-label", /^Start video$/);
const audioButton = brooksFrame.getByTestId("incall_mute");
await expect(
whistlerFrame.getByRole("switch", {
name: "Start video",
checked: false,
}),
).toBeVisible();
// audio should be on for the voice call
await expect(audioButton).toHaveAttribute(
"aria-label",
/^Mute microphone$/,
);
}
await expect(
whistlerFrame.getByRole("switch", {
name: "Mute microphone",
checked: true,
}),
).toBeVisible();
// In order to confirm that the call is disconnected we will check that the message composer is shown again.
// So first we need to confirm that it is hidden when in the call.
@@ -93,10 +95,7 @@ widgetTest(
).not.toBeVisible();
// ASSERT hanging up on one side ends the call for both
{
const hangupButton = brooksFrame.getByTestId("incall_leave");
await hangupButton.click();
}
await brooksFrame.getByRole("button", { name: "End call" }).click();
// The widget should be closed on both sides and the timeline should be back on screen
await expect(
@@ -148,34 +147,30 @@ widgetTest(
.contentFrame();
// ASSERT the button states for whistler (the callee)
{
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = whistlerFrame.getByTestId("incall_videomute");
// video should be on by default in a voice call
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
const audioButton = whistlerFrame.getByTestId("incall_mute");
// audio should be on for the voice call
await expect(audioButton).toHaveAttribute(
"aria-label",
/^Mute microphone$/,
);
}
// video should be off by default in a video call
await expect(
whistlerFrame.getByRole("switch", { name: "Stop video", checked: true }),
).toBeVisible();
// audio should be on too
await expect(
whistlerFrame.getByRole("switch", {
name: "Mute microphone",
checked: true,
}),
).toBeVisible();
// ASSERT the button states for brools (the caller)
{
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = brooksFrame.getByTestId("incall_videomute");
// video should be on by default in a voice call
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
const audioButton = brooksFrame.getByTestId("incall_mute");
// audio should be on for the voice call
await expect(audioButton).toHaveAttribute(
"aria-label",
/^Mute microphone$/,
);
}
// video should be off by default in a video call
await expect(
whistlerFrame.getByRole("switch", { name: "Stop video", checked: true }),
).toBeVisible();
// audio should be on too
await expect(
whistlerFrame.getByRole("switch", {
name: "Mute microphone",
checked: true,
}),
).toBeVisible();
// In order to confirm that the call is disconnected we will check that the message composer is shown again.
// So first we need to confirm that it is hidden when in the call.
@@ -187,10 +182,7 @@ widgetTest(
).not.toBeVisible();
// ASSERT hanging up on one side ends the call for both
{
const hangupButton = brooksFrame.getByTestId("incall_leave");
await hangupButton.click();
}
await brooksFrame.getByRole("button", { name: "End call" }).click();
// The widget should be closed on both sides and the timeline should be back on screen
await expect(

View File

@@ -9,14 +9,18 @@ import { afterEach, expect, test, vi } from "vitest";
import { render, screen } from "@testing-library/react";
import { type MatrixClient } from "matrix-js-sdk";
import { type FC, type PropsWithChildren } from "react";
import { type WidgetApi } from "matrix-widget-api";
import { ClientContextProvider } from "./ClientContext";
import { Avatar } from "./Avatar";
import { mockMatrixRoomMember, mockRtcMembership } from "./utils/test";
import { widget } from "./widget";
const TestComponent: FC<
PropsWithChildren<{ client: MatrixClient; supportsThumbnails?: boolean }>
> = ({ client, children, supportsThumbnails }) => {
PropsWithChildren<{
client: MatrixClient;
}>
> = ({ client, children }) => {
return (
<ClientContextProvider
value={{
@@ -24,7 +28,6 @@ const TestComponent: FC<
disconnected: false,
supportedFeatures: {
reactions: true,
thumbnails: supportsThumbnails ?? true,
},
setClient: vi.fn(),
authenticated: {
@@ -40,6 +43,12 @@ const TestComponent: FC<
);
};
vi.mock("./widget", () => ({
widget: {
api: null, // Ideally we'd only mock this in the as a widget test so the whole module is otherwise null, but just nulling `api` by default works well enough
},
}));
afterEach(() => {
vi.unstubAllGlobals();
});
@@ -73,36 +82,7 @@ test("should just render a placeholder when the user has no avatar", () => {
expect(client.mxcUrlToHttp).toBeCalledTimes(0);
});
test("should just render a placeholder when thumbnails are not supported", () => {
const client = vi.mocked<MatrixClient>({
getAccessToken: () => "my-access-token",
mxcUrlToHttp: () => vi.fn(),
} as unknown as MatrixClient);
vi.spyOn(client, "mxcUrlToHttp");
const member = mockMatrixRoomMember(
mockRtcMembership("@alice:example.org", "AAAA"),
{
getMxcAvatarUrl: () => "mxc://example.org/alice-avatar",
},
);
const displayName = "Alice";
render(
<TestComponent client={client} supportsThumbnails={false}>
<Avatar
id={member.userId}
name={displayName}
size={96}
src={member.getMxcAvatarUrl()}
/>
</TestComponent>,
);
const element = screen.getByRole("img", { name: "@alice:example.org" });
expect(element.tagName).toEqual("SPAN");
expect(client.mxcUrlToHttp).toBeCalledTimes(0);
});
test("should attempt to fetch authenticated media", async () => {
test("should attempt to fetch authenticated media from the server", async () => {
const expectedAuthUrl = "http://example.org/media/alice-avatar";
const expectedObjectURL = "my-object-url";
const accessToken = "my-access-token";
@@ -154,3 +134,47 @@ test("should attempt to fetch authenticated media", async () => {
headers: { Authorization: `Bearer ${accessToken}` },
});
});
test("should attempt to use widget API if running as a widget", async () => {
const expectedMXCUrl = "mxc://example.org/alice-avatar";
const expectedObjectURL = "my-object-url";
const theBlob = new Blob([]);
// vitest doesn't have a implementation of create/revokeObjectURL, so we need
// to delete the property. It's a bit odd, but it works.
Reflect.deleteProperty(global.window.URL, "createObjectURL");
globalThis.URL.createObjectURL = vi.fn().mockReturnValue(expectedObjectURL);
Reflect.deleteProperty(global.window.URL, "revokeObjectURL");
globalThis.URL.revokeObjectURL = vi.fn();
const client = vi.mocked<MatrixClient>({
getAccessToken: () => undefined,
} as unknown as MatrixClient);
widget!.api = { downloadFile: vi.fn() } as unknown as WidgetApi;
vi.spyOn(widget!.api, "downloadFile").mockResolvedValue({ file: theBlob });
const member = mockMatrixRoomMember(
mockRtcMembership("@alice:example.org", "AAAA"),
{
getMxcAvatarUrl: () => expectedMXCUrl,
},
);
const displayName = "Alice";
render(
<TestComponent client={client}>
<Avatar
id={member.userId}
name={displayName}
size={96}
src={member.getMxcAvatarUrl()}
/>
</TestComponent>,
);
// Fetch is asynchronous, so wait for this to resolve.
await vi.waitUntil(() =>
document.querySelector(`img[src='${expectedObjectURL}']`),
);
expect(widget!.api.downloadFile).toBeCalledWith(expectedMXCUrl);
});

View File

@@ -14,8 +14,10 @@ import {
} from "react";
import { Avatar as CompoundAvatar } from "@vector-im/compound-web";
import { type MatrixClient } from "matrix-js-sdk";
import { type WidgetApi } from "matrix-widget-api";
import { useClientState } from "./ClientContext";
import { widget } from "./widget";
export enum Size {
XS = "xs",
@@ -78,50 +80,54 @@ export const Avatar: FC<Props> = ({
const sizePx = useMemo(
() =>
Object.values(Size).includes(size as Size)
? sizes.get(size as Size)
? sizes.get(size as Size)!
: (size as number),
[size],
);
const [avatarUrl, setAvatarUrl] = useState<string | undefined>(undefined);
// In theory, a change in `clientState` or `sizePx` could run extra getAvatarFromWidgetAPI calls, but in practice they should be stable long before this code runs.
useEffect(() => {
if (clientState?.state !== "valid") {
return;
}
const { authenticated, supportedFeatures } = clientState;
const client = authenticated?.client;
if (!client || !src || !sizePx || !supportedFeatures.thumbnails) {
if (!src) {
setAvatarUrl(undefined);
return;
}
const token = client.getAccessToken();
if (!token) {
return;
}
const resolveSrc = getAvatarUrl(client, src, sizePx);
if (!resolveSrc) {
let blob: Promise<Blob>;
if (widget?.api) {
blob = getAvatarFromWidgetAPI(widget.api, src);
} else if (
clientState?.state === "valid" &&
clientState.authenticated?.client &&
sizePx
) {
blob = getAvatarFromServer(clientState.authenticated.client, src, sizePx);
} else {
setAvatarUrl(undefined);
return;
}
let objectUrl: string | undefined;
fetch(resolveSrc, {
headers: {
Authorization: `Bearer ${token}`,
},
})
.then(async (req) => req.blob())
let stale = false;
blob
.then((blob) => {
if (stale) {
return;
}
objectUrl = URL.createObjectURL(blob);
setAvatarUrl(objectUrl);
})
.catch((ex) => {
if (stale) {
return;
}
setAvatarUrl(undefined);
});
return (): void => {
stale = true;
if (objectUrl) {
URL.revokeObjectURL(objectUrl);
}
@@ -140,3 +146,44 @@ export const Avatar: FC<Props> = ({
/>
);
};
async function getAvatarFromServer(
client: MatrixClient,
src: string,
sizePx: number,
): Promise<Blob> {
const httpSrc = getAvatarUrl(client, src, sizePx);
if (!httpSrc) {
throw new Error("Failed to get http avatar URL");
}
const token = client.getAccessToken();
if (!token) {
throw new Error("Failed to get access token");
}
const request = await fetch(httpSrc, {
headers: {
Authorization: `Bearer ${token}`,
},
});
const blob = await request.blob();
return blob;
}
async function getAvatarFromWidgetAPI(
api: WidgetApi,
src: string,
): Promise<Blob> {
const response = await api.downloadFile(src);
const file = response.file;
// element-web sends a Blob, and the MSC4039 is considering changing the spec to strictly Blob, so only handling that
if (!(file instanceof Blob)) {
throw new Error("Downloaded file is not a Blob");
}
return file;
}

View File

@@ -48,7 +48,6 @@ export type ValidClientState = {
disconnected: boolean;
supportedFeatures: {
reactions: boolean;
thumbnails: boolean;
};
setClient: (client: MatrixClient, session: Session) => void;
};
@@ -249,7 +248,6 @@ export const ClientProvider: FC<Props> = ({ children }) => {
const [isDisconnected, setIsDisconnected] = useState(false);
const [supportsReactions, setSupportsReactions] = useState(false);
const [supportsThumbnails, setSupportsThumbnails] = useState(false);
const state: ClientState | undefined = useMemo(() => {
if (alreadyOpenedErr) {
@@ -275,7 +273,6 @@ export const ClientProvider: FC<Props> = ({ children }) => {
disconnected: isDisconnected,
supportedFeatures: {
reactions: supportsReactions,
thumbnails: supportsThumbnails,
},
};
}, [
@@ -286,7 +283,6 @@ export const ClientProvider: FC<Props> = ({ children }) => {
setClient,
isDisconnected,
supportsReactions,
supportsThumbnails,
]);
const onSync = useCallback(
@@ -312,8 +308,6 @@ export const ClientProvider: FC<Props> = ({ children }) => {
}
if (initClientState.widgetApi) {
// There is currently no widget API for authenticated media thumbnails.
setSupportsThumbnails(false);
const reactSend = initClientState.widgetApi.hasCapability(
"org.matrix.msc2762.send.event:m.reaction",
);
@@ -335,7 +329,6 @@ export const ClientProvider: FC<Props> = ({ children }) => {
}
} else {
setSupportsReactions(true);
setSupportsThumbnails(true);
}
return (): void => {

View File

@@ -14,7 +14,7 @@ exports[`AppBar > renders 1`] = `
>
<button
aria-labelledby="_r_0_"
class="_icon-button_1pz9o_8"
class="_icon-button_1215g_8"
data-kind="primary"
role="button"
style="--cpd-icon-button-size: 32px;"

View File

@@ -37,9 +37,10 @@ export const MicButton: FC<MicButtonProps> = ({ enabled, ...props }) => {
<Tooltip label={label}>
<CpdButton
iconOnly
aria-label={label}
Icon={Icon}
kind={enabled ? "primary" : "secondary"}
role="switch"
aria-checked={enabled}
{...props}
/>
</Tooltip>
@@ -62,9 +63,10 @@ export const VideoButton: FC<VideoButtonProps> = ({ enabled, ...props }) => {
<Tooltip label={label}>
<CpdButton
iconOnly
aria-label={label}
Icon={Icon}
kind={enabled ? "primary" : "secondary"}
role="switch"
aria-checked={enabled}
{...props}
/>
</Tooltip>
@@ -91,6 +93,8 @@ export const ShareScreenButton: FC<ShareScreenButtonProps> = ({
iconOnly
Icon={ShareScreenSolidIcon}
kind={enabled ? "primary" : "secondary"}
role="switch"
aria-checked={enabled}
{...props}
/>
</Tooltip>
@@ -112,7 +116,6 @@ export const EndCallButton: FC<EndCallButtonProps> = ({
<CpdButton
className={classNames(className, styles.endCall)}
iconOnly
aria-label={t("hangup_button_label")}
Icon={EndCallIcon}
destructive
{...props}

View File

@@ -33,12 +33,38 @@ export interface Controls {
showNativeOutputDevicePicker?: () => void;
}
/**
* Output Audio device when using the controlled audio output mode (mobile).
*/
export interface OutputDevice {
id: string;
name: string;
/**
* `forEarpiece` in an iOS only flag, that will be set on the default speaker device.
* The default speaker device will be used for the earpiece mode by
* using a stereo pan and reducing the volume significantly. (in combination this is similar to a dedicated earpiece mode)
* - on iOS this is true if output is routed to speaker.
* In that case then ElementCalls manually appends an earpiece device with id `EARPIECE_CONFIG_ID` and `{ type: "earpiece" }`
* - on Android this is unused.
*/
forEarpiece?: boolean;
/**
* Is the device the OS earpiece audio configuration?
* - on iOS always undefined
* - on Android true for the `TYPE_BUILTIN_EARPIECE`
*/
isEarpiece?: boolean;
/**
* Is the device the OS default speaker:
* - on iOS always true if output is routed to speaker. In other case iOS on declare a `dummy` id device.
* - on Android true for the `TYPE_BUILTIN_SPEAKER`
*/
isSpeaker?: boolean;
/**
* Is the device the OS default external headset (bluetooth):
* - on iOS always undefined.
* - on Android true for the `TYPE_BLUETOOTH_SCO`
*/
isExternalHeadset?: boolean;
}
@@ -47,8 +73,16 @@ export interface OutputDevice {
*/
export const setPipEnabled$ = new Subject<boolean>();
/**
* Stores the list of available controlled audio output devices.
* This is set when the native code calls `setAvailableAudioDevices` with the list of available audio output devices.
*/
export const availableOutputDevices$ = new Subject<OutputDevice[]>();
/**
* Stores the current audio output device id.
* This is set when the native code calls `setAudioDevice`
*/
export const outputDevice$ = new Subject<string>();
/**
@@ -80,16 +114,41 @@ window.controls = {
setPipEnabled$.next(false);
},
/**
* Reverse engineered:
*
* - on iOS:
* This always a list of one thing. If current route output is speaker it returns
* the single `{"id":"Speaker","name":"Speaker","forEarpiece":true,"isSpeaker":true}` Notice that EC will
* also manually add a virtual earpiece device with id `EARPIECE_CONFIG_ID` and `{ type: "earpiece" }`.
* If the route output is not speaker then it will be `{id: 'dummy', name: 'dummy'}`
*
*
* - on Android:
* This is a list of all available output audio devices. The `id` is the Android AudioDeviceInfo.getId()
* and the `name` is based the Android AudioDeviceInfo.productName (mapped to static strings for known types)
* The `isEarpiece`, `isSpeaker` and `isExternalHeadset` are set based on the Android AudioDeviceInfo.type
* matching the corresponding types for earpiece, speaker and bluetooth headset.
*/
setAvailableAudioDevices(devices: OutputDevice[]): void {
logger.info("setAvailableAudioDevices called from native:", devices);
logger.info(
"[MediaDevices controls] setAvailableAudioDevices called from native:",
devices,
);
availableOutputDevices$.next(devices);
},
setAudioDevice(id: string): void {
logger.info("setAudioDevice called from native", id);
logger.info(
"[MediaDevices controls] setAudioDevice called from native",
id,
);
outputDevice$.next(id);
},
setAudioEnabled(enabled: boolean): void {
logger.info("setAudioEnabled called from native:", enabled);
logger.info(
"[MediaDevices controls] setAudioEnabled called from native:",
enabled,
);
if (!setAudioEnabled$.observed)
throw new Error(
"Output controls are disabled. No setAudioEnabled$ observer",

View File

@@ -67,6 +67,6 @@ Initializer.initBeforeReact()
);
})
.catch((e) => {
logger.error("Failed to initialize app", e);
logger.error(`Failed to initialize app ${e.message}`, e);
root.render(e.message);
});

View File

@@ -127,25 +127,12 @@ exports[`InCallView > rendering > renders 1`] = `
viewBox="0 0 24 24"
width="1em"
xmlns="http://www.w3.org/2000/svg"
>
<g
clip-path="url(#a)"
>
<path
clip-rule="evenodd"
d="M8.929 15.1a13.6 13.6 0 0 0 4.654 3.066q2.62 1.036 5.492.923h.008l.003-.004.003-.002-.034-3.124-3.52-.483-1.791 1.792-.645-.322a13.5 13.5 0 0 1-3.496-2.52 13.4 13.4 0 0 1-2.52-3.496l-.322-.644 1.792-1.792-.483-3.519-3.123-.034-.003.002-.003.004v.002a13.65 13.65 0 0 0 .932 5.492A13.4 13.4 0 0 0 8.93 15.1m3.92 4.926a15.6 15.6 0 0 1-5.334-3.511 15.4 15.4 0 0 1-3.505-5.346 15.6 15.6 0 0 1-1.069-6.274 1.93 1.93 0 0 1 .589-1.366c.366-.366.84-.589 1.386-.589h.01l3.163.035a1.96 1.96 0 0 1 1.958 1.694v.005l.487 3.545v.003c.043.297.025.605-.076.907a2 2 0 0 1-.485.773l-.762.762a11.4 11.4 0 0 0 3.206 3.54q.457.33.948.614l.762-.761a2 2 0 0 1 .774-.486c.302-.1.61-.118.907-.076l3.553.487a1.96 1.96 0 0 1 1.694 1.958l.034 3.174c0 .546-.223 1.02-.588 1.386-.361.36-.827.582-1.363.588a15.3 15.3 0 0 1-6.29-1.062"
d="M8.929 15.1a13.6 13.6 0 0 0 4.654 3.066q2.62 1.036 5.492.923h.008l.003-.004.003-.002-.034-3.124-3.52-.483-1.791 1.792-.645-.322a13.5 13.5 0 0 1-3.496-2.52 13.4 13.4 0 0 1-2.52-3.496l-.322-.645 1.792-1.791-.483-3.52-3.123-.033-.003.002-.003.004v.002a13.65 13.65 0 0 0 .932 5.492A13.4 13.4 0 0 0 8.93 15.1m3.92 4.926a15.6 15.6 0 0 1-5.334-3.511 15.4 15.4 0 0 1-3.505-5.346 15.6 15.6 0 0 1-1.069-6.274 1.93 1.93 0 0 1 .589-1.366c.366-.366.84-.589 1.386-.589h.01l3.163.035a1.96 1.96 0 0 1 1.958 1.694v.005l.487 3.545v.003c.043.297.025.605-.076.907a2 2 0 0 1-.485.773l-.762.762a11.3 11.3 0 0 0 1.806 2.348 11.4 11.4 0 0 0 2.348 1.806l.762-.762a2 2 0 0 1 .774-.485c.302-.1.61-.118.907-.076l3.553.487a1.96 1.96 0 0 1 1.694 1.958l.034 3.174c0 .546-.223 1.02-.588 1.386-.36.36-.827.582-1.363.588a15.3 15.3 0 0 1-6.29-1.062"
fill-rule="evenodd"
/>
</g>
<defs>
<clippath
id="a"
>
<path
d="M0 0h24v24H0z"
/>
</clippath>
</defs>
</svg>
</div>
<h2
@@ -285,14 +272,14 @@ exports[`InCallView > rendering > renders 1`] = `
class="buttons"
>
<button
aria-checked="false"
aria-disabled="true"
aria-label="Unmute microphone"
aria-labelledby="_r_8_"
class="_button_13vu4_8 _has-icon_13vu4_60 _icon-only_13vu4_53"
data-kind="secondary"
data-size="lg"
data-testid="incall_mute"
role="button"
role="switch"
tabindex="0"
>
<svg
@@ -309,14 +296,14 @@ exports[`InCallView > rendering > renders 1`] = `
</svg>
</button>
<button
aria-checked="false"
aria-disabled="true"
aria-label="Start video"
aria-labelledby="_r_d_"
class="_button_13vu4_8 _has-icon_13vu4_60 _icon-only_13vu4_53"
data-kind="secondary"
data-size="lg"
data-testid="incall_videomute"
role="button"
role="switch"
tabindex="0"
>
<svg
@@ -354,7 +341,6 @@ exports[`InCallView > rendering > renders 1`] = `
</svg>
</button>
<button
aria-label="End call"
aria-labelledby="_r_n_"
class="_button_13vu4_8 endCall _has-icon_13vu4_60 _icon-only_13vu4_53 _destructive_13vu4_110"
data-kind="primary"

View File

@@ -234,12 +234,12 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
class="_inline-field-control_19upo_44"
>
<div
class="_container_1qhtc_10"
class="_container_1ug7n_10"
>
<input
aria-describedby="radix-_r_9_ radix-_r_b_ radix-_r_d_"
checked=""
class="_input_1qhtc_18"
class="_input_1ug7n_18"
id="radix-_r_8_"
name="_r_0_"
title=""
@@ -247,7 +247,7 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
value="legacy"
/>
<div
class="_ui_1qhtc_19"
class="_ui_1ug7n_19"
/>
</div>
</div>
@@ -275,11 +275,11 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
class="_inline-field-control_19upo_44"
>
<div
class="_container_1qhtc_10"
class="_container_1ug7n_10"
>
<input
aria-describedby="radix-_r_9_ radix-_r_b_ radix-_r_d_"
class="_input_1qhtc_18"
class="_input_1ug7n_18"
id="radix-_r_a_"
name="_r_0_"
title=""
@@ -287,7 +287,7 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
value="compatibility"
/>
<div
class="_ui_1qhtc_19"
class="_ui_1ug7n_19"
/>
</div>
</div>
@@ -315,11 +315,11 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
class="_inline-field-control_19upo_44"
>
<div
class="_container_1qhtc_10"
class="_container_1ug7n_10"
>
<input
aria-describedby="radix-_r_9_ radix-_r_b_ radix-_r_d_"
class="_input_1qhtc_18"
class="_input_1ug7n_18"
id="radix-_r_c_"
name="_r_0_"
title=""
@@ -327,7 +327,7 @@ exports[`DeveloperSettingsTab > renders and matches snapshot 1`] = `
value="matrix_2_0"
/>
<div
class="_ui_1qhtc_19"
class="_ui_1ug7n_19"
/>
</div>
</div>

View File

@@ -78,7 +78,6 @@ function renderWithMockClient(
disconnected: false,
supportedFeatures: {
reactions: true,
thumbnails: true,
},
setClient: vi.fn(),
authenticated: {

View File

@@ -0,0 +1,563 @@
/*
Copyright 2026 Element Corp.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { it, vi, expect, beforeEach, afterEach, describe } from "vitest";
import { firstValueFrom, of, Subject, take, toArray } from "rxjs";
import { type RTCCallIntent } from "matrix-js-sdk/lib/matrixrtc";
import { AndroidControlledAudioOutput } from "./AndroidControlledAudioOutput.ts";
import type { Controls, OutputDevice } from "../controls";
import { ObservableScope } from "./ObservableScope";
import { withTestScheduler } from "../utils/test";
// All the following device types are real device types that have been observed in the wild on Android devices,
// gathered from logs.
// There are no BT Speakers because they are currently filtered out by EXA (native layer)

// A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built in a device.
const SPEAKER_DEVICE: OutputDevice = {
  id: "3",
  name: "Built-in speaker",
  isEarpiece: false,
  isSpeaker: true,
  isExternalHeadset: false,
};
// A device type describing the attached earphone speaker.
const EARPIECE_DEVICE: OutputDevice = {
  id: "2",
  name: "Built-in earpiece",
  isEarpiece: true,
  isSpeaker: false,
  isExternalHeadset: false,
};
// A device type describing a Bluetooth device typically used for telephony.
// Note: this is the only fixture flagged as an external headset.
const BT_HEADSET_DEVICE: OutputDevice = {
  id: "2226",
  name: "Bluetooth - OpenMove by Shokz",
  isEarpiece: false,
  isSpeaker: false,
  isExternalHeadset: false,
};
// A device type describing a USB audio headset.
// All flags are false: it is only identified by its name.
const USB_HEADSET_DEVICE: OutputDevice = {
  id: "29440",
  name: "USB headset - USB-Audio - AB13X USB Audio",
  isEarpiece: false,
  isSpeaker: false,
  isExternalHeadset: false,
};
// A device type describing a headset, which is the combination of headphones and a microphone.
const WIRED_HEADSET_DEVICE: OutputDevice = {
  id: "54509",
  name: "Wired headset - 23117RA68G",
  isEarpiece: false,
  isSpeaker: false,
  isExternalHeadset: false,
};
// A device type describing a pair of wired headphones.
const WIRED_HEADPHONE_DEVICE: OutputDevice = {
  id: "679",
  name: "Wired headphones - TB02",
  isEarpiece: false,
  isSpeaker: false,
  isExternalHeadset: false,
};
/**
 * The base device list that is always present on Android devices.
 * This list is ordered by the OS, the speaker is listed before the earpiece.
 */
const BASE_DEVICE_LIST = [SPEAKER_DEVICE, EARPIECE_DEVICE];
// Base list with a Bluetooth headset attached (most preferred first).
const BT_HEADSET_BASE_DEVICE_LIST = [BT_HEADSET_DEVICE, ...BASE_DEVICE_LIST];
// Base list with a wired headset attached (most preferred first).
const WIRED_HEADSET_BASE_DEVICE_LIST = [
  WIRED_HEADSET_DEVICE,
  ...BASE_DEVICE_LIST,
];
/**
 * A full device list containing all the observed device types in the wild on Android devices.
 * Ordered as they would be ordered by the OS.
 */
const FULL_DEVICE_LIST = [
  BT_HEADSET_DEVICE,
  USB_HEADSET_DEVICE,
  WIRED_HEADSET_DEVICE,
  WIRED_HEADPHONE_DEVICE,
  ...BASE_DEVICE_LIST,
];
// Shared per-test fixtures, re-created before every test so subscriptions
// and mock call counts never leak between tests.
let testScope: ObservableScope;
let mockControls: Controls;
beforeEach(() => {
  testScope = new ObservableScope();
  // Partial mock: only the two callbacks exercised by this suite are
  // provided, hence the cast through `unknown`.
  mockControls = {
    onAudioDeviceSelect: vi.fn(),
    onOutputDeviceSelect: vi.fn(),
  } as unknown as Controls;
});
afterEach(() => {
  // Tear down every subscription created within the test's scope.
  testScope.end();
});
describe("Default selection", () => {
  it("Default to speaker for video calls", async () => {
    const controlledAudioOutput = new AndroidControlledAudioOutput(
      of(BASE_DEVICE_LIST),
      testScope,
      "video",
      mockControls,
    );
    const emissions = await firstValueFrom(
      controlledAudioOutput.selected$.pipe(take(1), toArray()),
    );
    expect(emissions).toEqual([
      { id: SPEAKER_DEVICE.id, virtualEarpiece: false },
    ]);
    // Both the current and the deprecated host callbacks must be notified.
    [
      mockControls.onAudioDeviceSelect,
      mockControls.onOutputDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(SPEAKER_DEVICE.id);
    });
  });
  it("Default to earpiece for audio calls for base config", async () => {
    const controlledAudioOutput = new AndroidControlledAudioOutput(
      of(BASE_DEVICE_LIST),
      testScope,
      "audio",
      mockControls,
    );
    const emissions = await firstValueFrom(
      controlledAudioOutput.selected$.pipe(take(1), toArray()),
    );
    expect(emissions).toEqual([
      { id: EARPIECE_DEVICE.id, virtualEarpiece: false },
    ]);
    [
      mockControls.onAudioDeviceSelect,
      mockControls.onOutputDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(EARPIECE_DEVICE.id);
    });
  });
  // `as const` keeps the elements typed as the literal union
  // "audio" | "video" (assignable to RTCCallIntent) instead of `string`.
  (["audio", "video"] as const).forEach((callIntent) => {
    it(`Default to BT headset for ${callIntent} calls if present`, async () => {
      const controlledAudioOutput = new AndroidControlledAudioOutput(
        of(BT_HEADSET_BASE_DEVICE_LIST),
        testScope,
        callIntent,
        mockControls,
      );
      const emissions = await firstValueFrom(
        controlledAudioOutput.selected$.pipe(take(1), toArray()),
      );
      expect(emissions).toEqual([
        { id: BT_HEADSET_DEVICE.id, virtualEarpiece: false },
      ]);
      [
        mockControls.onAudioDeviceSelect,
        mockControls.onOutputDeviceSelect,
      ].forEach((mockFn) => {
        expect(mockFn).toHaveBeenCalledTimes(1);
        expect(mockFn).toHaveBeenCalledWith(BT_HEADSET_DEVICE.id);
      });
    });
  });
  (["audio", "video"] as const).forEach((callIntent) => {
    it(`Default to wired headset for ${callIntent} calls if present`, async () => {
      const controlledAudioOutput = new AndroidControlledAudioOutput(
        of(WIRED_HEADSET_BASE_DEVICE_LIST),
        testScope,
        callIntent,
        mockControls,
      );
      const emissions = await firstValueFrom(
        controlledAudioOutput.selected$.pipe(take(1), toArray()),
      );
      expect(emissions).toEqual([
        { id: WIRED_HEADSET_DEVICE.id, virtualEarpiece: false },
      ]);
      expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledExactlyOnceWith(
        WIRED_HEADSET_DEVICE.id,
      );
      expect(mockControls.onOutputDeviceSelect).toHaveBeenCalledExactlyOnceWith(
        WIRED_HEADSET_DEVICE.id,
      );
    });
  });
});
describe("Test mappings", () => {
  it("Should map output device to correct AudioDeviceLabel", async () => {
    // Intent is irrelevant here (undefined): this test only exercises the
    // available$ mapping, not the default-selection logic.
    const controlledAudioOutput = new AndroidControlledAudioOutput(
      of(FULL_DEVICE_LIST),
      testScope,
      undefined,
      mockControls,
    );
    const availableDevices = await firstValueFrom(
      controlledAudioOutput.available$.pipe(take(1)),
    );
    // Earpiece/speaker map to dedicated label types; everything else
    // falls back to a name-based label.
    expect(availableDevices).toEqual(
      new Map([
        [BT_HEADSET_DEVICE.id, { type: "name", name: BT_HEADSET_DEVICE.name }],
        [
          USB_HEADSET_DEVICE.id,
          { type: "name", name: USB_HEADSET_DEVICE.name },
        ],
        [
          WIRED_HEADSET_DEVICE.id,
          { type: "name", name: WIRED_HEADSET_DEVICE.name },
        ],
        [
          WIRED_HEADPHONE_DEVICE.id,
          { type: "name", name: WIRED_HEADPHONE_DEVICE.name },
        ],
        [SPEAKER_DEVICE.id, { type: "speaker" }],
        [EARPIECE_DEVICE.id, { type: "earpiece" }],
      ]),
    );
  });
});
describe("Test select a device", () => {
  it(`Switch to correct device `, () => {
    withTestScheduler(({ cold, schedule, expectObservable, flush }) => {
      const controlledAudioOutput = new AndroidControlledAudioOutput(
        cold("a", { a: FULL_DEVICE_LIST }),
        testScope,
        undefined,
        mockControls,
      );
      // Drive three user selections one frame apart.
      schedule("-abc", {
        a: () => controlledAudioOutput.select(EARPIECE_DEVICE.id),
        b: () => controlledAudioOutput.select(USB_HEADSET_DEVICE.id),
        c: () => controlledAudioOutput.select(SPEAKER_DEVICE.id),
      });
      expectObservable(controlledAudioOutput.selected$).toBe("abcd", {
        // virtualEarpiece is always false on android.
        // Initially the BT_HEADSET is selected (it is first in FULL_DEVICE_LIST).
        a: { id: BT_HEADSET_DEVICE.id, virtualEarpiece: false },
        b: { id: EARPIECE_DEVICE.id, virtualEarpiece: false },
        c: { id: USB_HEADSET_DEVICE.id, virtualEarpiece: false },
        d: { id: SPEAKER_DEVICE.id, virtualEarpiece: false },
      });
      flush();
      // Each selection change must be forwarded to both host callbacks, in order.
      [
        mockControls.onOutputDeviceSelect,
        mockControls.onAudioDeviceSelect,
      ].forEach((mockFn) => {
        expect(mockFn).toHaveBeenCalledTimes(4);
        expect(mockFn).toHaveBeenNthCalledWith(1, BT_HEADSET_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(2, EARPIECE_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(3, USB_HEADSET_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(4, SPEAKER_DEVICE.id);
      });
    });
  });
  it(`manually switch then a bt headset is added`, () => {
    withTestScheduler(({ cold, schedule, expectObservable, flush }) => {
      const controlledAudioOutput = new AndroidControlledAudioOutput(
        cold("a--b", {
          a: BASE_DEVICE_LIST,
          b: BT_HEADSET_BASE_DEVICE_LIST,
        }),
        testScope,
        "audio",
        mockControls,
      );
      // Default was earpiece (audio call), let's switch to speaker
      schedule("-a--", {
        a: () => controlledAudioOutput.select(SPEAKER_DEVICE.id),
      });
      expectObservable(controlledAudioOutput.selected$).toBe("ab-c", {
        // virtualEarpiece is always false on android.
        // Initially the earpiece is selected (audio-call default); the manual
        // speaker selection is then overridden when the BT headset appears.
        a: { id: EARPIECE_DEVICE.id, virtualEarpiece: false },
        b: { id: SPEAKER_DEVICE.id, virtualEarpiece: false },
        c: { id: BT_HEADSET_DEVICE.id, virtualEarpiece: false },
      });
      flush();
      [
        mockControls.onOutputDeviceSelect,
        mockControls.onAudioDeviceSelect,
      ].forEach((mockFn) => {
        expect(mockFn).toHaveBeenCalledTimes(3);
        expect(mockFn).toHaveBeenNthCalledWith(1, EARPIECE_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(2, SPEAKER_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(3, BT_HEADSET_DEVICE.id);
      });
    });
  });
  it(`Go back to the previously selected after the auto-switch device goes away`, () => {
    withTestScheduler(({ cold, schedule, expectObservable, flush }) => {
      const controlledAudioOutput = new AndroidControlledAudioOutput(
        cold("a--b-c", {
          a: BASE_DEVICE_LIST,
          b: BT_HEADSET_BASE_DEVICE_LIST,
          c: BASE_DEVICE_LIST,
        }),
        testScope,
        "audio",
        mockControls,
      );
      // Default was earpiece (audio call), let's switch to speaker
      schedule("-a---", {
        a: () => controlledAudioOutput.select(SPEAKER_DEVICE.id),
      });
      expectObservable(controlledAudioOutput.selected$).toBe("ab-c-d", {
        // virtualEarpiece is always false on android.
        // Initially the earpiece is selected (audio-call default); the BT
        // headset auto-switch is then reverted to the manual speaker choice
        // once the headset disappears.
        a: { id: EARPIECE_DEVICE.id, virtualEarpiece: false },
        b: { id: SPEAKER_DEVICE.id, virtualEarpiece: false },
        c: { id: BT_HEADSET_DEVICE.id, virtualEarpiece: false },
        d: { id: SPEAKER_DEVICE.id, virtualEarpiece: false },
      });
      flush();
      [
        mockControls.onOutputDeviceSelect,
        mockControls.onAudioDeviceSelect,
      ].forEach((mockFn) => {
        expect(mockFn).toHaveBeenCalledTimes(4);
        expect(mockFn).toHaveBeenNthCalledWith(1, EARPIECE_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(2, SPEAKER_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(3, BT_HEADSET_DEVICE.id);
        expect(mockFn).toHaveBeenNthCalledWith(4, SPEAKER_DEVICE.id);
      });
    });
  });
});
describe("Available device changes", () => {
  let availableSource$: Subject<OutputDevice[]>;
  // Helper: build a controller wired to the per-test device-list subject.
  const createAudioControlledOutput = (
    intent: RTCCallIntent,
  ): AndroidControlledAudioOutput => {
    return new AndroidControlledAudioOutput(
      availableSource$,
      testScope,
      intent,
      mockControls,
    );
  };
  beforeEach(() => {
    availableSource$ = new Subject<OutputDevice[]>();
  });
  it("When a BT headset is added, control should switch to use it", () => {
    createAudioControlledOutput("video");
    // Emit the base device list, the speaker should be selected
    availableSource$.next(BASE_DEVICE_LIST);
    // Initially speaker would be selected
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(SPEAKER_DEVICE.id);
    });
    // Emit a new device list with a BT device, the control should switch to it
    availableSource$.next([BT_HEADSET_DEVICE, ...BASE_DEVICE_LIST]);
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(2);
      expect(mockFn).toHaveBeenLastCalledWith(BT_HEADSET_DEVICE.id);
    });
  });
  // Android does not set `isExternalHeadset` to true for wired headphones, so we can't test this case.
  it.skip("When a wired headset is added, control should switch to use it", async () => {
    const controlledAudioOutput = createAudioControlledOutput("video");
    // Emit the base device list, the speaker should be selected
    availableSource$.next(BASE_DEVICE_LIST);
    await firstValueFrom(controlledAudioOutput.selected$.pipe(take(1)));
    // Initially speaker would be selected
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(SPEAKER_DEVICE.id);
    });
    // Emit a new device list with a wired headset, the control should switch to it
    availableSource$.next([WIRED_HEADPHONE_DEVICE, ...BASE_DEVICE_LIST]);
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(2);
      expect(mockFn).toHaveBeenLastCalledWith(WIRED_HEADPHONE_DEVICE.id);
    });
  });
  it("When the active bt headset is removed on audio call, control should switch to earpiece", () => {
    createAudioControlledOutput("audio");
    // Emit the BT headset device list, the BT headset should be selected
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    // Initially the BT headset would be selected
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(BT_HEADSET_DEVICE.id);
    });
    // Emit a new device list without the BT headset, the control should switch to the earpiece for
    // audio calls
    availableSource$.next(BASE_DEVICE_LIST);
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(2);
      expect(mockFn).toHaveBeenLastCalledWith(EARPIECE_DEVICE.id);
    });
  });
  it("When the active bt headset is removed on video call, control should switch to speaker", () => {
    createAudioControlledOutput("video");
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    // Initially bt headset would be selected
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(BT_HEADSET_DEVICE.id);
    });
    // Emit a new device list without the BT headset, the control should switch to speaker for video call
    availableSource$.next(BASE_DEVICE_LIST);
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(2);
      expect(mockFn).toHaveBeenLastCalledWith(SPEAKER_DEVICE.id);
    });
  });
  it("Do not repeatedly set the same device", () => {
    createAudioControlledOutput("video");
    // Emitting an identical device list several times must not re-notify the host.
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    // Initially bt headset would be selected
    [
      mockControls.onOutputDeviceSelect,
      mockControls.onAudioDeviceSelect,
    ].forEach((mockFn) => {
      expect(mockFn).toHaveBeenCalledTimes(1);
      expect(mockFn).toHaveBeenCalledWith(BT_HEADSET_DEVICE.id);
    });
  });
});
describe("Scope management", () => {
  it("Should stop emitting when scope ends", () => {
    // Use a dedicated scope so we can end it mid-test without affecting
    // the shared testScope torn down in afterEach.
    const aScope = new ObservableScope();
    const controlledAudioOutput = new AndroidControlledAudioOutput(
      of(BASE_DEVICE_LIST),
      aScope,
      undefined,
      mockControls,
    );
    expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledOnce();
    aScope.end();
    // Selections made after the scope has ended must be ignored.
    controlledAudioOutput.select(EARPIECE_DEVICE.id);
    expect(mockControls.onAudioDeviceSelect).not.toHaveBeenCalledTimes(2);
    expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledOnce();
  });
  it("Should stop updating when scope ends", () => {
    const aScope = new ObservableScope();
    const availableSource$ = new Subject<OutputDevice[]>();
    new AndroidControlledAudioOutput(
      availableSource$,
      aScope,
      undefined,
      mockControls,
    );
    availableSource$.next(BT_HEADSET_BASE_DEVICE_LIST);
    expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledOnce();
    expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledWith(
      BT_HEADSET_DEVICE.id,
    );
    aScope.end();
    // Device-list updates arriving after the scope has ended must be ignored.
    availableSource$.next(BASE_DEVICE_LIST);
    expect(mockControls.onAudioDeviceSelect).not.toHaveBeenCalledTimes(2);
    // Should have been called only once with the initial BT_HEADSET_DEVICE.id
    expect(mockControls.onAudioDeviceSelect).toHaveBeenCalledOnce();
  });
});

View File

@@ -0,0 +1,360 @@
/*
Copyright 2026 Element Corp.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { logger as rootLogger } from "matrix-js-sdk/lib/logger";
import {
distinctUntilChanged,
map,
merge,
type Observable,
scan,
startWith,
Subject,
tap,
} from "rxjs";
import {
type AudioOutputDeviceLabel,
type MediaDevice,
type SelectedAudioOutputDevice,
} from "./MediaDevices.ts";
import type { ObservableScope } from "./ObservableScope.ts";
import type { RTCCallIntent } from "matrix-js-sdk/lib/matrixrtc";
import { type Controls, type OutputDevice } from "../controls.ts";
import { type Behavior } from "./Behavior.ts";
type ControllerState = {
/**
* The list of available output devices, ordered by preference order (most preferred first).
*/
devices: OutputDevice[];
/**
* Explicit user preference for the selected device.
*/
preferredDeviceId: string | undefined;
/**
* The effective selected device, always valid against available devices.
*/
selectedDeviceId: string | undefined;
};
/**
* The possible actions that can be performed on the controller,
* either by the user or by the system.
*/
type ControllerAction =
| { type: "selectDevice"; deviceId: string | undefined }
| { type: "deviceUpdated"; devices: OutputDevice[] };
/**
* The implementation of the audio output media device for Android when using the controlled audio output mode.
*
* In this mode, the hosting application (e.g. Element Mobile) is responsible for providing the list of available audio output devices.
* There are some android specific logic compared to others:
* - AndroidControlledAudioOutput is the only one responsible for selecting the best output device.
* - On android, we don't listen to the selected device from native code (control.setAudioDevice).
* - If a new device is added or removed, this controller will determine the new selected device based
* on the available devices (that is ordered by preference order) and the user's selection (if any).
*
* Given the differences in how the native code is handling the audio routing on Android compared to iOS,
* we have this separate implementation. It allows us to have proper testing and avoid side effects
* from platform specific logic breaking the other platform's implementation.
*/
export class AndroidControlledAudioOutput
  implements MediaDevice<AudioOutputDeviceLabel, SelectedAudioOutputDevice>
{
  private logger = rootLogger.getChild(
    "[MediaDevices AndroidControlledAudioOutput]",
  );

  /**
   * STATE stream: the current state of the controller, including the list of available devices and the selected device.
   */
  private readonly controllerState$: Behavior<ControllerState>;

  /**
   * @inheritdoc
   */
  public readonly available$: Behavior<Map<string, AudioOutputDeviceLabel>>;

  /**
   * Effective selected device, always valid against available devices.
   *
   * On android, we don't listen to the selected device from native code (control.setAudioDevice).
   * Instead, we determine the selected device ourselves based on the available devices and the user's selection (if any).
   */
  public readonly selected$: Behavior<SelectedAudioOutputDevice | undefined>;

  // COMMAND stream: user asks to select a device.
  private readonly selectDeviceCommand$ = new Subject<string | undefined>();

  /**
   * Request selection of the device with the given id. The request goes
   * through the controller state machine, so it only takes effect if the
   * device is actually available.
   */
  public select(id: string): void {
    this.logger.info(`select device: ${id}`);
    this.selectDeviceCommand$.next(id);
  }

  /**
   * Creates an instance of AndroidControlledAudioOutput.
   *
   * @constructor
   * @param controlledDevices$ - The list of available output devices coming from the hosting application, ordered by preference order (most preferred first).
   * @param scope - The ObservableScope to create the Behaviors in.
   * @param initialIntent - The initial call intent (e.g. "audio" or "video") that can be used to determine the default audio routing (e.g. default to earpiece for audio calls and speaker for video calls).
   * @param controls - The controls provided by the hosting application to control the audio routing and notify of user actions.
   */
  public constructor(
    private readonly controlledDevices$: Observable<OutputDevice[]>,
    private readonly scope: ObservableScope,
    private initialIntent: RTCCallIntent | undefined = undefined,
    controls: Controls,
  ) {
    this.controllerState$ = this.startObservingState$();
    this.selected$ = this.effectiveSelectionFromState$(this.controllerState$);
    // Derive the label map exposed to the UI from the controller state.
    this.available$ = scope.behavior(
      this.controllerState$.pipe(
        map((state) => {
          this.logger.info("available devices updated:", state.devices);
          return new Map<string, AudioOutputDeviceLabel>(
            state.devices.map((outputDevice) => {
              return [outputDevice.id, mapDeviceToLabel(outputDevice)];
            }),
          );
        }),
      ),
    );
    // Effect 1: notify host when effective selection changes
    this.selected$
      // It is a behavior so it has built-in distinct until change
      .pipe(scope.bind())
      .subscribe((device) => {
        // Let the hosting application know which output device has been selected.
        if (device !== undefined) {
          this.logger.info("onAudioDeviceSelect called:", device);
          controls.onAudioDeviceSelect?.(device.id);
          // Also invoke the deprecated callback for backward compatibility
          // TODO: it appears that on Android the hosting application is only using the deprecated callback (onOutputDeviceSelect)
          // and not the new one (onAudioDeviceSelect), we should clean this up and only have one callback for audio device selection.
          controls.onOutputDeviceSelect?.(device.id);
        }
      });
  }

  /**
   * Builds the controller state behavior by folding device-list updates and
   * user selection commands into a single state via `scan`.
   */
  private startObservingState$(): Behavior<ControllerState> {
    const initialState: ControllerState = {
      devices: [],
      preferredDeviceId: undefined,
      selectedDeviceId: undefined,
    };
    // Merge the two possible inputs observable as a single
    // stream of actions that will update the state of the controller.
    const actions$: Observable<ControllerAction> = merge(
      this.controlledDevices$.pipe(
        map(
          (devices) =>
            ({ type: "deviceUpdated", devices }) satisfies ControllerAction,
        ),
      ),
      this.selectDeviceCommand$.pipe(
        map(
          (deviceId) =>
            ({ type: "selectDevice", deviceId }) satisfies ControllerAction,
        ),
      ),
    );
    // Seed with an empty device list so the behavior has an initial value
    // before the host has reported any devices.
    const initialAction: ControllerAction = {
      type: "deviceUpdated",
      devices: [],
    };
    return this.scope.behavior(
      actions$.pipe(
        startWith(initialAction),
        scan((state, action): ControllerState => {
          switch (action.type) {
            case "deviceUpdated": {
              const chosenDevice = this.chooseEffectiveSelection({
                previousDevices: state.devices,
                availableDevices: action.devices,
                currentSelectedId: state.selectedDeviceId,
                preferredDeviceId: state.preferredDeviceId,
              });
              return {
                ...state,
                devices: action.devices,
                selectedDeviceId: chosenDevice,
              };
            }
            case "selectDevice": {
              const chosenDevice = this.chooseEffectiveSelection({
                previousDevices: state.devices,
                availableDevices: state.devices,
                currentSelectedId: state.selectedDeviceId,
                preferredDeviceId: action.deviceId,
              });
              return {
                ...state,
                preferredDeviceId: action.deviceId,
                selectedDeviceId: chosenDevice,
              };
            }
          }
        }, initialState),
      ),
    );
  }

  /**
   * Projects the controller state onto the `selected$` shape, de-duplicating
   * by device id so the host is not re-notified for identical selections.
   */
  private effectiveSelectionFromState$(
    state$: Observable<ControllerState>,
  ): Behavior<SelectedAudioOutputDevice | undefined> {
    return this.scope.behavior(
      state$
        .pipe(
          map((state) => {
            if (state.selectedDeviceId) {
              return {
                id: state.selectedDeviceId,
                /** This is an iOS thing, always false for android*/
                virtualEarpiece: false,
              };
            }
            return undefined;
          }),
          distinctUntilChanged((a, b) => a?.id === b?.id),
        )
        .pipe(
          tap((selected) => {
            this.logger.debug(`selected device: ${selected?.id}`);
          }),
        ),
    );
  }

  /**
   * Decides which device should effectively be selected after a state change.
   *
   * Priority: the user's explicit preference, then the previous effective
   * selection, then (if neither is available anymore) the default-selection
   * logic. When a new, more private device (external headset) appears at the
   * top of the preference-ordered list, we auto-switch to it.
   */
  private chooseEffectiveSelection(args: {
    previousDevices: OutputDevice[];
    availableDevices: OutputDevice[];
    currentSelectedId: string | undefined;
    preferredDeviceId: string | undefined;
  }): string | undefined {
    const {
      previousDevices,
      availableDevices,
      currentSelectedId,
      preferredDeviceId,
    } = args;
    this.logger.debug(`chooseEffectiveSelection with args:`, args);
    // Take preferredDeviceId in priority or default to the last effective selection.
    // `??` (not `||`) so that only a missing preference falls through, never a
    // falsy-but-present id.
    const activeSelectedDeviceId = preferredDeviceId ?? currentSelectedId;
    const isAvailable = availableDevices.some(
      (device) => device.id === activeSelectedDeviceId,
    );
    // If there is no current device, or it is not available anymore,
    // choose the default device selection logic.
    if (activeSelectedDeviceId === undefined || !isAvailable) {
      this.logger.debug(
        `No current device or it is not available, using default selection logic.`,
      );
      // use the default selection logic
      return this.chooseDefaultDeviceId(availableDevices);
    }
    // Is there a new added device?
    // If a device is added, we might want to switch to it if it's more preferred than the currently selected device.
    const newDeviceWasAdded = availableDevices.some(
      (device) => !previousDevices.some((d) => d.id === device.id),
    );
    if (newDeviceWasAdded) {
      // TODO only want to check from the added device, not all devices.?
      // check if the currently selected device is the most preferred one, if not switch to the most preferred one.
      const mostPreferredDevice = availableDevices[0];
      this.logger.debug(
        `A new device was added, checking if we should switch to it.`,
        mostPreferredDevice,
      );
      if (mostPreferredDevice.id !== activeSelectedDeviceId) {
        // Given this is automatic switching, we want to be careful and only switch to a more private device
        // (e.g. from speaker to a BT headset) but not switch from a more private device to a less private one
        // (e.g. from a BT headset to the speaker), as that can be disruptive for the user if it happens unexpectedly.
        if (mostPreferredDevice.isExternalHeadset) {
          // NOTE: the original log message had the two ids swapped; the
          // currently selected device is activeSelectedDeviceId.
          this.logger.info(
            `The currently selected device ${activeSelectedDeviceId} is not the most preferred one, switching to the most preferred one ${mostPreferredDevice.id} instead.`,
          );
          // Let's switch as it is a more private device.
          return mostPreferredDevice.id;
        }
      }
    }
    // no changes
    return activeSelectedDeviceId;
  }

  /**
   * The logic for the default is different based on the call type.
   * For example for a voice call we want to default to the earpiece if it's available,
   * but for a video call we want to default to the speaker.
   * If the user is using a BT headset we want to default to that, as it's likely what they want to use for both video and voice calls.
   *
   * @param available the available audio output devices to choose from, keyed by their id, sorted by likelihood of it being used for communication.
   *
   */
  private chooseDefaultDeviceId(available: OutputDevice[]): string | undefined {
    this.logger.debug(
      `Android routing logic intent: ${this.initialIntent} finding best default...`,
    );
    if (this.initialIntent === "audio") {
      const systemProposed = available[0];
      // If no headset is connected, android will route to the speaker by default,
      // but for a voice call we want to route to the earpiece instead,
      // so override the system proposed routing in that case.
      if (systemProposed?.isSpeaker === true) {
        // search for the earpiece
        const earpieceDevice = available.find((device) => device.isEarpiece);
        if (earpieceDevice) {
          this.logger.debug(
            `Android routing: Switch to earpiece instead of speaker for voice call`,
          );
          return earpieceDevice.id;
        } else {
          this.logger.debug(
            `Android routing: no earpiece found, cannot switch, use system proposed routing`,
          );
          return systemProposed.id;
        }
      } else {
        this.logger.debug(
          `Android routing: Use system proposed routing `,
          systemProposed,
        );
        return systemProposed?.id;
      }
    } else {
      // Use the system best proposed best routing.
      return available[0]?.id;
    }
  }
}
// Utilities

/**
 * Translate an OutputDevice reported by the host application into the label
 * shape used by the media-devices UI: the earpiece and speaker get dedicated
 * label types, any other device is labelled by its reported name.
 */
function mapDeviceToLabel(device: OutputDevice): AudioOutputDeviceLabel {
  if (device.isEarpiece) {
    return { type: "earpiece" };
  }
  if (device.isSpeaker) {
    return { type: "speaker" };
  }
  return { type: "name", name: device.name };
}

View File

@@ -0,0 +1,193 @@
/*
Copyright 2026 Element Corp.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { afterEach, beforeEach, describe, vi, it } from "vitest";
import * as ComponentsCore from "@livekit/components-core";
import { ObservableScope } from "./ObservableScope";
import { AudioOutput } from "./MediaDevices";
import { withTestScheduler } from "../utils/test";
// Fixture devices mirroring real `MediaDeviceInfo` entries captured from
// browsers. Casts go through `unknown` because the literals omit the
// `toJSON` member of the DOM interface.
const BT_SPEAKER = {
  deviceId: "f9fc8f5f94578fe3abd89e086c1e78c08477aa564dd9e917950f0e7ebb37a6a2",
  kind: "audiooutput",
  label: "JBL (Bluetooth)",
  groupId: "309a5c086cd8eb885a164046db6ec834c349be01d86448d02c1a5279456ff9e4",
} as unknown as MediaDeviceInfo;
const BUILT_IN_SPEAKER = {
  deviceId: "acdbb8546ea6fa85ba2d861e9bcc0e71810d03bbaf6d1712c69e8d9c0c6c2e0a",
  kind: "audiooutput",
  label: "MacBook Speakers (Built-in)",
  groupId: "08a5a3a486473aaa898eb81cda3113f3e21053fb8b84155f4e612fe3f8db5d17",
} as unknown as MediaDeviceInfo;
const BT_HEADSET = {
  deviceId: "ff8e6edb4ebb512b2b421335bfd14994a5b4c7192b3e84a8696863d83cf46d12",
  kind: "audiooutput",
  label: "OpenMove (Bluetooth)",
  groupId: "c2893c2438c44248368e0533300245c402764991506f42cd73818dc8c3ee9c88",
} as unknown as MediaDeviceInfo;
// Device lists as observed on a Mac ("AMAC"), with and without the synthetic
// Chrome "default" entry and with a BT headset ("HS") attached.
const AMAC_DEVICE_LIST = [BT_SPEAKER, BUILT_IN_SPEAKER];
const AMAC_DEVICE_LIST_WITH_DEFAULT = [
  asDefault(BUILT_IN_SPEAKER),
  ...AMAC_DEVICE_LIST,
];
const AMAC_HS_DEVICE_LIST = [
  asDefault(BT_HEADSET),
  BT_SPEAKER,
  BT_HEADSET,
  BUILT_IN_SPEAKER,
];
const LAPTOP_SPEAKER = {
  deviceId: "EcUxTMu8He2wz+3Y8m/u0fy6M92pUk=",
  kind: "audiooutput",
  label: "Raptor AVS Speaker",
  groupId: "kSrdanhpEDLg3vN8z6Z9MJ1EdanB8zI+Q1dxA=",
} as unknown as MediaDeviceInfo;
const MONITOR_SPEAKER = {
  deviceId: "gBryZdAdC8I/rrJpr9r6R+rZzKkoIK5cpU=",
  kind: "audiooutput",
  label: "Raptor AVS HDMI / DisplayPort 1 Output",
  groupId: "kSrdanhpEDLg3vN8z6Z9MJ1EdanB8zI+Q1dxA=",
} as unknown as MediaDeviceInfo;
// A second machine's device list (no synthetic default entry).
const DEVICE_LIST_B = [LAPTOP_SPEAKER, MONITOR_SPEAKER];
// Chrome exposes an extra synthetic entry named "Default - <device name>"
// that mirrors whatever the OS currently treats as its default output.
function asDefault(device: MediaDeviceInfo): MediaDeviceInfo {
  const syntheticLabel = `Default - ${device.label}`;
  return { ...device, deviceId: "default", label: syntheticLabel };
}
// Before the user grants media permission, the browser still lists every
// device, but with empty/blank ids and labels. This helper models that
// transitional state.
function toBlankDevice(device: MediaDeviceInfo): MediaDeviceInfo {
  const blankedFields = { deviceId: "", label: "", groupId: "" };
  return { ...device, ...blankedFields };
}
// Replace the livekit device observer with a vi.fn() so each test can feed
// its own cold observable of device lists via mockReturnValue.
vi.mock("@livekit/components-core", () => ({
  createMediaDeviceObserver: vi.fn(),
}));
// Marble tests for the plain browser AudioOutput device handling.
describe("AudioOutput Tests", () => {
  let testScope: ObservableScope;
  beforeEach(() => {
    testScope = new ObservableScope();
  });
  afterEach(() => {
    // End the scope so subscriptions from one test cannot leak into the next.
    testScope.end();
  });
  it("should select the default audio output device", () => {
    withTestScheduler(({ behavior, cold, expectObservable }) => {
      vi.mocked(ComponentsCore.createMediaDeviceObserver).mockReturnValue(
        cold("ab", {
          // In a real life setup there would be first a blanked list
          // (permission not yet granted) then the real one.
          a: AMAC_DEVICE_LIST_WITH_DEFAULT.map(toBlankDevice),
          b: AMAC_DEVICE_LIST_WITH_DEFAULT,
        }),
      );
      const audioOutput = new AudioOutput(
        behavior("a", { a: true }),
        testScope,
      );
      // No selection while the list is blanked, then Chrome's synthetic
      // "default" entry once the real list arrives.
      expectObservable(audioOutput.selected$).toBe("ab", {
        a: undefined,
        b: { id: "default", virtualEarpiece: false },
      });
    });
  });
  it("Select the correct device when requested", () => {
    withTestScheduler(({ behavior, cold, schedule, expectObservable }) => {
      vi.mocked(ComponentsCore.createMediaDeviceObserver).mockReturnValue(
        cold("ab", {
          // In a real life setup there would be first a blanked list
          // (permission not yet granted) then the real one.
          a: DEVICE_LIST_B.map(toBlankDevice),
          b: DEVICE_LIST_B,
        }),
      );
      const audioOutput = new AudioOutput(
        behavior("a", { a: true }),
        testScope,
      );
      // Three user selections once the device list has settled; each should be
      // reflected by selected$ in order.
      schedule("--abc", {
        a: () => audioOutput.select(MONITOR_SPEAKER.deviceId),
        b: () => audioOutput.select(LAPTOP_SPEAKER.deviceId),
        c: () => audioOutput.select(MONITOR_SPEAKER.deviceId),
      });
      expectObservable(audioOutput.selected$).toBe("abcde", {
        a: undefined,
        b: { id: LAPTOP_SPEAKER.deviceId, virtualEarpiece: false },
        c: { id: MONITOR_SPEAKER.deviceId, virtualEarpiece: false },
        d: { id: LAPTOP_SPEAKER.deviceId, virtualEarpiece: false },
        e: { id: MONITOR_SPEAKER.deviceId, virtualEarpiece: false },
      });
    });
  });
  it("Test mappings", () => {
    withTestScheduler(({ behavior, cold, schedule, expectObservable }) => {
      vi.mocked(ComponentsCore.createMediaDeviceObserver).mockReturnValue(
        cold("a", {
          // Permission already granted here: only the real list is emitted.
          a: AMAC_HS_DEVICE_LIST,
        }),
      );
      const audioOutput = new AudioOutput(
        behavior("a", { a: true }),
        testScope,
      );
      // Every enumerated device maps to a "name" label; the synthetic
      // "default" entry keeps its "Default - …" label.
      const expectedMappings = new Map([
        [`default`, { type: "name", name: asDefault(BT_HEADSET).label }],
        [BT_SPEAKER.deviceId, { type: "name", name: BT_SPEAKER.label }],
        [BT_HEADSET.deviceId, { type: "name", name: BT_HEADSET.label }],
        [
          BUILT_IN_SPEAKER.deviceId,
          { type: "name", name: BUILT_IN_SPEAKER.label },
        ],
      ]);
      expectObservable(audioOutput.available$).toBe("a", {
        a: expectedMappings,
      });
    });
  });
});

View File

@@ -0,0 +1,132 @@
/*
Copyright 2026 Element Corp.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { logger as rootLogger } from "matrix-js-sdk/lib/logger";
import { combineLatest, merge, startWith, Subject, tap } from "rxjs";
import {
availableOutputDevices$ as controlledAvailableOutputDevices$,
outputDevice$ as controlledOutputSelection$,
} from "../controls.ts";
import type { Behavior } from "./Behavior.ts";
import type { ObservableScope } from "./ObservableScope.ts";
import {
type AudioOutputDeviceLabel,
availableRawDevices$,
iosDeviceMenu$,
type MediaDevice,
type SelectedAudioOutputDevice,
} from "./MediaDevices.ts";
// This hardcoded id is used in EX ios! It can only be changed in coordination with
// the ios swift team.
const EARPIECE_CONFIG_ID = "earpiece-id";
/**
 * A special implementation of audio output that allows the hosting application
 * to have more control over the device selection process. This is used when the
 * `controlledAudioDevices` URL parameter is set, which is currently only true on mobile.
 */
export class IOSControlledAudioOutput
  implements MediaDevice<AudioOutputDeviceLabel, SelectedAudioOutputDevice>
{
  // Tag kept in sync with the class name (it was renamed from
  // ControlledAudioOutput) so log lines are attributable to this class.
  private logger = rootLogger.getChild(
    "[MediaDevices IOSControlledAudioOutput]",
  );

  // We need to keep a subscription on the raw devices so that the OS restores
  // the output route to what it was before; otherwise we would switch back to
  // the default whenever we allocate a new stream.
  public readonly availableRaw$ = availableRawDevices$(
    "audiooutput",
    this.usingNames$,
    this.scope,
    this.logger,
  );

  /**
   * Map of selectable output devices keyed by device ID, as reported by the
   * hosting application, plus an optional virtual earpiece entry.
   */
  public readonly available$ = this.scope.behavior(
    combineLatest(
      [controlledAvailableOutputDevices$.pipe(startWith([])), iosDeviceMenu$],
      (availableRaw, iosDeviceMenu) => {
        const available = new Map<string, AudioOutputDeviceLabel>(
          availableRaw.map(
            ({ id, name, isEarpiece, isSpeaker /*,isExternalHeadset*/ }) => {
              let deviceLabel: AudioOutputDeviceLabel;
              // if (isExternalHeadset) // Do we want this?
              if (isEarpiece) deviceLabel = { type: "earpiece" };
              else if (isSpeaker) deviceLabel = { type: "speaker" };
              else deviceLabel = { type: "name", name };
              return [id, deviceLabel];
            },
          ),
        );
        // Create a virtual earpiece device in case a non-earpiece device is
        // designated for this purpose
        if (iosDeviceMenu && availableRaw.some((d) => d.forEarpiece)) {
          this.logger.info(
            `IOS Add virtual earpiece device with id ${EARPIECE_CONFIG_ID}`,
          );
          available.set(EARPIECE_CONFIG_ID, { type: "earpiece" });
        }
        return available;
      },
    ),
  );

  // Selection requests coming from the in-call UI; merged below with the
  // selection pushed by the hosting application.
  private readonly deviceSelection$ = new Subject<string>();

  /**
   * Requests selection of the output device with the given ID.
   */
  public select(id: string): void {
    this.logger.info(`select device: ${id}`);
    this.deviceSelection$.next(id);
  }

  /**
   * The active selection: the preferred device when one has been requested
   * (by the host app or the UI), otherwise the first available device.
   * `virtualEarpiece` is set when the synthetic earpiece entry is chosen.
   */
  public readonly selected$ = this.scope.behavior(
    combineLatest(
      [
        this.available$,
        merge(
          controlledOutputSelection$.pipe(startWith(undefined)),
          this.deviceSelection$,
        ),
      ],
      (available, preferredId) => {
        const id = preferredId ?? available.keys().next().value;
        return id === undefined
          ? undefined
          : { id, virtualEarpiece: id === EARPIECE_CONFIG_ID };
      },
    ).pipe(
      tap((selected) => {
        this.logger.debug(`selected device: ${selected?.id}`);
      }),
    ),
  );

  public constructor(
    private readonly usingNames$: Behavior<boolean>,
    private readonly scope: ObservableScope,
  ) {
    this.selected$.subscribe((device) => {
      // Let the hosting application know which output device has been selected.
      // This information is probably only of interest if the earpiece mode has
      // been selected - for example, Element X iOS listens to this to determine
      // whether it should enable the proximity sensor.
      if (device !== undefined) {
        this.logger.info("onAudioDeviceSelect called:", device);
        window.controls.onAudioDeviceSelect?.(device.id);
        // Also invoke the deprecated callback for backward compatibility
        window.controls.onOutputDeviceSelect?.(device.id);
      }
    });
    this.available$.subscribe((available) => {
      this.logger.debug("available devices:", available);
    });
    this.availableRaw$.subscribe((availableRaw) => {
      this.logger.debug("available raw devices:", availableRaw);
    });
  }
}

View File

@@ -9,35 +9,28 @@ import {
combineLatest,
filter,
map,
merge,
type Observable,
pairwise,
startWith,
Subject,
switchMap,
type Observable,
} from "rxjs";
import { createMediaDeviceObserver } from "@livekit/components-core";
import { type Logger, logger as rootLogger } from "matrix-js-sdk/lib/logger";
import {
alwaysShowIphoneEarpiece as alwaysShowIphoneEarpieceSetting,
audioInput as audioInputSetting,
audioOutput as audioOutputSetting,
videoInput as videoInputSetting,
alwaysShowIphoneEarpiece as alwaysShowIphoneEarpieceSetting,
} from "../settings/settings";
import { type ObservableScope } from "./ObservableScope";
import {
outputDevice$ as controlledOutputSelection$,
availableOutputDevices$ as controlledAvailableOutputDevices$,
} from "../controls";
import { availableOutputDevices$ as controlledAvailableOutputDevices$ } from "../controls";
import { getUrlParams } from "../UrlParams";
import { platform } from "../Platform";
import { switchWhen } from "../utils/observable";
import { type Behavior, constant } from "./Behavior";
// This hardcoded id is used in EX ios! It can only be changed in coordination with
// the ios swift team.
const EARPIECE_CONFIG_ID = "earpiece-id";
import { AndroidControlledAudioOutput } from "./AndroidControlledAudioOutput.ts";
import { IOSControlledAudioOutput } from "./IOSControlledAudioOutput.ts";
export type DeviceLabel =
| { type: "name"; name: string }
@@ -49,10 +42,18 @@ export type AudioOutputDeviceLabel =
| { type: "earpiece" }
| { type: "default"; name: string | null };
/**
* Base selected-device value shared by all media kinds.
*
* `id` is the effective device identifier used by browser media APIs.
*/
export interface SelectedDevice {
id: string;
}
/**
* Selected audio input value with audio-input-specific metadata.
*/
export interface SelectedAudioInputDevice extends SelectedDevice {
/**
* Emits whenever we think that this audio input device has logically changed
@@ -61,6 +62,9 @@ export interface SelectedAudioInputDevice extends SelectedDevice {
hardwareDeviceChange$: Observable<void>;
}
/**
* Selected audio output value with output-routing-specific metadata.
*/
export interface SelectedAudioOutputDevice extends SelectedDevice {
/**
* Whether this device is a "virtual earpiece" device. If so, we should output
@@ -69,23 +73,42 @@ export interface SelectedAudioOutputDevice extends SelectedDevice {
virtualEarpiece: boolean;
}
/**
* Common reactive contract for selectable input/output media devices (mic, speaker, camera).
*
* `Label` is the type used to represent a device in UI lists.
* `Selected` is the type used to represent the active selection for a device kind.
*/
export interface MediaDevice<Label, Selected> {
/**
* A map from available device IDs to labels.
* Reactive map of currently available devices keyed by device ID.
*
* `Label` defines the UI-facing label data structure for each device type.
*/
available$: Behavior<Map<string, Label>>;
/**
* The selected device.
* The active device selection.
* Can be `undefined` when no device is yet selected.
*
* When defined, `Selected` contains the selected device ID plus any
* type-specific metadata.
*/
selected$: Behavior<Selected | undefined>;
/**
* Selects a new device.
* Requests selection of a device by ID.
*
* Implementations typically persist this preference and let `selected$`
* converge to the effective device (which may differ if the requested ID is
* unavailable).
*/
select(id: string): void;
}
/**
* An observable that represents if we should display the devices menu for iOS.
*
* This implies the following
* - hide any input devices (they do not work anyhow on ios)
* - Show a button to show the native output picker instead.
@@ -95,7 +118,7 @@ export interface MediaDevice<Label, Selected> {
export const iosDeviceMenu$ =
platform === "ios" ? constant(true) : alwaysShowIphoneEarpieceSetting.value$;
function availableRawDevices$(
export function availableRawDevices$(
kind: MediaDeviceKind,
usingNames$: Behavior<boolean>,
scope: ObservableScope,
@@ -146,16 +169,23 @@ function selectDevice$<Label>(
): Observable<string | undefined> {
return combineLatest([available$, preferredId$], (available, preferredId) => {
if (available.size) {
// If the preferred device is available, use it. Or if every available
// device ID is falsy, the browser is probably just being paranoid about
// fingerprinting and we should still try using the preferred device.
// Worst case it is not available and the browser will gracefully fall
// back to some other device for us when requesting the media stream.
// Otherwise, select the first available device.
return (preferredId !== undefined && available.has(preferredId)) ||
(available.size === 1 && available.has(""))
? preferredId
: available.keys().next().value;
if (preferredId !== undefined && available.has(preferredId)) {
// If the preferred device is available, use it.
return preferredId;
} else if (available.size === 1 && available.has("")) {
// In some cases the enumerateDevices will list the devices with empty string details:
// `{deviceId:'', kind:'audiooutput|audioinput|videoinput', label:'', groupId:''}`
// This can happen when:
// 1. The user has not yet granted permissions to microphone/devices
// 2. The page is not running in a secure context (e.g. localhost or https)
// 3. In embedded WebViews, restrictions are often tighter, need active capture.
// 4. The browser is blocking access to device details for privacy reasons (?)
// This is most likely transitional, so keep the current device selected until we get a more accurate enumerateDevices.
return preferredId;
} else {
// No preferred, so pick a default.
return available.keys().next().value;
}
}
return undefined;
});
@@ -212,7 +242,7 @@ class AudioInput implements MediaDevice<DeviceLabel, SelectedAudioInputDevice> {
}
}
class AudioOutput implements MediaDevice<
export class AudioOutput implements MediaDevice<
AudioOutputDeviceLabel,
SelectedAudioOutputDevice
> {
@@ -251,14 +281,16 @@ class AudioOutput implements MediaDevice<
public readonly selected$ = this.scope.behavior(
selectDevice$(this.available$, audioOutputSetting.value$).pipe(
map((id) =>
id === undefined
? undefined
: {
map((id) => {
if (id === undefined) {
return undefined;
} else {
return {
id,
virtualEarpiece: false,
},
),
};
}
}),
),
);
public select(id: string): void {
@@ -275,103 +307,6 @@ class AudioOutput implements MediaDevice<
}
}
// NOTE(review): pre-rename version of the controlled audio output; this change
// replaces it with platform-specific implementations.
/**
 * Audio output implementation where the hosting application (mobile wrapper)
 * controls the device list and the selection, rather than the browser.
 */
class ControlledAudioOutput implements MediaDevice<
  AudioOutputDeviceLabel,
  SelectedAudioOutputDevice
> {
  private logger = rootLogger.getChild("[MediaDevices ControlledAudioOutput]");
  // We need to keep a subscription on the raw devices so that the OS restores
  // the output route to what it was before; otherwise we would switch back to
  // the default whenever we allocate a new stream.
  public readonly availableRaw$ = availableRawDevices$(
    "audiooutput",
    this.usingNames$,
    this.scope,
    this.logger,
  );
  // Devices reported by the host app, mapped to UI labels by their kind flags.
  public readonly available$ = this.scope.behavior(
    combineLatest(
      [controlledAvailableOutputDevices$.pipe(startWith([])), iosDeviceMenu$],
      (availableRaw, iosDeviceMenu) => {
        const available = new Map<string, AudioOutputDeviceLabel>(
          availableRaw.map(
            ({ id, name, isEarpiece, isSpeaker /*,isExternalHeadset*/ }) => {
              let deviceLabel: AudioOutputDeviceLabel;
              // if (isExternalHeadset) // Do we want this?
              if (isEarpiece) deviceLabel = { type: "earpiece" };
              else if (isSpeaker) deviceLabel = { type: "speaker" };
              else deviceLabel = { type: "name", name };
              return [id, deviceLabel];
            },
          ),
        );
        // Create a virtual earpiece device in case a non-earpiece device is
        // designated for this purpose
        if (iosDeviceMenu && availableRaw.some((d) => d.forEarpiece))
          available.set(EARPIECE_CONFIG_ID, { type: "earpiece" });
        return available;
      },
    ),
  );
  // Selection requests coming from the in-call UI.
  private readonly deviceSelection$ = new Subject<string>();
  // Requests selection of the output device with the given ID.
  public select(id: string): void {
    this.deviceSelection$.next(id);
  }
  // The preferred device if one has been requested, otherwise the first
  // available one; undefined while no device is known.
  public readonly selected$ = this.scope.behavior(
    combineLatest(
      [
        this.available$,
        merge(
          controlledOutputSelection$.pipe(startWith(undefined)),
          this.deviceSelection$,
        ),
      ],
      (available, preferredId) => {
        const id = preferredId ?? available.keys().next().value;
        return id === undefined
          ? undefined
          : { id, virtualEarpiece: id === EARPIECE_CONFIG_ID };
      },
    ),
  );
  public constructor(
    private readonly usingNames$: Behavior<boolean>,
    private readonly scope: ObservableScope,
  ) {
    this.selected$.subscribe((device) => {
      // Let the hosting application know which output device has been selected.
      // This information is probably only of interest if the earpiece mode has
      // been selected - for example, Element X iOS listens to this to determine
      // whether it should enable the proximity sensor.
      if (device !== undefined) {
        this.logger.info(
          "[controlled-output] onAudioDeviceSelect called:",
          device,
        );
        window.controls.onAudioDeviceSelect?.(device.id);
        // Also invoke the deprecated callback for backward compatibility
        window.controls.onOutputDeviceSelect?.(device.id);
      }
    });
    this.available$.subscribe((available) => {
      this.logger.info("[controlled-output] available devices:", available);
    });
    this.availableRaw$.subscribe((availableRaw) => {
      this.logger.info(
        "[controlled-output] available raw devices:",
        availableRaw,
      );
    });
  }
}
class VideoInput implements MediaDevice<DeviceLabel, SelectedDevice> {
private logger = rootLogger.getChild("[MediaDevices VideoInput]");
@@ -434,7 +369,14 @@ export class MediaDevices {
AudioOutputDeviceLabel,
SelectedAudioOutputDevice
> = getUrlParams().controlledAudioDevices
? new ControlledAudioOutput(this.usingNames$, this.scope)
? platform == "android"
? new AndroidControlledAudioOutput(
controlledAvailableOutputDevices$,
this.scope,
getUrlParams().callIntent,
window.controls,
)
: new IOSControlledAudioOutput(this.usingNames$, this.scope)
: new AudioOutput(this.usingNames$, this.scope);
public readonly videoInput: MediaDevice<DeviceLabel, SelectedDevice> =

View File

@@ -93,6 +93,7 @@ export const initializeWidget = (
logger.info("Widget API is available");
const api = new WidgetApi(widgetId, parentOrigin);
api.requestCapability(MatrixCapabilities.AlwaysOnScreen);
api.requestCapability(MatrixCapabilities.MSC4039DownloadFile);
// Set up the lazy action emitter, but only for select actions that we
// intend for the app to handle

View File

@@ -6236,8 +6236,8 @@ __metadata:
linkType: hard
"@vector-im/compound-design-tokens@npm:^6.0.0":
version: 6.6.0
resolution: "@vector-im/compound-design-tokens@npm:6.6.0"
version: 6.10.2
resolution: "@vector-im/compound-design-tokens@npm:6.10.2"
peerDependencies:
"@types/react": "*"
react: ^17 || ^18 || ^19.0.0
@@ -6246,13 +6246,13 @@ __metadata:
optional: true
react:
optional: true
checksum: 10c0/93b152dd1de96371f9b6b1f7dbcc381d7ab598031dbc900f52d610f015766c0d4426ae6e47d417e723bfb62d1a53099155b4d788848b78232916ba132c03c2fe
checksum: 10c0/bcac6d79fcfb8cc1356d65dff576bdad217edd0df189a5dea032b0fd57cef335b73ad6d8e395709245bc1c6a8c672a83144ecea48550ca560544d2399af8f2d3
languageName: node
linkType: hard
"@vector-im/compound-web@npm:^8.0.0":
version: 8.3.4
resolution: "@vector-im/compound-web@npm:8.3.4"
version: 8.4.0
resolution: "@vector-im/compound-web@npm:8.4.0"
dependencies:
"@floating-ui/react": "npm:^0.27.0"
"@radix-ui/react-context-menu": "npm:^2.2.16"
@@ -6272,7 +6272,7 @@ __metadata:
peerDependenciesMeta:
"@types/react":
optional: true
checksum: 10c0/44764fa64b5fce2e7181e25b50ee970eda4d921cf650b92bd5e88df0eb60872f3086b8702d18f55c3e39b3751ac19f10bafda8c4306df65c3605bd44b297d95c
checksum: 10c0/31b73555c47b373d4250872bfe863a030b487197bf1198e3cf3a1ec344f2b02f0c72c1513bb598c1cbd7a91d3c6a334d0c8ae37bd7c90d4859c864fc223e059a
languageName: node
linkType: hard