Skip to content

Commit

Permalink
Merge liveness/alpha into liveness dca v2 (#5992)
Browse files Browse the repository at this point in the history
* chore(liveness): allow selecting all cameras, allow camera selection … (#5833)

* chore(liveness): allow selecting all cameras, allow camera selection on mobile

* fix(liveness): fix camera select showing up with one camera, fix camera changing weirdness with hair check screen (#5845)

* fix(liveness): Only apply transform style on user-facing video (#5953)

* fix(liveness): Fix oval render when switching cameras (#5954)

---------

Co-authored-by: thaddmt <[email protected]>
Co-authored-by: Caleb Pollman <[email protected]>
Co-authored-by: Emma Sauerborn <[email protected]>
Co-authored-by: Scott Rees <[email protected]>
  • Loading branch information
5 people authored Nov 1, 2024
1 parent edc0d5b commit 68e1fe9
Show file tree
Hide file tree
Showing 9 changed files with 502 additions and 226 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import {

import { Hint, Overlay, selectErrorState, MatchIndicator } from '../shared';
import { LivenessClassNames } from '../types/classNames';
import { isDeviceUserFacing } from '../utils/device';
import {
FaceLivenessErrorModal,
renderErrorModal,
Expand Down Expand Up @@ -128,6 +129,8 @@ export const LivenessCameraModule = (
const freshnessColorRef = useRef<HTMLCanvasElement | null>(null);

const [isCameraReady, setIsCameraReady] = useState<boolean>(false);
const [isMetadataLoaded, setIsMetadataLoaded] = useState<boolean>(false);
const [isCameraUserFacing, setIsCameraUserFacing] = useState<boolean>(true);
const isInitCamera = state.matches('initCamera');
const isInitWebsocket = state.matches('initWebsocket');
const isCheckingCamera = state.matches({ initCamera: 'cameraCheck' });
Expand Down Expand Up @@ -162,21 +165,28 @@ export const LivenessCameraModule = (
(!isMobileScreen || isFaceMovementChallenge);

React.useEffect(() => {
if (canvasRef?.current && videoRef?.current && videoStream && isStartView) {
drawStaticOval(canvasRef.current, videoRef.current, videoStream);
async function checkCameraFacing() {
const isUserFacing = await isDeviceUserFacing(selectedDeviceId);
setIsCameraUserFacing(isUserFacing);
}
}, [canvasRef, videoRef, videoStream, colorMode, isStartView]);
checkCameraFacing();
}, [selectedDeviceId]);

React.useEffect(() => {
const shouldDrawOval =
canvasRef?.current &&
videoRef?.current &&
videoStream &&
isStartView &&
isMetadataLoaded;

if (shouldDrawOval) {
drawStaticOval(canvasRef.current, videoRef.current!, videoStream);
}

const updateColorModeHandler = (e: MediaQueryListEvent) => {
if (
e.matches &&
canvasRef?.current &&
videoRef?.current &&
videoStream &&
isStartView
) {
drawStaticOval(canvasRef.current, videoRef.current, videoStream);
if (e.matches && shouldDrawOval) {
drawStaticOval(canvasRef.current, videoRef.current!, videoStream);
}
};

Expand All @@ -194,7 +204,7 @@ export const LivenessCameraModule = (
darkModePreference.removeEventListener('change', updateColorModeHandler);
lightModePreference.addEventListener('change', updateColorModeHandler);
};
}, [canvasRef, videoRef, videoStream, isStartView]);
}, [videoRef, videoStream, colorMode, isStartView, isMetadataLoaded]);

React.useLayoutEffect(() => {
if (isCameraReady) {
Expand Down Expand Up @@ -243,6 +253,10 @@ export const LivenessCameraModule = (
setIsCameraReady(true);
};

const handleLoadedMetadata = () => {
setIsMetadataLoaded(true);
};

const beginLivenessCheck = React.useCallback(() => {
send({
type: 'BEGIN',
Expand All @@ -253,6 +267,7 @@ export const LivenessCameraModule = (
(e: React.ChangeEvent<HTMLSelectElement>) => {
const newDeviceId = e.target.value;
const changeCamera = async () => {
setIsMetadataLoaded(false);
const newStream = await navigator.mediaDevices.getUserMedia({
video: {
...videoConstraints,
Expand Down Expand Up @@ -403,8 +418,12 @@ export const LivenessCameraModule = (
width={mediaWidth}
height={mediaHeight}
onCanPlay={handleMediaPlay}
onLoadedMetadata={handleLoadedMetadata}
data-testid="video"
className={LivenessClassNames.Video}
className={classNames(
LivenessClassNames.Video,
isCameraUserFacing && LivenessClassNames.UserFacingVideo
)}
aria-label={cameraDisplayText.a11yVideoLabelText}
/>
<Flex
Expand Down
Original file line number Diff line number Diff line change
@@ -1,21 +1,53 @@
import { fireEvent, render, screen } from '@testing-library/react';
import { CameraSelector } from '../CameraSelector';
import React from 'react';

// Two fake video-input devices so the selector renders its <select> branch
// (a single device would hide the dropdown entirely).
const mockMediaDevices: MediaDeviceInfo[] = [
  {
    deviceId: '1',
    groupId: 'foobar',
    label: 'Camera 1',
    kind: 'videoinput',
    toJSON: jest.fn(),
  },
  {
    deviceId: '2',
    groupId: 'foobar',
    label: 'Camera 2',
    kind: 'videoinput',
    toJSON: jest.fn(),
  },
];

// Shared onSelect spy; reset before each test so call counts don't leak
// between cases.
const onChange = jest.fn();

describe('CameraSelector', () => {
  beforeEach(() => {
    onChange.mockClear();
  });

  it('should render', () => {
    const result = render(
      <CameraSelector onSelect={onChange} devices={mockMediaDevices} />
    );

    expect(result.container).toBeDefined();
  });

  it('renders CameraSelector when there are multiple devices and allows changing camera', async () => {
    render(<CameraSelector onSelect={onChange} devices={mockMediaDevices} />);

    // The selector surfaces as a combobox defaulting to the first device.
    const selectElement = screen.getByRole('combobox') as HTMLSelectElement;
    expect(selectElement).toBeInTheDocument();
    expect(selectElement.value).toBe('1');

    // One <option> per device, labeled with the device label.
    const options = screen.getAllByRole('option');
    expect(options).toHaveLength(2);
    expect(options[0].textContent).toBe('Camera 1');
    expect(options[1].textContent).toBe('Camera 2');

    // Simulate selecting the back camera
    fireEvent.change(selectElement, { target: { value: '2' } });
    expect(onChange).toHaveBeenCalledTimes(1);
    expect(selectElement.value).toBe('2');
  });
});
Loading

0 comments on commit 68e1fe9

Please sign in to comment.