From b5a7e58e66c6d1c6771781a1c4245e09d1c47cb3 Mon Sep 17 00:00:00 2001
From: Adrian Castro <22133246+castdrian@users.noreply.github.com>
Date: Wed, 6 Mar 2024 13:23:34 +0100
Subject: [PATCH] refactor: make audiotrack stuff its own hook

---
 .../components/player/AudioTrackSelector.tsx |  6 ++
 .../src/components/player/VideoPlayer.tsx    | 59 +++++--------------
 apps/expo/src/hooks/player/useAudioTrack.ts  | 51 ++++++++++++++++
 3 files changed, 71 insertions(+), 45 deletions(-)
 create mode 100644 apps/expo/src/hooks/player/useAudioTrack.ts

diff --git a/apps/expo/src/components/player/AudioTrackSelector.tsx b/apps/expo/src/components/player/AudioTrackSelector.tsx
index ef35c87..8278af9 100644
--- a/apps/expo/src/components/player/AudioTrackSelector.tsx
+++ b/apps/expo/src/components/player/AudioTrackSelector.tsx
@@ -4,6 +4,7 @@ import { MaterialCommunityIcons } from "@expo/vector-icons";
 
 import colors from "@movie-web/tailwind-config/colors";
 
+import { useAudioTrack } from "~/hooks/player/useAudioTrack";
 import { useBoolean } from "~/hooks/useBoolean";
 import { useAudioTrackStore } from "~/stores/audio";
 import { usePlayerStore } from "~/stores/player/store";
@@ -20,12 +21,14 @@ export interface AudioTrack {
 
 export const AudioTrackSelector = () => {
   const tracks = usePlayerStore((state) => state.interface.audioTracks);
+  const stream = usePlayerStore((state) => state.interface.currentStream);
   const setSelectedAudioTrack = useAudioTrackStore(
     (state) => state.setSelectedAudioTrack,
   );
 
   const { isTrue, on, off } = useBoolean();
+  const { synchronizePlayback } = useAudioTrack();
 
   if (!tracks?.length) return null;
 
@@ -64,6 +67,9 @@ export const AudioTrackSelector = () => {
             key={track.language}
             onPress={() => {
               setSelectedAudioTrack(track);
+              if (stream) {
+                void synchronizePlayback(track, stream);
+              }
               off();
             }}
           >
diff --git a/apps/expo/src/components/player/VideoPlayer.tsx b/apps/expo/src/components/player/VideoPlayer.tsx
index 9899385..b75887e 100644
--- a/apps/expo/src/components/player/VideoPlayer.tsx
+++ b/apps/expo/src/components/player/VideoPlayer.tsx
@@ -9,7 +9,7 @@ import {
 } from "react-native";
 import { Gesture, GestureDetector } from "react-native-gesture-handler";
 import { runOnJS, useSharedValue } from "react-native-reanimated";
-import { Audio, ResizeMode, Video } from "expo-av";
+import { ResizeMode, Video } from "expo-av";
 import * as Haptics from "expo-haptics";
 import * as NavigationBar from "expo-navigation-bar";
 import { useRouter } from "expo-router";
@@ -17,6 +17,7 @@ import * as StatusBar from "expo-status-bar";
 
 import { findHighestQuality } from "@movie-web/provider-utils";
 
+import { useAudioTrack } from "~/hooks/player/useAudioTrack";
 import { useBrightness } from "~/hooks/player/useBrightness";
 import { usePlaybackSpeed } from "~/hooks/player/usePlaybackSpeed";
 import { usePlayer } from "~/hooks/player/usePlayer";
@@ -43,6 +44,7 @@ export const VideoPlayer = () => {
     handleVolumeChange,
   } = useVolume();
   const { currentSpeed } = usePlaybackSpeed();
+  const { synchronizePlayback } = useAudioTrack();
   const { dismissFullscreenPlayer } = usePlayer();
   const [videoSrc, setVideoSrc] = useState();
   const [isLoading, setIsLoading] = useState(true);
@@ -52,7 +54,6 @@ export const VideoPlayer = () => {
   const router = useRouter();
   const scale = useSharedValue(1);
   const [lastVelocityY, setLastVelocityY] = useState(0);
-  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
   const isIdle = usePlayerStore((state) => state.interface.isIdle);
   const stream = usePlayerStore((state) =>
     state.interface.currentStream);
@@ -164,28 +165,6 @@ export const VideoPlayer = () => {
       return router.back();
     }
 
-    const loadAudioTrack = async () => {
-      if (selectedAudioTrack) {
-        const { uri } = selectedAudioTrack;
-        const sound = new Audio.Sound();
-        await sound.loadAsync({
-          uri,
-          headers: {
-            ...stream.headers,
-            ...stream.preferredHeaders,
-          },
-        });
-        setAudioObject(sound);
-      } else {
-        if (audioObject) {
-          await audioObject.unloadAsync();
-          setAudioObject(null);
-        }
-      }
-    };
-
-    void loadAudioTrack();
-
     setVideoSrc({
       uri: url,
       headers: {
@@ -208,17 +187,15 @@ export const VideoPlayer = () => {
 
     return () => {
       clearTimeout(timeout);
-      if (audioObject) {
-        void audioObject.unloadAsync();
-      }
+      void synchronizePlayback();
     };
   }, [
-    audioObject,
     dismissFullscreenPlayer,
     hasStartedPlaying,
     router,
     selectedAudioTrack,
     stream,
+    synchronizePlayback,
   ]);
 
   const onVideoLoadStart = () => {
@@ -234,24 +211,16 @@ export const VideoPlayer = () => {
   };
 
   useEffect(() => {
-    const synchronizePlayback = async () => {
-      if (videoRef && hasStartedPlaying) {
-        const videoStatus = await videoRef.getStatusAsync();
-
-        if (selectedAudioTrack && audioObject && videoStatus.isLoaded) {
-          await videoRef.setIsMutedAsync(true);
-          await audioObject.setPositionAsync(videoStatus.positionMillis);
-          await audioObject.playAsync();
-        } else {
-          await videoRef.setIsMutedAsync(false);
-        }
-      }
-    };
-
-    if (hasStartedPlaying) {
-      void synchronizePlayback();
+    if (hasStartedPlaying && selectedAudioTrack && stream) {
+      void synchronizePlayback(selectedAudioTrack, stream);
     }
-  }, [audioObject, hasStartedPlaying, selectedAudioTrack, videoRef]);
+  }, [
+    hasStartedPlaying,
+    selectedAudioTrack,
+    stream,
+    synchronizePlayback,
+    videoRef,
+  ]);
 
   return (
diff --git a/apps/expo/src/hooks/player/useAudioTrack.ts b/apps/expo/src/hooks/player/useAudioTrack.ts
new file mode 100644
index 0000000..61038da
--- /dev/null
+++ b/apps/expo/src/hooks/player/useAudioTrack.ts
@@ -0,0 +1,51 @@
+import { useCallback, useState } from "react";
+import { Audio } from "expo-av";
+
+import type { Stream } from "@movie-web/provider-utils";
+
+import type { AudioTrack } from "~/components/player/AudioTrackSelector";
+import { usePlayerStore } from "~/stores/player/store";
+
+export const useAudioTrack = () => {
+  const videoRef = usePlayerStore((state) => state.videoRef);
+  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
+
+  const synchronizePlayback = useCallback(
+    async (selectedAudioTrack?: AudioTrack, stream?: Stream) => {
+      if (selectedAudioTrack && stream) {
+        const { uri } = selectedAudioTrack;
+        const sound = new Audio.Sound();
+        await sound.loadAsync({
+          uri,
+          headers: {
+            ...stream.headers,
+            ...stream.preferredHeaders,
+          },
+        });
+        setAudioObject(sound);
+      } else {
+        if (audioObject) {
+          await audioObject.unloadAsync();
+          setAudioObject(null);
+        }
+      }
+
+      if (videoRef?.getStatusAsync && audioObject) {
+        const videoStatus = await videoRef.getStatusAsync();
+
+        if (selectedAudioTrack && videoStatus.isLoaded) {
+          await videoRef.setIsMutedAsync(true);
+          await audioObject.setPositionAsync(videoStatus.positionMillis);
+          await audioObject.playAsync();
+        } else {
+          await videoRef.setIsMutedAsync(false);
+        }
+      }
+    },
+    [videoRef, audioObject],
+  );
+
+  return {
+    synchronizePlayback,
+  } as const;
+};
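
Usage sketch (not part of the patch): a minimal consumer of the new hook, mirroring the AudioTrackSelector change above. The useSwitchAudioTrack wrapper name is hypothetical; the imports, the currentStream selector, and the synchronizePlayback(track, stream) signature are taken from the diff.

import { useAudioTrack } from "~/hooks/player/useAudioTrack";
import { usePlayerStore } from "~/stores/player/store";

import type { AudioTrack } from "~/components/player/AudioTrackSelector";

// Hypothetical helper, only for illustration of the new hook's API.
export const useSwitchAudioTrack = () => {
  const stream = usePlayerStore((state) => state.interface.currentStream);
  const { synchronizePlayback } = useAudioTrack();

  return (track: AudioTrack) => {
    // Loads the selected track's audio with the stream's headers and
    // mutes the video so only the side audio object is heard.
    if (stream) {
      void synchronizePlayback(track, stream);
    }
  };
};

Calling synchronizePlayback() with no arguments, as the VideoPlayer cleanup effect now does, unloads the side audio object instead.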