refactor: make audiotrack stuff its own hook

Adrian Castro
2024-03-06 13:23:34 +01:00
parent 6b5ee9aba0
commit b5a7e58e66
3 changed files with 71 additions and 45 deletions
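In short, the audio-track loading and audio/video synchronization logic that previously lived inline in the video player moves into a new useAudioTrack hook, which exposes a single synchronizePlayback function. The sketch below is pieced together from the diff that follows to show how the hook is meant to be consumed; the Example component and the selectTrack/teardown names are illustrative only and are not part of the commit.

import { useAudioTrack } from "~/hooks/player/useAudioTrack";
import { usePlayerStore } from "~/stores/player/store";

import type { AudioTrack } from "~/components/player/AudioTrackSelector";

// Illustrative consumer of the new hook (not part of the commit).
export const Example = () => {
  const stream = usePlayerStore((state) => state.interface.currentStream);
  const { synchronizePlayback } = useAudioTrack();

  // Selecting a track loads it as a separate expo-av sound (with the
  // stream's headers) and mutes the video so only the chosen track is audible.
  const selectTrack = (track: AudioTrack) => {
    if (stream) void synchronizePlayback(track, stream);
  };

  // Calling with no arguments unloads any previously loaded sound; the
  // player uses this as its effect cleanup.
  const teardown = () => {
    void synchronizePlayback();
  };

  // ...wire selectTrack / teardown into the UI as needed.
  return null;
};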

View File

@@ -4,6 +4,7 @@ import { MaterialCommunityIcons } from "@expo/vector-icons";
 import colors from "@movie-web/tailwind-config/colors";
+import { useAudioTrack } from "~/hooks/player/useAudioTrack";
 import { useBoolean } from "~/hooks/useBoolean";
 import { useAudioTrackStore } from "~/stores/audio";
 import { usePlayerStore } from "~/stores/player/store";
@@ -20,12 +21,14 @@ export interface AudioTrack {
 export const AudioTrackSelector = () => {
   const tracks = usePlayerStore((state) => state.interface.audioTracks);
+  const stream = usePlayerStore((state) => state.interface.currentStream);
   const setSelectedAudioTrack = useAudioTrackStore(
     (state) => state.setSelectedAudioTrack,
   );
   const { isTrue, on, off } = useBoolean();
+  const { synchronizePlayback } = useAudioTrack();

   if (!tracks?.length) return null;
@@ -64,6 +67,9 @@ export const AudioTrackSelector = () => {
             key={track.language}
             onPress={() => {
               setSelectedAudioTrack(track);
+              if (stream) {
+                void synchronizePlayback(track, stream);
+              }
               off();
             }}
           >

View File

@@ -9,7 +9,7 @@ import {
} from "react-native"; } from "react-native";
import { Gesture, GestureDetector } from "react-native-gesture-handler"; import { Gesture, GestureDetector } from "react-native-gesture-handler";
import { runOnJS, useSharedValue } from "react-native-reanimated"; import { runOnJS, useSharedValue } from "react-native-reanimated";
import { Audio, ResizeMode, Video } from "expo-av"; import { ResizeMode, Video } from "expo-av";
import * as Haptics from "expo-haptics"; import * as Haptics from "expo-haptics";
import * as NavigationBar from "expo-navigation-bar"; import * as NavigationBar from "expo-navigation-bar";
import { useRouter } from "expo-router"; import { useRouter } from "expo-router";
@@ -17,6 +17,7 @@ import * as StatusBar from "expo-status-bar";
 import { findHighestQuality } from "@movie-web/provider-utils";
+import { useAudioTrack } from "~/hooks/player/useAudioTrack";
 import { useBrightness } from "~/hooks/player/useBrightness";
 import { usePlaybackSpeed } from "~/hooks/player/usePlaybackSpeed";
 import { usePlayer } from "~/hooks/player/usePlayer";
@@ -43,6 +44,7 @@ export const VideoPlayer = () => {
     handleVolumeChange,
   } = useVolume();
   const { currentSpeed } = usePlaybackSpeed();
+  const { synchronizePlayback } = useAudioTrack();
   const { dismissFullscreenPlayer } = usePlayer();
   const [videoSrc, setVideoSrc] = useState<AVPlaybackSource>();
   const [isLoading, setIsLoading] = useState(true);
@@ -52,7 +54,6 @@ export const VideoPlayer = () => {
   const router = useRouter();
   const scale = useSharedValue(1);
   const [lastVelocityY, setLastVelocityY] = useState(0);
-  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
   const isIdle = usePlayerStore((state) => state.interface.isIdle);
   const stream = usePlayerStore((state) => state.interface.currentStream);
@@ -164,28 +165,6 @@ export const VideoPlayer = () => {
       return router.back();
     }

-    const loadAudioTrack = async () => {
-      if (selectedAudioTrack) {
-        const { uri } = selectedAudioTrack;
-        const sound = new Audio.Sound();
-        await sound.loadAsync({
-          uri,
-          headers: {
-            ...stream.headers,
-            ...stream.preferredHeaders,
-          },
-        });
-        setAudioObject(sound);
-      } else {
-        if (audioObject) {
-          await audioObject.unloadAsync();
-          setAudioObject(null);
-        }
-      }
-    };
-    void loadAudioTrack();

     setVideoSrc({
       uri: url,
       headers: {
@@ -208,17 +187,15 @@ export const VideoPlayer = () => {
     return () => {
       clearTimeout(timeout);
-      if (audioObject) {
-        void audioObject.unloadAsync();
-      }
+      void synchronizePlayback();
     };
   }, [
-    audioObject,
     dismissFullscreenPlayer,
     hasStartedPlaying,
     router,
     selectedAudioTrack,
     stream,
+    synchronizePlayback,
   ]);

   const onVideoLoadStart = () => {
@@ -234,24 +211,16 @@ export const VideoPlayer = () => {
   };

   useEffect(() => {
-    const synchronizePlayback = async () => {
-      if (videoRef && hasStartedPlaying) {
-        const videoStatus = await videoRef.getStatusAsync();
-
-        if (selectedAudioTrack && audioObject && videoStatus.isLoaded) {
-          await videoRef.setIsMutedAsync(true);
-          await audioObject.setPositionAsync(videoStatus.positionMillis);
-          await audioObject.playAsync();
-        } else {
-          await videoRef.setIsMutedAsync(false);
-        }
-      }
-    };
-
-    if (hasStartedPlaying) {
-      void synchronizePlayback();
+    if (hasStartedPlaying && selectedAudioTrack && stream) {
+      void synchronizePlayback(selectedAudioTrack, stream);
     }
-  }, [audioObject, hasStartedPlaying, selectedAudioTrack, videoRef]);
+  }, [
+    hasStartedPlaying,
+    selectedAudioTrack,
+    stream,
+    synchronizePlayback,
+    videoRef,
+  ]);

   return (
     <GestureDetector gesture={composedGesture}>

View File

@@ -0,0 +1,51 @@
+import { useCallback, useState } from "react";
+import { Audio } from "expo-av";
+
+import type { Stream } from "@movie-web/provider-utils";
+
+import type { AudioTrack } from "~/components/player/AudioTrackSelector";
+import { usePlayerStore } from "~/stores/player/store";
+
+export const useAudioTrack = () => {
+  const videoRef = usePlayerStore((state) => state.videoRef);
+  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
+
+  const synchronizePlayback = useCallback(
+    async (selectedAudioTrack?: AudioTrack, stream?: Stream) => {
+      if (selectedAudioTrack && stream) {
+        const { uri } = selectedAudioTrack;
+        const sound = new Audio.Sound();
+        await sound.loadAsync({
+          uri,
+          headers: {
+            ...stream.headers,
+            ...stream.preferredHeaders,
+          },
+        });
+        setAudioObject(sound);
+      } else {
+        if (audioObject) {
+          await audioObject.unloadAsync();
+          setAudioObject(null);
+        }
+      }
+
+      if (videoRef?.getStatusAsync && audioObject) {
+        const videoStatus = await videoRef.getStatusAsync();
+
+        if (selectedAudioTrack && videoStatus.isLoaded) {
+          await videoRef.setIsMutedAsync(true);
+          await audioObject.setPositionAsync(videoStatus.positionMillis);
+          await audioObject.playAsync();
+        } else {
+          await videoRef.setIsMutedAsync(false);
+        }
+      }
+    },
+    [videoRef, audioObject],
+  );
+
+  return {
+    synchronizePlayback,
+  } as const;
+};
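Design note: synchronizePlayback doubles as setup and teardown. Called with a track and a stream, it loads the track into an expo-av Audio.Sound using the stream's headers; called with no arguments, it unloads and clears any existing sound. If it still holds a sound afterwards and a track is selected, it mutes the video, seeks the sound to the video's current position, and starts playback; otherwise it unmutes the video. Keeping the Audio.Sound instance inside the hook is what lets VideoPlayer drop its local audioObject state above.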