refactor: make audiotrack stuff its own hook

Adrian Castro
2024-03-06 13:23:34 +01:00
parent 6b5ee9aba0
commit b5a7e58e66
3 changed files with 71 additions and 45 deletions
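
In short: the Audio.Sound handling that previously lived inline in the VideoPlayer is moved into a new useAudioTrack hook, and both the video player and the audio-track selector now consume it. The resulting call pattern, summarised from the diffs below (track and stream here are placeholders for the store values the real call sites read):

const { synchronizePlayback } = useAudioTrack();

// With a track and a stream: load the track's audio, mute the video and keep the two aligned.
void synchronizePlayback(track, stream);

// With no arguments (e.g. in an effect cleanup): unload any previously created Audio.Sound.
void synchronizePlayback();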


@@ -4,6 +4,7 @@ import { MaterialCommunityIcons } from "@expo/vector-icons";
import colors from "@movie-web/tailwind-config/colors";
import { useAudioTrack } from "~/hooks/player/useAudioTrack";
import { useBoolean } from "~/hooks/useBoolean";
import { useAudioTrackStore } from "~/stores/audio";
import { usePlayerStore } from "~/stores/player/store";
@@ -20,12 +21,14 @@ export interface AudioTrack {
export const AudioTrackSelector = () => {
  const tracks = usePlayerStore((state) => state.interface.audioTracks);
  const stream = usePlayerStore((state) => state.interface.currentStream);
  const setSelectedAudioTrack = useAudioTrackStore(
    (state) => state.setSelectedAudioTrack,
  );
  const { isTrue, on, off } = useBoolean();
  const { synchronizePlayback } = useAudioTrack();
  if (!tracks?.length) return null;
@@ -64,6 +67,9 @@ export const AudioTrackSelector = () => {
            key={track.language}
            onPress={() => {
              setSelectedAudioTrack(track);
              if (stream) {
                void synchronizePlayback(track, stream);
              }
              off();
            }}
          >


@@ -9,7 +9,7 @@ import {
} from "react-native";
import { Gesture, GestureDetector } from "react-native-gesture-handler";
import { runOnJS, useSharedValue } from "react-native-reanimated";
import { Audio, ResizeMode, Video } from "expo-av";
import { ResizeMode, Video } from "expo-av";
import * as Haptics from "expo-haptics";
import * as NavigationBar from "expo-navigation-bar";
import { useRouter } from "expo-router";
@@ -17,6 +17,7 @@ import * as StatusBar from "expo-status-bar";
import { findHighestQuality } from "@movie-web/provider-utils";
import { useAudioTrack } from "~/hooks/player/useAudioTrack";
import { useBrightness } from "~/hooks/player/useBrightness";
import { usePlaybackSpeed } from "~/hooks/player/usePlaybackSpeed";
import { usePlayer } from "~/hooks/player/usePlayer";
@@ -43,6 +44,7 @@ export const VideoPlayer = () => {
    handleVolumeChange,
  } = useVolume();
  const { currentSpeed } = usePlaybackSpeed();
  const { synchronizePlayback } = useAudioTrack();
  const { dismissFullscreenPlayer } = usePlayer();
  const [videoSrc, setVideoSrc] = useState<AVPlaybackSource>();
  const [isLoading, setIsLoading] = useState(true);
@@ -52,7 +54,6 @@ export const VideoPlayer = () => {
  const router = useRouter();
  const scale = useSharedValue(1);
  const [lastVelocityY, setLastVelocityY] = useState(0);
  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
  const isIdle = usePlayerStore((state) => state.interface.isIdle);
  const stream = usePlayerStore((state) => state.interface.currentStream);
@@ -164,28 +165,6 @@ export const VideoPlayer = () => {
        return router.back();
      }
      const loadAudioTrack = async () => {
        if (selectedAudioTrack) {
          const { uri } = selectedAudioTrack;
          const sound = new Audio.Sound();
          await sound.loadAsync({
            uri,
            headers: {
              ...stream.headers,
              ...stream.preferredHeaders,
            },
          });
          setAudioObject(sound);
        } else {
          if (audioObject) {
            await audioObject.unloadAsync();
            setAudioObject(null);
          }
        }
      };
      void loadAudioTrack();
      setVideoSrc({
        uri: url,
        headers: {
@@ -208,17 +187,15 @@ export const VideoPlayer = () => {
    return () => {
      clearTimeout(timeout);
      if (audioObject) {
        void audioObject.unloadAsync();
      }
      void synchronizePlayback();
    };
  }, [
    audioObject,
    dismissFullscreenPlayer,
    hasStartedPlaying,
    router,
    selectedAudioTrack,
    stream,
    synchronizePlayback,
  ]);
  const onVideoLoadStart = () => {
@@ -234,24 +211,16 @@ export const VideoPlayer = () => {
  };
  useEffect(() => {
    const synchronizePlayback = async () => {
      if (videoRef && hasStartedPlaying) {
        const videoStatus = await videoRef.getStatusAsync();
        if (selectedAudioTrack && audioObject && videoStatus.isLoaded) {
          await videoRef.setIsMutedAsync(true);
          await audioObject.setPositionAsync(videoStatus.positionMillis);
          await audioObject.playAsync();
        } else {
          await videoRef.setIsMutedAsync(false);
    if (hasStartedPlaying && selectedAudioTrack && stream) {
      void synchronizePlayback(selectedAudioTrack, stream);
        }
      }
    };
    if (hasStartedPlaying) {
      void synchronizePlayback();
    }
  }, [audioObject, hasStartedPlaying, selectedAudioTrack, videoRef]);
  }, [
    hasStartedPlaying,
    selectedAudioTrack,
    stream,
    synchronizePlayback,
    videoRef,
  ]);
  return (
    <GestureDetector gesture={composedGesture}>


@@ -0,0 +1,51 @@
import { useCallback, useState } from "react";
import { Audio } from "expo-av";

import type { Stream } from "@movie-web/provider-utils";

import type { AudioTrack } from "~/components/player/AudioTrackSelector";
import { usePlayerStore } from "~/stores/player/store";

export const useAudioTrack = () => {
  const videoRef = usePlayerStore((state) => state.videoRef);
  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);

  const synchronizePlayback = useCallback(
    async (selectedAudioTrack?: AudioTrack, stream?: Stream) => {
      if (selectedAudioTrack && stream) {
        const { uri } = selectedAudioTrack;
        const sound = new Audio.Sound();
        await sound.loadAsync({
          uri,
          headers: {
            ...stream.headers,
            ...stream.preferredHeaders,
          },
        });
        setAudioObject(sound);
      } else {
        if (audioObject) {
          await audioObject.unloadAsync();
          setAudioObject(null);
        }
      }

      if (videoRef?.getStatusAsync && audioObject) {
        const videoStatus = await videoRef.getStatusAsync();
        if (selectedAudioTrack && videoStatus.isLoaded) {
          await videoRef.setIsMutedAsync(true);
          await audioObject.setPositionAsync(videoStatus.positionMillis);
          await audioObject.playAsync();
        } else {
          await videoRef.setIsMutedAsync(false);
        }
      }
    },
    [videoRef, audioObject],
  );

  return {
    synchronizePlayback,
  } as const;
};
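
For reference, a consumer other than the two call sites above would follow the same pattern. The component below is purely illustrative and not part of the commit; it assumes the audio store also exposes the selected track alongside the setSelectedAudioTrack setter used in the first file.

import { useEffect } from "react";

import { useAudioTrack } from "~/hooks/player/useAudioTrack";
import { useAudioTrackStore } from "~/stores/audio";
import { usePlayerStore } from "~/stores/player/store";

// Hypothetical example component, for illustration only.
export const ExternalAudioSync = () => {
  const stream = usePlayerStore((state) => state.interface.currentStream);
  // Assumption: the store exposes the currently selected track as well as its setter.
  const selectedAudioTrack = useAudioTrackStore((state) => state.selectedAudioTrack);
  const { synchronizePlayback } = useAudioTrack();

  useEffect(() => {
    if (selectedAudioTrack && stream) {
      void synchronizePlayback(selectedAudioTrack, stream);
    }
    return () => {
      // Calling the hook with no arguments unloads any Audio.Sound it created.
      void synchronizePlayback();
    };
  }, [selectedAudioTrack, stream, synchronizePlayback]);

  return null;
};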