mirror of https://github.com/movie-web/native-app.git
synced 2025-09-13 14:43:25 +00:00

feat: finish audiotrack switching
@@ -5,6 +5,8 @@ import { usePlayerStore } from "~/stores/player/store";
 export const PlayButton = () => {
   const videoRef = usePlayerStore((state) => state.videoRef);
   const status = usePlayerStore((state) => state.status);
+  const playAudio = usePlayerStore((state) => state.playAudio);
+  const pauseAudio = usePlayerStore((state) => state.pauseAudio);

   return (
     <FontAwesome
@@ -17,10 +19,12 @@ export const PlayButton = () => {
           videoRef?.pauseAsync().catch(() => {
             console.log("Error pausing video");
           });
+          void pauseAudio();
         } else {
           videoRef?.playAsync().catch(() => {
             console.log("Error playing video");
           });
+          void playAudio();
         }
       }
     }}

@@ -142,12 +142,13 @@ export const ScraperProcess = ({ data }: ScraperProcessProps) => {

       if (streamResult.stream.type === "hls") {
         const tracks = await extractTracksFromHLS(
-          streamResult.stream.playlist, // multiple audio tracks: 'https://playertest.longtailvideo.com/adaptive/elephants_dream_v4/index.m3u8',
+          streamResult.stream.playlist,
           {
             ...streamResult.stream.preferredHeaders,
             ...streamResult.stream.headers,
           },
         );

+        if (tracks) setHlsTracks(tracks);

         const constructFullUrl = (playlistUrl: string, uri: string) => {

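Note: extractTracksFromHLS itself is not part of this commit. For context, alternate audio tracks are advertised in #EXT-X-MEDIA:TYPE=AUDIO entries of the HLS master playlist; the TypeScript sketch below shows what such a helper could look like. The function name, the rendition shape, and the parsing details are illustrative assumptions, not the project's actual implementation.

// Hedged sketch: collect audio renditions from an HLS master playlist.
interface HlsAudioRendition {
  uri: string;
  name?: string;
  language?: string;
}

async function listAudioRenditions(
  playlistUrl: string,
  headers: Record<string, string>,
): Promise<HlsAudioRendition[]> {
  const res = await fetch(playlistUrl, { headers });
  const text = await res.text();
  const renditions: HlsAudioRendition[] = [];

  for (const line of text.split("\n")) {
    if (!line.startsWith("#EXT-X-MEDIA:") || !line.includes("TYPE=AUDIO")) continue;
    // EXT-X-MEDIA attributes are comma-separated KEY="VALUE" pairs.
    const attr = (key: string) => new RegExp(`${key}="([^"]*)"`).exec(line)?.[1];
    const uri = attr("URI");
    if (uri) {
      renditions.push({
        // The URI is resolved relative to the master playlist.
        uri: new URL(uri, playlistUrl).toString(),
        name: attr("NAME"),
        language: attr("LANGUAGE"),
      });
    }
  }
  return renditions;
}

The stream in the removed comment (elephants_dream_v4 on playertest.longtailvideo.com) is a public test playlist carrying several such renditions, which is presumably why it was kept around as a manual test case.
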
@@ -9,6 +9,9 @@ interface SeekProps {
 export const SeekButton = ({ type }: SeekProps) => {
   const videoRef = usePlayerStore((state) => state.videoRef);
   const status = usePlayerStore((state) => state.status);
+  const setAudioPositionAsync = usePlayerStore(
+    (state) => state.setAudioPositionAsync,
+  );

   return (
     <MaterialIcons
@@ -25,6 +28,7 @@ export const SeekButton = ({ type }: SeekProps) => {
         videoRef?.setPositionAsync(position).catch(() => {
           console.log("Error seeking backwards");
         });
+        void setAudioPositionAsync(position);
       }
     }}
   />

@@ -166,7 +166,7 @@ export const VideoPlayer = () => {
     }

     setVideoSrc({
-      uri: url, // multiple audio tracks: 'https://playertest.longtailvideo.com/adaptive/elephants_dream_v4/index.m3u8',
+      uri: url,
       headers: {
         ...stream.preferredHeaders,
         ...stream.headers,
@@ -210,18 +210,6 @@ export const VideoPlayer = () => {
     }
   };

-  useEffect(() => {
-    if (hasStartedPlaying && selectedAudioTrack && stream) {
-      void synchronizePlayback(selectedAudioTrack, stream);
-    }
-  }, [
-    hasStartedPlaying,
-    selectedAudioTrack,
-    stream,
-    synchronizePlayback,
-    videoRef,
-  ]);
-
   return (
     <GestureDetector gesture={composedGesture}>
       <View className="flex-1 items-center justify-center bg-black">

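With this effect gone, VideoPlayer no longer drives audio synchronization itself; the rewritten useAudioTrack hook (below) reacts to store state instead, leaving synchronizePlayback as the single entry point for switching tracks. A hedged sketch of a plausible call site — the real AudioTrackSelector is not shown in this diff, and the stream selector and hook path are assumptions:

import { usePlayerStore } from "~/stores/player/store";
import type { AudioTrack } from "~/components/player/AudioTrackSelector";
import { useAudioTrack } from "./useAudioTrack"; // assumed path

// Hypothetical wiring, for illustration only.
export const useSelectAudioTrack = () => {
  const stream = usePlayerStore((state) => state.stream); // assumed store field
  const { synchronizePlayback } = useAudioTrack();

  return (track: AudioTrack | null) => {
    // Passing no track hits the hook's else branch and unloads the audio object.
    void synchronizePlayback(track ?? undefined, stream);
  };
};
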
@@ -1,4 +1,5 @@
-import { useCallback, useState } from "react";
+import type { Video } from "expo-av";
+import { useCallback, useEffect } from "react";
 import { Audio } from "expo-av";

 import type { Stream } from "@movie-web/provider-utils";
@@ -8,52 +9,80 @@ import { usePlayerStore } from "~/stores/player/store";

 export const useAudioTrack = () => {
   const videoRef = usePlayerStore((state) => state.videoRef);
-  const [audioObject, setAudioObject] = useState<Audio.Sound | null>(null);
+  const audioObject = usePlayerStore((state) => state.audioObject);
+  const currentAudioTrack = usePlayerStore((state) => state.currentAudioTrack);
+  const setAudioObject = usePlayerStore((state) => state.setAudioObject);
+  const setCurrentAudioTrack = usePlayerStore(
+    (state) => state.setCurrentAudioTrack,
+  );

   const synchronizePlayback = useCallback(
     async (selectedAudioTrack?: AudioTrack, stream?: Stream) => {
-      console.log("synchronizePlayback called");
-
       if (selectedAudioTrack && stream) {
-        console.log("Loading audio track", selectedAudioTrack.uri);
-        const { uri } = selectedAudioTrack;
-        const { sound } = await Audio.Sound.createAsync({
-          // never resolves or rejects :(
-          uri,
-          headers: {
-            ...stream.headers,
-            ...stream.preferredHeaders,
-          },
-        });
-        console.log("Audio track loaded");
-        setAudioObject(sound);
+        if (audioObject) {
+          await audioObject.unloadAsync();
+        }
+
+        const createAudioAsyncWithTimeout = (uri: string, timeout = 5000) => {
+          return new Promise<Audio.Sound | undefined>((resolve, reject) => {
+            Audio.Sound.createAsync({
+              uri,
+              headers: {
+                ...stream.headers,
+                ...stream.preferredHeaders,
+              },
+            })
+              .then((value) => resolve(value.sound))
+              .catch(reject);
+
+            setTimeout(() => {
+              reject(new Error("Timeout: Audio loading took too long"));
+            }, timeout);
+          });
+        };
+        try {
+          const sound = await createAudioAsyncWithTimeout(
+            selectedAudioTrack.uri,
+          );
+          if (!sound) return;
+          setAudioObject(sound);
+          setCurrentAudioTrack(selectedAudioTrack);
+        } catch (error) {
+          console.error("Error loading audio track:", error);
+        }
       } else {
         if (audioObject) {
-          console.log("Unloading existing audio track");
           await audioObject.unloadAsync();
           setAudioObject(null);
         }
       }
-
-      if (videoRef && audioObject) {
-        console.log("Synchronizing audio with video");
-        const videoStatus = await videoRef.getStatusAsync();
-
-        if (selectedAudioTrack && videoStatus.isLoaded) {
-          console.log("Muting video and starting audio playback");
-          await videoRef.setIsMutedAsync(true);
-          await audioObject.setPositionAsync(videoStatus.positionMillis);
-          await audioObject.playAsync();
-        } else {
-          console.log("Unmuting video");
-          await videoRef.setIsMutedAsync(false);
-        }
-      }
     },
-    [videoRef, audioObject],
+    [audioObject, setAudioObject, setCurrentAudioTrack],
   );

-  return {
-    synchronizePlayback,
-  } as const;
+  const synchronizeAudioWithVideo = async (
+    videoRef: Video | null,
+    audioObject: Audio.Sound | null,
+    selectedAudioTrack?: AudioTrack,
+  ): Promise<void> => {
+    if (videoRef && audioObject) {
+      const videoStatus = await videoRef.getStatusAsync();
+
+      if (selectedAudioTrack && videoStatus.isLoaded) {
+        await videoRef.setIsMutedAsync(true);
+        await audioObject.playAsync();
+        await audioObject.setPositionAsync(videoStatus.positionMillis);
+      } else {
+        await videoRef.setIsMutedAsync(false);
+      }
+    }
+  };
+
+  useEffect(() => {
+    if (audioObject && currentAudioTrack) {
+      void synchronizeAudioWithVideo(videoRef, audioObject, currentAudioTrack);
+    }
+  }, [audioObject, videoRef, currentAudioTrack]);
+
+  return { synchronizePlayback };
 };

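The deleted inline comment ("never resolves or rejects :(") explains the new createAudioAsyncWithTimeout wrapper: Audio.Sound.createAsync can hang indefinitely on some remote streams, so the load is raced against a timer. One quirk of the Promise-constructor version above is that the timer is never cleared; after a successful load its late rejection is silently ignored. An equivalent formulation with Promise.race and timer cleanup — a sketch under the same assumptions, and note that neither version cancels the underlying load:

import { Audio } from "expo-av";
import type { AVPlaybackSource } from "expo-av";

const loadSoundWithTimeout = async (
  source: AVPlaybackSource,
  timeoutMs = 5000,
): Promise<Audio.Sound> => {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(
      () => reject(new Error("Timeout: Audio loading took too long")),
      timeoutMs,
    );
  });
  try {
    // Whichever settles first wins.
    const { sound } = await Promise.race([
      Audio.Sound.createAsync(source),
      timeout,
    ]);
    return sound;
  } finally {
    clearTimeout(timer); // no stray rejection after a successful load
  }
};
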
apps/expo/src/stores/player/slices/audio.ts (new file, +45 lines)

@@ -0,0 +1,45 @@
+import type { Audio } from "expo-av";
+
+import type { MakeSlice } from "./types";
+import type { AudioTrack } from "~/components/player/AudioTrackSelector";
+
+export interface AudioSlice {
+  audioObject: Audio.Sound | null;
+  currentAudioTrack: AudioTrack | null;
+
+  setAudioObject(audioObject: Audio.Sound | null): void;
+  setCurrentAudioTrack(track: AudioTrack | null): void;
+  playAudio(): Promise<void>;
+  pauseAudio(): Promise<void>;
+  setAudioPositionAsync(positionMillis: number): Promise<void>;
+}
+
+export const createAudioSlice: MakeSlice<AudioSlice> = (set, get) => ({
+  audioObject: null,
+  currentAudioTrack: null,
+
+  setAudioObject: (audioObject) => {
+    set({ audioObject });
+  },
+  setCurrentAudioTrack: (track) => {
+    set({ currentAudioTrack: track });
+  },
+  playAudio: async () => {
+    const { audioObject } = get();
+    if (audioObject) {
+      await audioObject.playAsync();
+    }
+  },
+  pauseAudio: async () => {
+    const { audioObject } = get();
+    if (audioObject) {
+      await audioObject.pauseAsync();
+    }
+  },
+  setAudioPositionAsync: async (positionMillis) => {
+    const { audioObject } = get();
+    if (audioObject) {
+      await audioObject.setPositionAsync(positionMillis);
+    }
+  },
+});

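Components consume this slice through the same usePlayerStore selectors seen in the PlayButton and SeekButton hunks above. A minimal hypothetical consumer (not part of the commit), showing the fire-and-forget void pattern the commit uses throughout:

import { Pressable, Text } from "react-native";

import { usePlayerStore } from "~/stores/player/store";

// Illustrative only; pauseAudio is a no-op when no audio object is loaded.
export const PauseAudioButton = () => {
  const pauseAudio = usePlayerStore((state) => state.pauseAudio);
  return (
    <Pressable onPress={() => void pauseAudio()}>
      <Text>Pause audio</Text>
    </Pressable>
  );
};
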
@@ -1,9 +1,10 @@
 import type { StateCreator } from "zustand";

+import type { AudioSlice } from "./audio";
 import type { InterfaceSlice } from "./interface";
 import type { VideoSlice } from "./video";

-export type AllSlices = InterfaceSlice & VideoSlice;
+export type AllSlices = InterfaceSlice & VideoSlice & AudioSlice;

 export type MakeSlice<Slice> = StateCreator<
   AllSlices,

@@ -2,6 +2,7 @@ import { create } from "zustand";
 import { immer } from "zustand/middleware/immer";

 import type { AllSlices } from "./slices/types";
+import { createAudioSlice } from "./slices/audio";
 import { createInterfaceSlice } from "./slices/interface";
 import { createVideoSlice } from "./slices/video";

@@ -9,5 +10,6 @@ export const usePlayerStore = create(
   immer<AllSlices>((...a) => ({
     ...createInterfaceSlice(...a),
     ...createVideoSlice(...a),
+    ...createAudioSlice(...a),
   })),
 );