add scraper screen

Jorrin
2024-03-09 21:46:38 +01:00
parent 0d135182c1
commit 0e00115e16
9 changed files with 464 additions and 139 deletions


@@ -0,0 +1,58 @@
import { useCallback } from "react";
import { transformSearchResultToScrapeMedia } from "@movie-web/provider-utils";
import { fetchMediaDetails, fetchSeasonDetails } from "@movie-web/tmdb";
import { usePlayerStore } from "~/stores/player/store";
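
// Builds the player's meta object from a TMDB id, fetching the season's episode list for shows.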
export const useMeta = () => {
  const meta = usePlayerStore((state) => state.meta);
  const setMeta = usePlayerStore((state) => state.setMeta);

  const convertMovieIdToMeta = useCallback(
    async (id: string, type: "movie" | "tv") => {
      const media = await fetchMediaDetails(id, type);
      if (!media) return;

      const scrapeMedia = transformSearchResultToScrapeMedia(
        media.type,
        media.result,
        meta?.season?.number,
        meta?.episode?.number,
      );

      let seasonData = null;
      if (scrapeMedia.type === "show") {
        seasonData = await fetchSeasonDetails(
          scrapeMedia.tmdbId,
          scrapeMedia.season.number,
        );
      }

      const m = {
        ...scrapeMedia,
        poster: media.result.poster_path,
        ...("season" in scrapeMedia
          ? {
              season: {
                number: scrapeMedia.season.number,
                tmdbId: scrapeMedia.tmdbId,
              },
              episode: {
                number: scrapeMedia.episode.number,
                tmdbId: scrapeMedia.episode.tmdbId,
              },
              episodes:
                seasonData?.episodes.map((e) => ({
                  tmdbId: e.id.toString(),
                  number: e.episode_number,
                  name: e.name,
                })) ?? [],
            }
          : {}),
      };

      setMeta(m);
      return m;
    },
    [meta?.episode?.number, meta?.season?.number, setMeta],
  );

  return { convertMovieIdToMeta };
};


@@ -1,13 +1,194 @@
import { useCallback, useRef, useState } from "react";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import type {
  FullScraperEvents,
  RunOutput,
  ScrapeMedia,
} from "@movie-web/provider-utils";
import {
  getMetaData,
  getVideoStreamFromEmbed,
  getVideoStreamFromSource,
  providers,
} from "@movie-web/provider-utils";
import { convertMetaToScrapeMedia } from "~/stores/player/slices/video";
import { usePlayerStore } from "~/stores/player/store";
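
// One entry in the ordered source list: a source id plus the embed ids discovered under it.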
export interface ScrapingItems {
  id: string;
  children: string[];
}
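
// Progress entry for a single source or embed as shown on the scraper screen.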
export interface ScrapingSegment {
  name: string;
  id: string;
  embedId?: string;
  status: "failure" | "pending" | "notfound" | "success" | "waiting";
  reason?: string;
  error?: any;
  percentage: number;
}
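
// Payload type of a single scraper event callback.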
type ScraperEvent<Event extends keyof FullScraperEvents> = Parameters<
  NonNullable<FullScraperEvents[Event]>
>[0];
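
// Shared scraping state: translates provider events into per-source progress segments.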
export const useBaseScrape = () => {
  const [sources, setSources] = useState<Record<string, ScrapingSegment>>({});
  const [sourceOrder, setSourceOrder] = useState<ScrapingItems[]>([]);
  const [currentSource, setCurrentSource] = useState<string>();
  const lastId = useRef<string | null>(null);

  // Seed the progress map with every source the scraper is about to try.
  const initEvent = useCallback((evt: ScraperEvent<"init">) => {
    setSources(
      evt.sourceIds
        .map((v) => {
          const source = getMetaData().find((s) => s.id === v);
          if (!source) throw new Error("invalid source id");
          const out: ScrapingSegment = {
            name: source.name,
            id: source.id,
            status: "waiting",
            percentage: 0,
          };
          return out;
        })
        .reduce<Record<string, ScrapingSegment>>((a, v) => {
          a[v.id] = v;
          return a;
        }, {}),
    );
    setSourceOrder(evt.sourceIds.map((v) => ({ id: v, children: [] })));
  }, []);

  // Mark the new source as pending; a previous source left pending is treated as successful.
  const startEvent = useCallback((id: ScraperEvent<"start">) => {
    const lastIdTmp = lastId.current;
    setSources((s) => {
      if (s[id]) s[id]!.status = "pending";
      if (lastIdTmp && s[lastIdTmp] && s[lastIdTmp]!.status === "pending")
        s[lastIdTmp]!.status = "success";
      return { ...s };
    });
    setCurrentSource(id);
    lastId.current = id;
  }, []);

  // Copy status, reason, error and percentage from the event into the matching segment.
  const updateEvent = useCallback((evt: ScraperEvent<"update">) => {
    setSources((s) => {
      if (s[evt.id]) {
        s[evt.id]!.status = evt.status;
        s[evt.id]!.reason = evt.reason;
        s[evt.id]!.error = evt.error;
        s[evt.id]!.percentage = evt.percentage;
      }
      return { ...s };
    });
  }, []);

  // Register newly discovered embeds as segments and attach them to their parent source.
  const discoverEmbedsEvent = useCallback(
    (evt: ScraperEvent<"discoverEmbeds">) => {
      setSources((s) => {
        evt.embeds.forEach((v) => {
          const source = getMetaData().find(
            (src) => src.id === v.embedScraperId,
          );
          if (!source) throw new Error("invalid source id");
          const out: ScrapingSegment = {
            embedId: v.embedScraperId,
            name: source.name,
            id: v.id,
            status: "waiting",
            percentage: 0,
          };
          s[v.id] = out;
        });
        return { ...s };
      });
      setSourceOrder((s) => {
        const source = s.find((v) => v.id === evt.sourceId);
        if (!source) throw new Error("invalid source id");
        source.children = evt.embeds.map((v) => v.id);
        return [...s];
      });
    },
    [],
  );

  const startScrape = useCallback(() => {
    lastId.current = null;
  }, []);

  // A non-null run output means the last active source is the one that succeeded.
  const getResult = useCallback((output: RunOutput | null) => {
    if (output && lastId.current) {
      setSources((s) => {
        if (!lastId.current) return s;
        if (s[lastId.current]) s[lastId.current]!.status = "success";
        return { ...s };
      });
    }
    return output;
  }, []);

  return {
    initEvent,
    startEvent,
    updateEvent,
    discoverEmbedsEvent,
    startScrape,
    getResult,
    sources,
    sourceOrder,
    currentSource,
  };
};
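
// Runs all providers for the given media, reporting progress through useBaseScrape.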
export function useScrape() {
  const {
    sources,
    sourceOrder,
    currentSource,
    updateEvent,
    discoverEmbedsEvent,
    initEvent,
    getResult,
    startEvent,
    startScrape,
  } = useBaseScrape();

  const startScraping = useCallback(
    async (media: ScrapeMedia) => {
      startScrape();
      const output = await providers.runAll({
        media,
        events: {
          init: initEvent,
          start: startEvent,
          update: updateEvent,
          discoverEmbeds: discoverEmbedsEvent,
        },
      });
      return getResult(output);
    },
    [
      initEvent,
      startEvent,
      updateEvent,
      discoverEmbedsEvent,
      getResult,
      startScrape,
    ],
  );

  return {
    startScraping,
    sourceOrder,
    sources,
    currentSource,
  };
}
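
// Scrapes an individual embed and stores the resulting stream on the player.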
export const useEmbedScrape = (closeModal?: () => void) => {
  const setCurrentStream = usePlayerStore((state) => state.setCurrentStream);