mirror of
https://github.com/movie-web/providers.git
synced 2025-09-13 15:43:26 +00:00
Add stream targets
@@ -5,6 +5,10 @@ import { vi, describe, it, expect, afterEach } from 'vitest';
 const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
 vi.mock('@/providers/all', () => mocks);
 
+const features = {
+  requires: [],
+}
+
 describe('getProviders()', () => {
   afterEach(() => {
     vi.clearAllMocks();
@@ -13,7 +17,7 @@ describe('getProviders()', () => {
   it('should return providers', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(getProviders()).toEqual({
+    expect(getProviders(features)).toEqual({
       sources: [mockSources.sourceA, mockSources.sourceB],
       embeds: [mockEmbeds.embedD],
     });
@@ -22,7 +26,7 @@ describe('getProviders()', () => {
   it('should filter out disabled providers', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedEDisabled]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceCDisabled, mockSources.sourceB]);
-    expect(getProviders()).toEqual({
+    expect(getProviders(features)).toEqual({
      sources: [mockSources.sourceA, mockSources.sourceB],
      embeds: [mockEmbeds.embedD],
    });
@@ -31,31 +35,31 @@ describe('getProviders()', () => {
   it('should throw on duplicate ids in sources', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceAHigherRank, mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders()).toThrowError();
+    expect(() => getProviders(features)).toThrowError();
   });

   it('should throw on duplicate ids in embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedDHigherRank, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([]);
-    expect(() => getProviders()).toThrowError();
+    expect(() => getProviders(features)).toThrowError();
   });

   it('should throw on duplicate ids between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders()).toThrowError();
+    expect(() => getProviders(features)).toThrowError();
   });

   it('should throw on duplicate rank between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders()).toThrowError();
+    expect(() => getProviders(features)).toThrowError();
   });

   it('should not throw with same rank between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(getProviders()).toEqual({
+    expect(getProviders(features)).toEqual({
       sources: [mockSources.sourceA, mockSources.sourceB],
       embeds: [mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA],
     });

@@ -1,5 +1,6 @@
 import { mockEmbeds, mockSources } from '@/__test__/providerTests';
 import { makeProviders } from '@/main/builder';
+import { targets } from '@/main/targets.ts';
 import { afterEach, describe, expect, it, vi } from 'vitest';

 const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
@@ -15,6 +16,7 @@ describe('ProviderControls.listSources()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.listSources()).toEqual([
       {
@@ -32,6 +34,7 @@ describe('ProviderControls.listSources()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.listSources()).toEqual([
       {
@@ -49,6 +52,7 @@ describe('ProviderControls.listSources()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.listSources()).toEqual([
       {
@@ -66,6 +70,7 @@ describe('ProviderControls.listSources()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p1 = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     const l1 = p1.listSources();
     expect(l1.map((v) => v.id).join(',')).toEqual('z,y');
@@ -74,6 +79,7 @@ describe('ProviderControls.listSources()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p2 = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     const l2 = p2.listSources();
     expect(l2.map((v) => v.id).join(',')).toEqual('z,y');
@@ -90,6 +96,7 @@ describe('ProviderControls.getAllEmbedMetaSorted()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.listEmbeds()).toEqual([
       {
@@ -106,6 +113,7 @@ describe('ProviderControls.getAllEmbedMetaSorted()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX, mockEmbeds.fullEmbedZ]);
     const p1 = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     const l1 = p1.listEmbeds();
     expect(l1.map((v) => v.id).join(',')).toEqual('z,x');
@@ -114,6 +122,7 @@ describe('ProviderControls.getAllEmbedMetaSorted()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedZ, mockEmbeds.fullEmbedX]);
     const p2 = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     const l2 = p2.listEmbeds();
     expect(l2.map((v) => v.id).join(',')).toEqual('z,x');

@@ -1,5 +1,6 @@
 import { mockEmbeds, mockSources } from '@/__test__/providerTests';
 import { makeProviders } from '@/main/builder';
+import { targets } from '@/main/targets.ts';
 import { afterEach, describe, expect, it, vi } from 'vitest';

 const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
@@ -15,6 +16,7 @@ describe('ProviderControls.getMetadata()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.getMetadata(':)')).toEqual(null);
   });
@@ -24,6 +26,7 @@ describe('ProviderControls.getMetadata()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.getMetadata(mockSources.fullSourceZBoth.id)).toEqual({
       type: 'source',
@@ -39,6 +42,7 @@ describe('ProviderControls.getMetadata()', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX]);
     const p = makeProviders({
       fetcher: null as any,
+      target: targets.NATIVE,
     });
     expect(p.getMetadata(mockEmbeds.fullEmbedX.id)).toEqual({
       type: 'embed',

@@ -2,6 +2,7 @@ export type { EmbedOutput, SourcererOutput } from '@/providers/base';
 export type { RunOutput } from '@/main/runner';
 export type { MetaOutput } from '@/main/meta';
 export type { FullScraperEvents } from '@/main/events';
+export type { Targets, Flags } from '@/main/targets';
 export type { MediaTypes, ShowMedia, ScrapeMedia, MovieMedia } from '@/main/media';
 export type {
   ProviderBuilderOptions,
@@ -15,3 +16,4 @@ export { NotFoundError } from '@/utils/errors';
 export { makeProviders } from '@/main/builder';
 export { makeStandardFetcher } from '@/fetchers/standardFetch';
 export { makeSimpleProxyFetcher } from '@/fetchers/simpleProxy';
+export { flags, targets } from '@/main/targets';

@@ -5,6 +5,7 @@ import { scrapeIndividualEmbed, scrapeInvidualSource } from '@/main/individualRu
 import { ScrapeMedia } from '@/main/media';
 import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/main/meta';
 import { RunOutput, runAllProviders } from '@/main/runner';
+import { Targets, getTargetFeatures } from '@/main/targets';
 import { EmbedOutput, SourcererOutput } from '@/providers/base';
 import { getProviders } from '@/providers/get';

@@ -15,6 +16,9 @@ export interface ProviderBuilderOptions {
   // proxied fetcher, if the scraper needs to access a CORS proxy. this fetcher will be called instead
   // of the normal fetcher. Defaults to the normal fetcher.
   proxiedFetcher?: Fetcher;
+
+  // target of where the streams will be used
+  target: Targets;
 }

 export interface RunnerOptions {
@@ -77,8 +81,10 @@ export interface ProviderControls {
 }

 export function makeProviders(ops: ProviderBuilderOptions): ProviderControls {
-  const list = getProviders();
+  const features = getTargetFeatures(ops.target);
+  const list = getProviders(features);
   const providerRunnerOps = {
+    features,
     fetcher: makeFullFetcher(ops.fetcher),
     proxiedFetcher: makeFullFetcher(ops.proxiedFetcher ?? ops.fetcher),
   };

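For consumers, the visible change is that makeProviders() now needs to know where streams will be played. A minimal sketch, assuming the package is imported under its published name and that makeStandardFetcher wraps a global fetch (both outside this diff):

    import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

    // NATIVE targets (servers, native apps) have no CORS sandbox, so no flags
    // are required of streams.
    const nativeProviders = makeProviders({
      fetcher: makeStandardFetcher(fetch),
      target: targets.NATIVE,
    });

    // BROWSER targets require the NO_CORS flag, so sources whose streams cannot
    // be played cross-origin are filtered out before they are listed or run.
    const browserProviders = makeProviders({
      fetcher: makeStandardFetcher(fetch),
      target: targets.BROWSER,
    });
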
@@ -1,11 +1,14 @@
 import { UseableFetcher } from '@/fetchers/types';
 import { IndividualScraperEvents } from '@/main/events';
 import { ScrapeMedia } from '@/main/media';
+import { FeatureMap, flagsAllowedInFeatures } from '@/main/targets';
 import { EmbedOutput, SourcererOutput } from '@/providers/base';
 import { ProviderList } from '@/providers/get';
 import { ScrapeContext } from '@/utils/context';
+import { NotFoundError } from '@/utils/errors';

 export type IndividualSourceRunnerOptions = {
+  features: FeatureMap;
   fetcher: UseableFetcher;
   proxiedFetcher: UseableFetcher;
   media: ScrapeMedia;
@@ -46,11 +49,17 @@ export async function scrapeInvidualSource(
       media: ops.media,
     });

+  // stream doesn't satisfy the feature flags, so gets removed in output
+  if (output?.stream && !flagsAllowedInFeatures(ops.features, output.stream.flags)) {
+    output.stream = undefined;
+  }
+
   if (!output) throw new Error('output is null');
   return output;
 }

 export type IndividualEmbedRunnerOptions = {
+  features: FeatureMap;
   fetcher: UseableFetcher;
   proxiedFetcher: UseableFetcher;
   url: string;
@@ -65,7 +74,7 @@ export async function scrapeIndividualEmbed(
   const embedScraper = list.embeds.find((v) => ops.id === v.id);
   if (!embedScraper) throw new Error('Embed with ID not found');

-  return embedScraper.scrape({
+  const output = await embedScraper.scrape({
     fetcher: ops.fetcher,
     proxiedFetcher: ops.proxiedFetcher,
     url: ops.url,
@@ -77,4 +86,8 @@ export async function scrapeIndividualEmbed(
       });
     },
   });
+
+  if (!flagsAllowedInFeatures(ops.features, output.stream.flags))
+    throw new NotFoundError("stream doesn't satisfy target feature flags");
+  return output;
 }

@@ -1,6 +1,7 @@
 import { UseableFetcher } from '@/fetchers/types';
 import { FullScraperEvents } from '@/main/events';
 import { ScrapeMedia } from '@/main/media';
+import { FeatureMap, flagsAllowedInFeatures } from '@/main/targets';
 import { EmbedOutput, SourcererOutput } from '@/providers/base';
 import { ProviderList } from '@/providers/get';
 import { Stream } from '@/providers/streams';
@@ -28,6 +29,7 @@ export type EmbedRunOutput = {
 export type ProviderRunnerOptions = {
   fetcher: UseableFetcher;
   proxiedFetcher: UseableFetcher;
+  features: FeatureMap;
   sourceOrder?: string[];
   embedOrder?: string[];
   events?: FullScraperEvents;
@@ -77,6 +79,9 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
           ...contextBase,
           media: ops.media,
         });
+      if (output?.stream && !flagsAllowedInFeatures(ops.features, output.stream.flags)) {
+        throw new NotFoundError("stream doesn't satisfy target feature flags");
+      }
     } catch (err) {
       if (err instanceof NotFoundError) {
         ops.events?.update?.({
@@ -135,6 +140,9 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
          ...contextBase,
          url: e.url,
        });
+       if (!flagsAllowedInFeatures(ops.features, embedOutput.stream.flags)) {
+         throw new NotFoundError("stream doesn't satisfy target feature flags");
+       }
      } catch (err) {
        if (err instanceof NotFoundError) {
          ops.events?.update?.({

src/main/targets.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
+export const flags = {
+  NO_CORS: 'no-cors',
+} as const;
+
+export type Flags = (typeof flags)[keyof typeof flags];
+
+export const targets = {
+  BROWSER: 'browser',
+  NATIVE: 'native',
+} as const;
+
+export type Targets = (typeof targets)[keyof typeof targets];
+
+export type FeatureMap = {
+  requires: readonly Flags[];
+};
+
+export const targetToFeatures: Record<Targets, FeatureMap> = {
+  browser: {
+    requires: [flags.NO_CORS],
+  },
+  native: {
+    requires: [],
+  },
+} as const;
+
+export function getTargetFeatures(target: Targets): FeatureMap {
+  return targetToFeatures[target];
+}
+
+export function flagsAllowedInFeatures(features: FeatureMap, inputFlags: Flags[]): boolean {
+  const hasAllFlags = features.requires.every((v) => inputFlags.includes(v));
+  if (!hasAllFlags) return false;
+  return true;
+}

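A quick sketch of what the new feature map means in practice, using only the functions defined in this file (the results follow directly from targetToFeatures):

    import { flags, targets, getTargetFeatures, flagsAllowedInFeatures } from '@/main/targets';

    const browser = getTargetFeatures(targets.BROWSER); // { requires: ['no-cors'] }
    const native = getTargetFeatures(targets.NATIVE);   // { requires: [] }

    // A stream flagged NO_CORS satisfies every current target.
    flagsAllowedInFeatures(browser, [flags.NO_CORS]); // true
    flagsAllowedInFeatures(native, [flags.NO_CORS]);  // true

    // A stream without the flag fails the browser target but passes native.
    flagsAllowedInFeatures(browser, []); // false
    flagsAllowedInFeatures(native, []);  // true
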
@@ -1,4 +1,5 @@
 import { MovieMedia, ShowMedia } from '@/main/media';
+import { Flags } from '@/main/targets';
 import { Stream } from '@/providers/streams';
 import { EmbedScrapeContext, ScrapeContext } from '@/utils/context';

@@ -15,6 +16,7 @@ export type Sourcerer = {
   name: string; // displayed in the UI
   rank: number; // the higher the number, the earlier it gets put on the queue
   disabled?: boolean;
+  flags: Flags[];
   scrapeMovie?: (input: ScrapeContext & { media: MovieMedia }) => Promise<SourcererOutput>;
   scrapeShow?: (input: ScrapeContext & { media: ShowMedia }) => Promise<SourcererOutput>;
 };

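Every sourcerer now has to declare which flags its streams carry. A hedged sketch of what that looks like for a hypothetical source (the id, name, rank, and scraper body are placeholders, not part of this commit):

    import { flags } from '@/main/targets';
    import { makeSourcerer } from '@/providers/base';

    export const exampleScraper = makeSourcerer({
      id: 'example', // hypothetical id
      name: 'Example',
      rank: 42, // hypothetical rank
      // streams from this source can be played cross-origin
      flags: [flags.NO_CORS],
      async scrapeMovie(ctx) {
        // would return a SourcererOutput whose stream carries matching flags
        throw new Error('not implemented');
      },
    });
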
@@ -1,5 +1,6 @@
 import crypto from 'crypto-js';

+import { flags } from '@/main/targets';
 import { makeEmbed } from '@/providers/base';

 const { AES, enc } = crypto;
@@ -69,6 +70,7 @@ export const upcloudScraper = makeEmbed({
       stream: {
         type: 'hls',
         playlist: sources.file,
+        flags: [flags.NO_CORS],
       },
     };
   },

@@ -1,3 +1,4 @@
+import { FeatureMap, flagsAllowedInFeatures } from '@/main/targets';
 import { gatherAllEmbeds, gatherAllSources } from '@/providers/all';
 import { Embed, Sourcerer } from '@/providers/base';
 import { hasDuplicates } from '@/utils/predicates';
@@ -7,7 +8,7 @@ export interface ProviderList {
   embeds: Embed[];
 }

-export function getProviders(): ProviderList {
+export function getProviders(features: FeatureMap): ProviderList {
   const sources = gatherAllSources().filter((v) => !v?.disabled);
   const embeds = gatherAllEmbeds().filter((v) => !v?.disabled);
   const combined = [...sources, ...embeds];
@@ -21,7 +22,7 @@ export function getProviders(): ProviderList {
   if (anyDuplicateEmbedRank) throw new Error('Duplicate rank found in embeds');

   return {
-    sources,
+    sources: sources.filter((s) => flagsAllowedInFeatures(features, s.flags)),
     embeds,
   };
 }

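The effect on getProviders() itself, sketched with made-up sources rather than the real mocks from the tests above: only sources are filtered against the target's requirements here; embeds pass through and their streams are checked later, at scrape time, by the runners.

    import { targets, getTargetFeatures } from '@/main/targets';
    import { getProviders } from '@/providers/get';

    // If the registered sources were one with flags: [flags.NO_CORS] and one
    // with flags: [], the browser list would keep only the first, while the
    // native list would keep both.
    const browserList = getProviders(getTargetFeatures(targets.BROWSER));
    const nativeList = getProviders(getTargetFeatures(targets.NATIVE));
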
@@ -1,3 +1,4 @@
+import { flags } from '@/main/targets';
 import { makeSourcerer } from '@/providers/base';
 import { upcloudScraper } from '@/providers/embeds/upcloud';
 import { getFlixhqSourceDetails, getFlixhqSources } from '@/providers/sources/flixhq/scrape';
@@ -9,6 +10,7 @@ export const flixhqScraper = makeSourcerer({
   id: 'flixhq',
   name: 'FlixHQ',
   rank: 100,
+  flags: [flags.NO_CORS],
   async scrapeMovie(ctx) {
     const id = await getFlixhqId(ctx, ctx.media);
     if (!id) throw new NotFoundError('no search results match');

@@ -1,3 +1,5 @@
+import { Flags } from '@/main/targets';
+
 export type StreamFile = {
   type: 'mp4';
   url: string;
@@ -7,11 +9,13 @@ export type Qualities = '360' | '480' | '720' | '1080';

 export type FileBasedStream = {
   type: 'file';
+  flags: Flags[];
   qualities: Partial<Record<Qualities, StreamFile>>;
 };

 export type HlsBasedStream = {
   type: 'hls';
+  flags: Flags[];
   playlist: string;
 };