From ffe5cf536904d5a6ee3d6b210a887265ed75912e Mon Sep 17 00:00:00 2001
From: mrjvs
Date: Thu, 24 Aug 2023 21:51:43 +0200
Subject: [PATCH] callback events

---
 README.md                |  7 +++++++
 src/main/builder.ts      | 22 ++++++++++++----------
 src/main/events.ts       | 46 ++++++++++++++++++++++++++++++++++++++++++++++++
 src/main/runner.ts       | 19 ++++++++++++++++++-
 src/providers/base.ts    | 23 ++++++++++++++++++++++-
 src/providers/streams.ts |  2 +-
 src/utils/context.ts     | 10 ++++++++--
 src/utils/errors.ts      |  6 ++++++
 8 files changed, 120 insertions(+), 15 deletions(-)
 create mode 100644 src/main/events.ts
 create mode 100644 src/utils/errors.ts

diff --git a/README.md b/README.md
index e69ac12..14a1337 100644
--- a/README.md
+++ b/README.md
@@ -6,5 +6,12 @@ Feel free to use for your own projects.
 features:
 - scrape popular streaming websites
 - works in both browser and NodeJS server
+- choose between all streams or non-protected streams (for browser use)
 
 > This package is still WIP
+
+> TODO documentation: examples for nodejs + browser
+
+> TODO documentation: how to use + usecases
+
+> TODO documentation: examples on how to make a custom fetcher
diff --git a/src/main/builder.ts b/src/main/builder.ts
index 7d594d0..ee96ba1 100644
--- a/src/main/builder.ts
+++ b/src/main/builder.ts
@@ -1,13 +1,11 @@
 import { Fetcher } from '@/fetchers/types';
+import { FullScraperEvents, SingleScraperEvents } from '@/main/events';
 import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/main/meta';
-import { EmbedRunOutput, RunOutput, SourceRunOutput } from '@/main/runner';
+import { ProviderRunnerOptions, RunOutput, SourceRunOutput, runAllProviders } from '@/main/runner';
 import { getProviders } from '@/providers/all';
 
 // TODO meta data input (tmdb id, imdb id, title, release year)
 // TODO actually running scrapers
-// TODO documentation: examples for nodejs + browser
-// TODO documentation: how to use + usecases
-// TODO documentation: examples on how to make a custom fetcher
 
 export interface ProviderBuilderOptions {
   // fetcher, every web request gets called through here
@@ -21,13 +19,10 @@ export interface ProviderControls {
   // Run all providers one by one. in order of rank (highest first)
   // returns the stream, or null if none found
-  runAll(): Promise<RunOutput | null>;
+  runAll(cbs: FullScraperEvents): Promise<RunOutput | null>;
 
   // Run a source provider
-  runSource(id: string): Promise<SourceRunOutput>;
-
-  // Run a embed provider
-  runEmbed(id: string): Promise<EmbedRunOutput>;
+  runSource(id: string, cbs: SingleScraperEvents): Promise<SourceRunOutput>;
 
   // get meta data about a source or embed.
   getMetadata(id: string): MetaOutput | null;
 
@@ -39,10 +34,17 @@ export interface ProviderControls {
   listEmbeds(): MetaOutput[];
 }
 
-export function makeProviders(_ops: ProviderBuilderOptions): ProviderControls {
+export function makeProviders(ops: ProviderBuilderOptions): ProviderControls {
   const list = getProviders();
+  const providerRunnerOps: ProviderRunnerOptions = {
+    fetcher: ops.fetcher,
+    proxiedFetcher: ops.proxiedFetcher ?? ops.fetcher,
+  };
 
   return {
+    runAll(cbs) {
+      return runAllProviders(providerRunnerOps, cbs);
+    },
     getMetadata(id) {
       return getSpecificId(list, id);
     },
diff --git a/src/main/events.ts b/src/main/events.ts
new file mode 100644
index 0000000..09f648d
--- /dev/null
+++ b/src/main/events.ts
@@ -0,0 +1,46 @@
+export type UpdateEventStatus = 'success' | 'failure' | 'notfound' | 'pending';
+
+export type UpdateEvent = {
+  percentage: number;
+  status: UpdateEventStatus;
+};
+
+export type InitEvent = {
+  sourceIds: string[]; // list of source ids
+};
+
+export type DiscoverEmbedsEvent = {
+  sourceId: string;
+
+  // list of embeds that will be scraped in order
+  embeds: Array<{
+    id: string;
+    embedScraperId: string;
+  }>;
+};
+
+export type StartScrapingEvent = {
+  sourceId: string;
+
+  // embed Id (not embedScraperId)
+  embedId?: string;
+};
+
+export type SingleScraperEvents = {
+  update?: (evt: UpdateEvent) => void;
+};
+
+export type FullScraperEvents = {
+  // update progress percentage and status of the currently scraping item
+  update?: (evt: UpdateEvent) => void;
+
+  // initial list of scrapers it's running, only triggers once per run.
+  init?: (evt: InitEvent) => void;
+
+  // list of embeds discovered for the currently running source scraper
+  // triggers once per source scraper
+  discoverEmbeds?: (evt: DiscoverEmbedsEvent) => void;
+
+  // start scraping an item.
+  start?: (id: string) => void;
+};
diff --git a/src/main/runner.ts b/src/main/runner.ts
index f0295af..b2627e0 100644
--- a/src/main/runner.ts
+++ b/src/main/runner.ts
@@ -1,8 +1,10 @@
+import { Fetcher } from '@/fetchers/types';
+import { FullScraperEvents } from '@/main/events';
 import { Stream } from '@/providers/streams';
 
 export type RunOutput = {
   sourceId: string;
-  fromEmbed: boolean;
+  embedId?: string;
   stream: Stream;
 };
 
@@ -16,3 +18,18 @@ export type EmbedRunOutput = {
   embedId: string;
   stream?: Stream;
 };
+
+export type ProviderRunnerOptions = {
+  fetcher: Fetcher;
+  proxiedFetcher: Fetcher;
+};
+
+export async function runAllProviders(_ops: ProviderRunnerOptions, _cbs: FullScraperEvents): Promise<RunOutput | null> {
+  return {
+    sourceId: '123',
+    stream: {
+      type: 'file',
+      qualities: {},
+    },
+  };
+}
diff --git a/src/providers/base.ts b/src/providers/base.ts
index 8292690..373989a 100644
--- a/src/providers/base.ts
+++ b/src/providers/base.ts
@@ -1,7 +1,11 @@
 import { Stream } from '@/providers/streams';
-import { ScrapeContext } from '@/utils/context';
+import { EmbedScrapeContext, ScrapeContext } from '@/utils/context';
 
 export type SourcererOutput = {
+  embeds: {
+    embedId: string;
+    url: string;
+  }[];
   stream?: Stream;
 };
 
@@ -17,3 +21,20 @@ export function makeSourcerer(state: Sourcerer): Sourcerer | null {
   if (state.disabled) return null;
   return state;
 }
+
+export type EmbedOutput = {
+  stream?: Stream;
+};
+
+export type Embed = {
+  id: string;
+  name: string; // displayed in the UI
+  rank: number; // the higher the number, the earlier it gets put on the queue
+  disabled?: boolean;
+  scrape: (input: EmbedScrapeContext) => Promise<EmbedOutput>;
+};
+
+export function makeEmbed(state: Embed): Embed | null {
+  if (state.disabled) return null;
+  return state;
+}
diff --git a/src/providers/streams.ts b/src/providers/streams.ts
index 6078628..3c46567 100644
--- a/src/providers/streams.ts
+++ b/src/providers/streams.ts
@@ -7,7 +7,7 @@ export type Qualities = '360' | '480' | '720' | '1080';
 
 export type FileBasedStream = {
   type: 'file';
-  qualities: Record<Qualities, StreamFile>;
+  qualities: Partial<Record<Qualities, StreamFile>>;
 };
 
 export type HlsBasedStream = {
diff --git a/src/utils/context.ts b/src/utils/context.ts
index 3e42ffa..5d89f96 100644
--- a/src/utils/context.ts
+++ b/src/utils/context.ts
@@ -1,7 +1,13 @@
 import { UseableFetcher } from '@/fetchers/types';
 
-export interface ScrapeContext {
+export type ScrapeContext = {
   proxiedFetcher: UseableFetcher;
   fetcher: UseableFetcher;
   progress(val: number): void;
-}
+};
+
+export type EmbedInput = {
+  url: string;
+};
+
+export type EmbedScrapeContext = EmbedInput & ScrapeContext;
diff --git a/src/utils/errors.ts b/src/utils/errors.ts
new file mode 100644
index 0000000..d31f7d8
--- /dev/null
+++ b/src/utils/errors.ts
@@ -0,0 +1,6 @@
+export class NotFoundError extends Error {
+  constructor(reason?: string) {
+    super(`Couldn't find a stream: ${reason ?? 'not found'}`);
+    this.name = 'NotFoundError';
+  }
+}
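
A minimal sketch of how a consumer could wire up the new callback events, assuming some `Fetcher` implementation (`myFetcher` below is a placeholder, not part of the patch) and using the repo's `@/` import alias as seen in the diff; the eventual public entry point may differ.

```ts
import { Fetcher } from '@/fetchers/types';
import { FullScraperEvents } from '@/main/events';
import { makeProviders } from '@/main/builder';

// Assumed to exist elsewhere: any Fetcher implementation (e.g. one wrapping fetch).
declare const myFetcher: Fetcher;

const providers = makeProviders({
  fetcher: myFetcher,
  // proxiedFetcher is optional and falls back to `fetcher` (see builder.ts above)
});

// Every callback is optional, so a consumer can subscribe to just the events it needs.
const events: FullScraperEvents = {
  init: (evt) => console.log('sources to run:', evt.sourceIds),
  discoverEmbeds: (evt) => console.log(`embeds for ${evt.sourceId}:`, evt.embeds),
  start: (id) => console.log('started scraping', id),
  update: (evt) => console.log(`progress: ${evt.percentage}% (${evt.status})`),
};

async function run() {
  const output = await providers.runAll(events);
  if (!output) {
    console.log('no stream found');
    return;
  }
  console.log(`stream found via ${output.sourceId}`, output.stream);
}

run();
```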