Mirror of https://github.com/movie-web/providers.git, synced 2025-09-13 18:13:25 +00:00
Merge branch 'dev'
3 .docs/.gitignore vendored Normal file
@@ -0,0 +1,3 @@
node_modules
.vitepress/cache
.vitepress/dist

28 .docs/.vitepress/config.mts Normal file
@@ -0,0 +1,28 @@
import { defineConfig } from 'vitepress'

export default defineConfig({
  title: "MW provider docs",
  description: "Documentation for @movie-web/providers",
  srcDir: "src",
  themeConfig: {
    nav: [
      { text: 'Home', link: '/' },
      { text: 'Get Started', link: '/get-started/start' },
      { text: 'Reference', link: '/reference/start' }
    ],

    sidebar: [
      {
        text: 'Examples',
        items: [
          { text: 'Markdown Examples', link: '/markdown-examples' },
          { text: 'Runtime API Examples', link: '/api-examples' }
        ]
      }
    ],

    socialLinks: [
      { icon: 'github', link: 'https://github.com/movie-web/providers' }
    ]
  }
})

1252 .docs/package-lock.json generated Normal file
File diff suppressed because it is too large

9 .docs/package.json Normal file
@@ -0,0 +1,9 @@
{
  "scripts": {
    "dev": "vitepress dev .",
    "build": "vitepress build ."
  },
  "devDependencies": {
    "vitepress": "^1.0.0-rc.10"
  }
}

49 .docs/src/get-started/start.md Normal file
@@ -0,0 +1,49 @@
---
outline: deep
---

# Runtime API Examples

This page demonstrates usage of some of the runtime APIs provided by VitePress.

The main `useData()` API can be used to access site, theme, and page data for the current page. It works in both `.md` and `.vue` files:

```md
<script setup>
import { useData } from 'vitepress'

const { theme, page, frontmatter } = useData()
</script>

## Results

### Theme Data
<pre>{{ theme }}</pre>

### Page Data
<pre>{{ page }}</pre>

### Page Frontmatter
<pre>{{ frontmatter }}</pre>
```

<script setup>
import { useData } from 'vitepress'

const { site, theme, page, frontmatter } = useData()
</script>

## Results

### Theme Data
<pre>{{ theme }}</pre>

### Page Data
<pre>{{ page }}</pre>

### Page Frontmatter
<pre>{{ frontmatter }}</pre>

## More

Check out the documentation for the [full list of runtime APIs](https://vitepress.dev/reference/runtime-api#usedata).

24 .docs/src/index.md Normal file
@@ -0,0 +1,24 @@
---
layout: home

hero:
  name: "@movie-web/providers"
  tagline: Providers for all kinds of media
  actions:
    - theme: brand
      text: Get Started
      link: /get-started/start
    - theme: alt
      text: reference
      link: /reference/start

features:
  - title: All the scraping!
    icon: '!'
    details: scrape popular streaming websites
  - title: Client & server
    icon: '!'
    details: This library can be run both server-side and client-side (with CORS proxy)
---

85 .docs/src/reference/start.md Normal file
@@ -0,0 +1,85 @@
# Markdown Extension Examples

This page demonstrates some of the built-in markdown extensions provided by VitePress.

## Syntax Highlighting

VitePress provides Syntax Highlighting powered by [Shiki](https://github.com/shikijs/shiki), with additional features like line-highlighting:

**Input**

````
```js{4}
export default {
  data () {
    return {
      msg: 'Highlighted!'
    }
  }
}
```
````

**Output**

```js{4}
export default {
  data () {
    return {
      msg: 'Highlighted!'
    }
  }
}
```

## Custom Containers

**Input**

```md
::: info
This is an info box.
:::

::: tip
This is a tip.
:::

::: warning
This is a warning.
:::

::: danger
This is a dangerous warning.
:::

::: details
This is a details block.
:::
```

**Output**

::: info
This is an info box.
:::

::: tip
This is a tip.
:::

::: warning
This is a warning.
:::

::: danger
This is a dangerous warning.
:::

::: details
This is a details block.
:::

## More

Check out the documentation for the [full list of markdown extensions](https://vitepress.dev/guide/markdown).

3 .github/CODEOWNERS vendored Normal file
@@ -0,0 +1,3 @@
* @movie-web/core

.github @binaryoverload

1 .github/CODE_OF_CONDUCT.md vendored Normal file
@@ -0,0 +1 @@
Please visit the [main document at primary repository](https://github.com/movie-web/movie-web/blob/dev/.github/CODE_OF_CONDUCT.md).

1 .github/CONTRIBUTING.md vendored Normal file
@@ -0,0 +1 @@
Please visit the [main document at primary repository](https://github.com/movie-web/movie-web/blob/dev/.github/CONTRIBUTING.md).

14 .github/SECURITY.md vendored Normal file
@@ -0,0 +1,14 @@
# Security Policy

## Supported Versions

The movie-web maintainers only support the latest version of movie-web published at https://movie-web.app.
This published version is equivalent to the master branch.

Support is not provided for any forks or mirrors of movie-web.

## Reporting a Vulnerability

There are two ways you can contact the movie-web maintainers to report a vulnerability:
- Email [security@movie-web.app](mailto:security@movie-web.app)
- Report the vulnerability in the [movie-web Discord server](https://discord.movie-web.app)

6 .github/pull_request_template.md vendored Normal file
@@ -0,0 +1,6 @@
This pull request resolves #XXX

- [ ] I have read and agreed to the [code of conduct](https://github.com/movie-web/movie-web/blob/dev/.github/CODE_OF_CONDUCT.md).
- [ ] I have read and complied with the [contributing guidelines](https://github.com/movie-web/movie-web/blob/dev/.github/CONTRIBUTING.md).
- [ ] What I'm implementing was assigned to me and is an [approved issue](https://github.com/movie-web/movie-web/issues?q=is%3Aopen+is%3Aissue+label%3Aapproved). For reference, please take a look at our [GitHub projects](https://github.com/movie-web/movie-web/projects).
- [ ] I have tested all of my changes.

43 .github/workflows/docs.yml vendored Normal file
@@ -0,0 +1,43 @@
name: Publish docs

on:
  push:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Install Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18

      - name: Install packages
        run: cd .docs && npm ci

      - name: Build
        run: cd .docs && npm run build

      - name: Upload
        uses: actions/upload-pages-artifact@v2
        with:
          path: ./.docs/.vitepress/dist

  deploy:
    needs: build
    permissions:
      pages: write
      id-token: write
    environment:
      name: docs
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2

1 .gitignore vendored
@@ -1,2 +1,3 @@
 node_modules/
 /lib
+coverage

25 README.md
@@ -5,6 +5,27 @@ Feel free to use for your own projects.
 
 features:
 - scrape popular streaming websites
-- works in both browser and NodeJS server
+- works in both browser and server-side
 
-> This package is still WIP
+> **This package is still WIP**
+
+Todos:
+- add tests
+  - ProviderControls.runAll()
+    - are events called?
+    - custom source or embed order
+    - are fetchers called?
+    - is proxiedFetcher properly defaulted back to normal fetcher?
+  - makeStandardFetcher()
+    - do all parameters get passed to real fetch as expected?
+    - does serialisation work as expected? (formdata + json + string)
+    - do json responses get automatically parsed?
+  - running individual scrapers
+- add all real providers
+
+Future todos:
+- docs: examples for nodejs + browser
+- docs: how to use + usecases
+- docs: examples for custom fetcher
+- choose an output environment (for browser or for native)
+- flixhq show support

0 examples/.gitkeep Normal file
892 package-lock.json generated
File diff suppressed because it is too large

13 package.json
@@ -1,6 +1,6 @@
 {
   "name": "@movie-web/providers",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "description": "Package that contains all the providers of movie-web",
   "main": "./lib/providers.umd.js",
   "types": "./lib/providers.d.ts",
@@ -31,6 +31,7 @@
     "build": "vite build",
     "test": "vitest run",
     "test:watch": "vitest",
+    "test:coverage": "vitest run --coverage",
     "lint": "eslint --ext .ts,.js src/",
     "lint:fix": "eslint --fix --ext .ts,.js src/",
     "lint:report": "eslint --ext .ts,.js --output-file eslint_report.json --format json src/",
@@ -38,8 +39,10 @@
     "prepublishOnly": "npm test && npm run lint"
   },
   "devDependencies": {
+    "@types/crypto-js": "^4.1.1",
     "@typescript-eslint/eslint-plugin": "^5.60.0",
     "@typescript-eslint/parser": "^5.60.0",
+    "@vitest/coverage-v8": "^0.34.3",
     "eslint": "^8.30.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-config-prettier": "^8.5.0",
@@ -48,12 +51,16 @@
     "eslint-plugin-prettier": "^4.2.1",
     "prettier": "^2.6.2",
     "tsc-alias": "^1.6.7",
-    "tslint": "^6.1.3",
-    "tslint-config-prettier": "^1.18.0",
     "typescript": "^4.6.3",
     "vite": "^4.0.0",
     "vite-plugin-dts": "^2.3.0",
     "vite-plugin-eslint": "^1.8.1",
     "vitest": "^0.32.2"
+  },
+  "dependencies": {
+    "cheerio": "^1.0.0-rc.12",
+    "crypto-js": "^4.1.1",
+    "form-data": "^4.0.0",
+    "node-fetch": "^3.3.2"
   }
 }

48 src/__test__/fetchers/common.test.ts Normal file
@@ -0,0 +1,48 @@
import { makeFullUrl } from "@/fetchers/common";
import { describe, expect, it } from "vitest";

describe("makeFullUrl()", () => {
  it('should pass normal url if no options', () => {
    expect(makeFullUrl('https://example.com/hello/world')).toEqual("https://example.com/hello/world")
    expect(makeFullUrl('https://example.com/hello/world?a=b')).toEqual("https://example.com/hello/world?a=b")
    expect(makeFullUrl('https://example.com/hello/world?a=b#hello')).toEqual("https://example.com/hello/world?a=b#hello")
    expect(makeFullUrl('https://example.com/hello/world#hello')).toEqual("https://example.com/hello/world#hello")
  })

  it('should append baseurl correctly', () => {
    const correctResult = "https://example.com/hello/world";
    expect(makeFullUrl(correctResult, { baseUrl: '' })).toEqual(correctResult)
    expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult)
    expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult)
    expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult)
    expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult)
    expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult)
    expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult)
    expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult)
    expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult)
    expect(makeFullUrl('world?a=b', { baseUrl: 'https://example.com/hello' })).toEqual("https://example.com/hello/world?a=b")
  })

  it('should throw with invalid baseurl combinations', () => {
    expect(() => makeFullUrl('example.com/hello/world', { baseUrl: '' })).toThrowError()
    expect(() => makeFullUrl('/hello/world', { baseUrl: 'example.com' })).toThrowError()
    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError()
    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError()
  })

  it('should add/merge query parameters', () => {
    expect(makeFullUrl('https://example.com/hello/world', { query: { a: 'b' } })).toEqual("https://example.com/hello/world?a=b")
    expect(makeFullUrl('https://example.com/hello/world/', { query: { a: 'b' } })).toEqual("https://example.com/hello/world/?a=b")
    expect(makeFullUrl('https://example.com', { query: { a: 'b' } })).toEqual("https://example.com/?a=b")
    expect(makeFullUrl('https://example.com/', { query: { a: 'b' } })).toEqual("https://example.com/?a=b")

    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: { a: 'b' } })).toEqual("https://example.com/hello/world?c=d&a=b")
    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: {} })).toEqual("https://example.com/hello/world?c=d")
    expect(makeFullUrl('https://example.com/hello/world?c=d')).toEqual("https://example.com/hello/world?c=d")
    expect(makeFullUrl('https://example.com/hello/world?c=d', {})).toEqual("https://example.com/hello/world?c=d")
  })

  it('should work with a mix of multiple options', () => {
    expect(makeFullUrl('/hello/world?c=d', { baseUrl: 'https://example.com/', query: { a: 'b' } })).toEqual("https://example.com/hello/world?c=d&a=b")
  })
})

@@ -1,8 +0,0 @@
-import { describe, expect, it } from 'vitest';
-import { LOG } from '@/testing/oof';
-
-describe('oof.ts', () => {
-  it('should contain hello', () => {
-    expect(LOG).toContain('hello');
-  });
-});

122 src/__test__/providerTests.ts Normal file
@@ -0,0 +1,122 @@
// eslint-disable-next-line import/no-extraneous-dependencies
import { vi } from 'vitest';

import { gatherAllEmbeds, gatherAllSources } from '@/providers/all';
import { Embed, Sourcerer } from '@/providers/base';

export function makeProviderMocks() {
  const embedsMock = vi.fn<Parameters<typeof gatherAllEmbeds>, ReturnType<typeof gatherAllEmbeds>>();
  const sourcesMock = vi.fn<Parameters<typeof gatherAllSources>, ReturnType<typeof gatherAllSources>>();
  return {
    gatherAllEmbeds: embedsMock,
    gatherAllSources: sourcesMock,
  };
}

const sourceA = {
  id: 'a',
  rank: 1,
  disabled: false,
} as Sourcerer;
const sourceB = {
  id: 'b',
  rank: 2,
  disabled: false,
} as Sourcerer;
const sourceCDisabled = {
  id: 'c',
  rank: 3,
  disabled: true,
} as Sourcerer;
const sourceAHigherRank = {
  id: 'a',
  rank: 100,
  disabled: false,
} as Sourcerer;
const sourceGSameRankAsA = {
  id: 'g',
  rank: 1,
  disabled: false,
} as Sourcerer;
const fullSourceYMovie = {
  id: 'y',
  name: 'Y',
  rank: 105,
  scrapeMovie: vi.fn(),
} as Sourcerer;
const fullSourceYShow = {
  id: 'y',
  name: 'Y',
  rank: 105,
  scrapeShow: vi.fn(),
} as Sourcerer;
const fullSourceZBoth = {
  id: 'z',
  name: 'Z',
  rank: 106,
  scrapeMovie: vi.fn(),
  scrapeShow: vi.fn(),
} as Sourcerer;

const embedD = {
  id: 'd',
  rank: 4,
  disabled: false,
} as Embed;
const embedA = {
  id: 'a',
  rank: 5,
  disabled: false,
} as Embed;
const embedEDisabled = {
  id: 'e',
  rank: 6,
  disabled: true,
} as Embed;
const embedDHigherRank = {
  id: 'd',
  rank: 4000,
  disabled: false,
} as Embed;
const embedFSameRankAsA = {
  id: 'f',
  rank: 5,
  disabled: false,
} as Embed;
const embedHSameRankAsSourceA = {
  id: 'h',
  rank: 1,
  disabled: false,
} as Embed;
const fullEmbedX = {
  id: 'x',
  name: 'X',
  rank: 104,
} as Embed;
const fullEmbedZ = {
  id: 'z',
  name: 'Z',
  rank: 109,
} as Embed;

export const mockSources = {
  sourceA,
  sourceB,
  sourceCDisabled,
  sourceAHigherRank,
  sourceGSameRankAsA,
  fullSourceYMovie,
  fullSourceYShow,
  fullSourceZBoth,
};

export const mockEmbeds = {
  embedA,
  embedD,
  embedDHigherRank,
  embedEDisabled,
  embedFSameRankAsA,
  embedHSameRankAsSourceA,
  fullEmbedX,
  fullEmbedZ,
};

63 src/__test__/providers/checks.test.ts Normal file
@@ -0,0 +1,63 @@
import { mockEmbeds, mockSources } from '@/__test__/providerTests';
import { getProviders } from '@/providers/get';
import { vi, describe, it, expect, afterEach } from 'vitest';

const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
vi.mock('@/providers/all', () => mocks);

describe('getProviders()', () => {
  afterEach(() => {
    vi.clearAllMocks();
  });

  it('should return providers', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
    expect(getProviders()).toEqual({
      sources: [mockSources.sourceA, mockSources.sourceB],
      embeds: [mockEmbeds.embedD],
    });
  });

  it('should filter out disabled providers', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedEDisabled]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceCDisabled, mockSources.sourceB]);
    expect(getProviders()).toEqual({
      sources: [mockSources.sourceA, mockSources.sourceB],
      embeds: [mockEmbeds.embedD],
    });
  });

  it('should throw on duplicate ids in sources', () => {
    mocks.gatherAllEmbeds.mockReturnValue([]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceAHigherRank, mockSources.sourceA, mockSources.sourceB]);
    expect(() => getProviders()).toThrowError();
  });

  it('should throw on duplicate ids in embeds', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedDHigherRank, mockEmbeds.embedA]);
    mocks.gatherAllSources.mockReturnValue([]);
    expect(() => getProviders()).toThrowError();
  });

  it('should throw on duplicate ids between sources and embeds', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
    expect(() => getProviders()).toThrowError();
  });

  it('should throw on duplicate rank between sources and embeds', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
    expect(() => getProviders()).toThrowError();
  });

  it('should not throw with same rank between sources and embeds', () => {
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA]);
    mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
    expect(getProviders()).toEqual({
      sources: [mockSources.sourceA, mockSources.sourceB],
      embeds: [mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA],
    });
  });
});

121 src/__test__/runner/list.test.ts Normal file
@@ -0,0 +1,121 @@
import { mockEmbeds, mockSources } from '@/__test__/providerTests';
import { makeProviders } from '@/main/builder';
import { afterEach, describe, expect, it, vi } from 'vitest';

const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
vi.mock('@/providers/all', () => mocks);

describe('ProviderControls.listSources()', () => {
  afterEach(() => {
    vi.clearAllMocks();
  });

  it('should return the source with movie type', () => {
    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceYMovie]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.listSources()).toEqual([
      {
        type: 'source',
        id: 'y',
        rank: mockSources.fullSourceYMovie.rank,
        name: 'Y',
        mediaTypes: ['movie'],
      },
    ]);
  });

  it('should return the source with show type', () => {
    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceYShow]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.listSources()).toEqual([
      {
        type: 'source',
        id: 'y',
        rank: mockSources.fullSourceYShow.rank,
        name: 'Y',
        mediaTypes: ['show'],
      },
    ]);
  });

  it('should return the source with both types', () => {
    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceZBoth]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.listSources()).toEqual([
      {
        type: 'source',
        id: 'z',
        rank: mockSources.fullSourceZBoth.rank,
        name: 'Z',
        mediaTypes: ['movie', 'show'],
      },
    ]);
  });

  it('should return the sources in correct order', () => {
    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceYMovie, mockSources.fullSourceZBoth]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p1 = makeProviders({
      fetcher: null as any,
    });
    const l1 = p1.listSources();
    expect(l1.map((v) => v.id).join(',')).toEqual('z,y');

    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceZBoth, mockSources.fullSourceYMovie]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p2 = makeProviders({
      fetcher: null as any,
    });
    const l2 = p2.listSources();
    expect(l2.map((v) => v.id).join(',')).toEqual('z,y');
  });
});

describe('ProviderControls.getAllEmbedMetaSorted()', () => {
  afterEach(() => {
    vi.clearAllMocks();
  });

  it('should return the correct embed format', () => {
    mocks.gatherAllSources.mockReturnValue([]);
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.listEmbeds()).toEqual([
      {
        type: 'embed',
        id: 'x',
        rank: mockEmbeds.fullEmbedX.rank,
        name: 'X',
      },
    ]);
  });

  it('should return the embeds in correct order', () => {
    mocks.gatherAllSources.mockReturnValue([]);
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX, mockEmbeds.fullEmbedZ]);
    const p1 = makeProviders({
      fetcher: null as any,
    });
    const l1 = p1.listEmbeds();
    expect(l1.map((v) => v.id).join(',')).toEqual('z,x');

    mocks.gatherAllSources.mockReturnValue([]);
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedZ, mockEmbeds.fullEmbedX]);
    const p2 = makeProviders({
      fetcher: null as any,
    });
    const l2 = p2.listEmbeds();
    expect(l2.map((v) => v.id).join(',')).toEqual('z,x');
  });
});

50 src/__test__/runner/meta.test.ts Normal file
@@ -0,0 +1,50 @@
import { mockEmbeds, mockSources } from '@/__test__/providerTests';
import { makeProviders } from '@/main/builder';
import { afterEach, describe, expect, it, vi } from 'vitest';

const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
vi.mock('@/providers/all', () => mocks);

describe('ProviderControls.getMetadata()', () => {
  afterEach(() => {
    vi.clearAllMocks();
  });

  it('should return null if not found', () => {
    mocks.gatherAllSources.mockReturnValue([]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.getMetadata(':)')).toEqual(null);
  });

  it('should return correct source meta', () => {
    mocks.gatherAllSources.mockReturnValue([mockSources.fullSourceZBoth]);
    mocks.gatherAllEmbeds.mockReturnValue([]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.getMetadata(mockSources.fullSourceZBoth.id)).toEqual({
      type: 'source',
      id: 'z',
      name: 'Z',
      rank: mockSources.fullSourceZBoth.rank,
      mediaTypes: ['movie', 'show'],
    });
  });

  it('should return correct embed meta', () => {
    mocks.gatherAllSources.mockReturnValue([]);
    mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.fullEmbedX]);
    const p = makeProviders({
      fetcher: null as any,
    });
    expect(p.getMetadata(mockEmbeds.fullEmbedX.id)).toEqual({
      type: 'embed',
      id: 'x',
      name: 'X',
      rank: mockEmbeds.fullEmbedX.rank,
    });
  });
});

21 src/__test__/tsconfig.json Normal file
@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "lib": ["ES2022"],
    "module": "ES2022",
    "declaration": true,
    "outDir": "./lib",
    "strict": true,
    "moduleResolution": "NodeNext",
    "allowImportingTsExtensions": true,
    "noEmit": true,
    "experimentalDecorators": true,
    "isolatedModules": false,
    "skipLibCheck": true,
    "paths": {
      "@/*": ["../*"],
      "@entrypoint": ["../index.ts"]
    }
  },
  "include": ["./"]
}

54 src/__test__/utils/list.test.ts Normal file
@@ -0,0 +1,54 @@
import { reorderOnIdList } from "@/utils/list";
import { describe, it, expect } from "vitest";

function list(def: string) {
  return def.split(",").map(v=>({
    rank: parseInt(v),
    id: v,
  }))
}

function expectListToEqual(l1: ReturnType<typeof list>, l2: ReturnType<typeof list>) {
  function flatten(l: ReturnType<typeof list>) {
    return l.map(v=>v.id).join(",");
  }
  expect(flatten(l1)).toEqual(flatten(l2));
}

describe('reorderOnIdList()', () => {
  it('should reorder based on rank', () => {
    const l = list('2,1,4,3');
    const sortedList = list('4,3,2,1')
    expectListToEqual(reorderOnIdList([], l), sortedList);
  });

  it('should work with empty input', () => {
    expectListToEqual(reorderOnIdList([], []), []);
  });

  it('should reorder based on id list', () => {
    const l = list('4,2,1,3');
    const sortedList = list('4,3,2,1')
    expectListToEqual(reorderOnIdList(["4","3","2","1"], l), sortedList);
  });

  it('should reorder based on id list and rank second', () => {
    const l = list('4,2,1,3');
    const sortedList = list('4,3,2,1')
    expectListToEqual(reorderOnIdList(["4","3"], l), sortedList);
  });

  it('should work with only one item', () => {
    const l = list('1');
    const sortedList = list('1')
    expectListToEqual(reorderOnIdList(["1"], l), sortedList);
    expectListToEqual(reorderOnIdList([], l), sortedList);
  });

  it('should not affect original list', () => {
    const l = list('4,3,2,1');
    const unsortedList = list('4,3,2,1')
    reorderOnIdList([], l);
    expectListToEqual(l, unsortedList);
  });
});

24 src/fetchers/body.ts Normal file
@@ -0,0 +1,24 @@
import FormData = require('form-data');

import { FetcherOptions } from '@/fetchers/types';

export interface SeralizedBody {
  headers: Record<string, string>;
  body: FormData | URLSearchParams | string | undefined;
}

export function serializeBody(body: FetcherOptions['body']): SeralizedBody {
  if (body === undefined || typeof body === 'string' || body instanceof URLSearchParams || body instanceof FormData)
    return {
      headers: {},
      body,
    };

  // serialize as JSON
  return {
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  };
}

39 src/fetchers/common.ts Normal file
@@ -0,0 +1,39 @@
import { Fetcher, FetcherOptions, UseableFetcher } from '@/fetchers/types';

export type FullUrlOptions = Pick<FetcherOptions, 'query' | 'baseUrl'>;

// make url with query params and base url used correctly
export function makeFullUrl(url: string, ops?: FullUrlOptions): string {
  // glue baseUrl and rest of url together
  let leftSide = ops?.baseUrl ?? '';
  let rightSide = url;

  // left side should always end with slash, if it's set
  if (leftSide.length > 0 && !leftSide.endsWith('/')) leftSide += '/';

  // right side should never start with slash
  if (rightSide.startsWith('/')) rightSide = rightSide.slice(1);

  const fullUrl = leftSide + rightSide;
  if (!fullUrl.startsWith('http://') && !fullUrl.startsWith('https://'))
    throw new Error(`Invalid URL -- URL doesn't start with a http scheme: '${fullUrl}'`);

  const parsedUrl = new URL(fullUrl);
  Object.entries(ops?.query ?? {}).forEach(([k, v]) => {
    parsedUrl.searchParams.set(k, v);
  });

  return parsedUrl.toString();
}

export function makeFullFetcher(fetcher: Fetcher): UseableFetcher {
  return (url, ops) => {
    return fetcher(url, {
      headers: ops?.headers ?? {},
      method: ops?.method ?? 'GET',
      query: ops?.query ?? {},
      baseUrl: ops?.baseUrl ?? '',
      body: ops?.body,
    });
  };
}

27 src/fetchers/standardFetch.ts Normal file
@@ -0,0 +1,27 @@
import fetch from 'node-fetch';

import { serializeBody } from '@/fetchers/body';
import { makeFullUrl } from '@/fetchers/common';
import { Fetcher } from '@/fetchers/types';

export function makeStandardFetcher(f: typeof fetch): Fetcher {
  const normalFetch: Fetcher = async (url, ops) => {
    const fullUrl = makeFullUrl(url, ops);
    const seralizedBody = serializeBody(ops.body);

    const res = await f(fullUrl, {
      method: ops.method,
      headers: {
        ...seralizedBody.headers,
        ...ops.headers,
      },
      body: seralizedBody.body,
    });

    const isJson = res.headers.get('content-type')?.includes('application/json');
    if (isJson) return res.json();
    return res.text();
  };

  return normalFetch;
}

26 src/fetchers/types.ts Normal file
@@ -0,0 +1,26 @@
import * as FormData from 'form-data';

export type FetcherOptions = {
  baseUrl?: string;
  headers?: Record<string, string>;
  query?: Record<string, string>;
  method?: 'GET' | 'POST';
  body?: Record<string, any> | string | FormData | URLSearchParams;
};

export type DefaultedFetcherOptions = {
  baseUrl?: string;
  body?: Record<string, any> | string | FormData;
  headers: Record<string, string>;
  query: Record<string, string>;
  method: 'GET' | 'POST';
};

export type Fetcher<T = any> = {
  (url: string, ops: DefaultedFetcherOptions): Promise<T>;
};

// this fetcher type adds some quality of life features (all options are optional)
export type UseableFetcher<T = any> = {
  (url: string, ops?: FetcherOptions): Promise<T>;
};

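The README above lists "docs: examples for custom fetcher" as a future todo. As a rough, hedged sketch of the contract defined in this file, a custom Fetcher only needs to accept DefaultedFetcherOptions and resolve with the response body; this example leans on a native `fetch` (Node 18+ or the browser), reuses makeFullUrl from the file above, and skips the body serialization that makeStandardFetcher handles:

```ts
import { makeFullUrl } from '@/fetchers/common';
import { Fetcher } from '@/fetchers/types';

// Sketch only: a minimal custom fetcher built on the platform's native fetch.
const myFetcher: Fetcher = async (url, ops) => {
  // makeFullUrl glues baseUrl, path and query together into one absolute URL
  const res = await fetch(makeFullUrl(url, ops), {
    method: ops.method,
    headers: ops.headers,
  });
  // returns the raw body as text; JSON parsing is left to the caller here
  return res.text();
};
```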
12 src/index.ts
@@ -1,5 +1,9 @@
-import { LOG } from '@/testing/oof';
-
-export function test() {
-  console.log(LOG);
-}
+export type { RunOutput } from '@/main/runner';
+export type { MetaOutput } from '@/main/meta';
+export type { FullScraperEvents } from '@/main/events';
+export type { MediaTypes, ShowMedia, ScrapeMedia, MovieMedia } from '@/main/media';
+export type { ProviderBuilderOptions, ProviderControls, RunnerOptions } from '@/main/builder';
+
+export { NotFoundError } from '@/utils/errors';
+export { makeProviders } from '@/main/builder';
+export { makeStandardFetcher } from '@/fetchers/standardFetch';

73 src/main/builder.ts Normal file
@@ -0,0 +1,73 @@
import { makeFullFetcher } from '@/fetchers/common';
import { Fetcher } from '@/fetchers/types';
import { FullScraperEvents } from '@/main/events';
import { ScrapeMedia } from '@/main/media';
import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/main/meta';
import { RunOutput, runAllProviders } from '@/main/runner';
import { getProviders } from '@/providers/get';

export interface ProviderBuilderOptions {
  // fetcher, every web request gets called through here
  fetcher: Fetcher;

  // proxied fetcher, if the scraper needs to access a CORS proxy. this fetcher will be called instead
  // of the normal fetcher. Defaults to the normal fetcher.
  proxiedFetcher?: Fetcher;
}

export interface RunnerOptions {
  // overwrite the order of sources to run. list of ids
  // any omitted ids are added to the end in order of rank (highest first)
  sourceOrder?: string[];

  // overwrite the order of embeds to run. list of ids
  // any omitted ids are added to the end in order of rank (highest first)
  embedOrder?: string[];

  // object of event functions
  events?: FullScraperEvents;

  // the media you want to see sources from
  media: ScrapeMedia;
}

export interface ProviderControls {
  // Run all providers one by one, in order of rank (highest first)
  // returns the stream, or null if none found
  runAll(runnerOps: RunnerOptions): Promise<RunOutput | null>;

  // get meta data about a source or embed.
  getMetadata(id: string): MetaOutput | null;

  // return all sources. sorted by rank (highest first)
  listSources(): MetaOutput[];

  // return all embed scrapers. sorted by rank (highest first)
  listEmbeds(): MetaOutput[];
}

export function makeProviders(ops: ProviderBuilderOptions): ProviderControls {
  const list = getProviders();
  const providerRunnerOps = {
    fetcher: makeFullFetcher(ops.fetcher),
    proxiedFetcher: makeFullFetcher(ops.proxiedFetcher ?? ops.fetcher),
  };

  return {
    runAll(runnerOps: RunnerOptions) {
      return runAllProviders(list, {
        ...providerRunnerOps,
        ...runnerOps,
      });
    },
    getMetadata(id) {
      return getSpecificId(list, id);
    },
    listSources() {
      return getAllSourceMetaSorted(list);
    },
    listEmbeds() {
      return getAllEmbedMetaSorted(list);
    },
  };
}

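For orientation, a minimal usage sketch of the API this commit introduces, based on the exports added to src/index.ts and the types in src/main/builder.ts above. The media values are placeholders and not part of the commit:

```ts
import fetch from 'node-fetch';
import { makeProviders, makeStandardFetcher } from '@movie-web/providers';

const providers = makeProviders({
  // every web request gets called through this fetcher
  fetcher: makeStandardFetcher(fetch),
  // proxiedFetcher is optional and defaults back to the normal fetcher
});

async function run() {
  // runs sources (and their embeds) in rank order until one yields a stream
  const output = await providers.runAll({
    media: {
      type: 'movie',
      title: 'Example Movie', // placeholder media values
      releaseYear: 2023,
      imbdId: 'tt0000000', // field is spelled "imbdId" in CommonMedia in this commit
      tmdbId: '0',
    },
  });
  console.log(output ? `stream from ${output.sourceId}:` : 'no stream found', output?.stream);
}

run();
```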
48 src/main/events.ts Normal file
@@ -0,0 +1,48 @@
export type UpdateEventStatus = 'success' | 'failure' | 'notfound' | 'pending';

export type UpdateEvent = {
  percentage: number;
  status: UpdateEventStatus;
  error?: unknown; // set when status is failure
  reason?: string; // set when status is not-found
};

export type InitEvent = {
  sourceIds: string[]; // list of source ids
};

export type DiscoverEmbedsEvent = {
  sourceId: string;

  // list of embeds that will be scraped in order
  embeds: Array<{
    id: string;
    embedScraperId: string;
  }>;
};

export type StartScrapingEvent = {
  sourceId: string;

  // embed Id (not embedScraperId)
  embedId?: string;
};

export type SingleScraperEvents = {
  update?: (evt: UpdateEvent) => void;
};

export type FullScraperEvents = {
  // update progress percentage and status of the currently scraping item
  update?: (evt: UpdateEvent) => void;

  // initial list of scrapers it's running, only triggers once per run.
  init?: (evt: InitEvent) => void;

  // list of embeds discovered for the currently running source scraper
  // triggers once per source scraper
  discoverEmbeds?: (evt: DiscoverEmbedsEvent) => void;

  // start scraping an item.
  start?: (id: string) => void;
};

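To make the event hooks concrete, a small hedged sketch of a FullScraperEvents object as it would be passed through RunnerOptions.events; the log messages are illustrative only and every hook is optional:

```ts
import type { FullScraperEvents } from '@movie-web/providers';

const events: FullScraperEvents = {
  init: (evt) => console.log('sources to run:', evt.sourceIds),
  start: (id) => console.log('started scraping', id),
  discoverEmbeds: (evt) => console.log(`embeds found for ${evt.sourceId}:`, evt.embeds),
  update: (evt) => console.log(`${evt.percentage}% (${evt.status})`),
};

// passed alongside the media when running: providers.runAll({ media, events })
```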
26 src/main/media.ts Normal file
@@ -0,0 +1,26 @@
export type CommonMedia = {
  title: string;
  releaseYear: number;
  imbdId: string;
  tmdbId: string;
};

export type MediaTypes = 'show' | 'movie';

export type ShowMedia = CommonMedia & {
  type: 'show';
  episode: {
    number: number;
    tmdbId: string;
  };
  season: {
    number: number;
    tmdbId: string;
  };
};

export type MovieMedia = CommonMedia & {
  type: 'movie';
};

export type ScrapeMedia = ShowMedia | MovieMedia;

55 src/main/meta.ts Normal file
@@ -0,0 +1,55 @@
import { MediaTypes } from '@/main/media';
import { Embed, Sourcerer } from '@/providers/base';
import { ProviderList } from '@/providers/get';

export type MetaOutput = {
  type: 'embed' | 'source';
  id: string;
  rank: number;
  name: string;
  mediaTypes?: Array<MediaTypes>;
};

function formatSourceMeta(v: Sourcerer): MetaOutput {
  const types: Array<MediaTypes> = [];
  if (v.scrapeMovie) types.push('movie');
  if (v.scrapeShow) types.push('show');
  return {
    type: 'source',
    id: v.id,
    rank: v.rank,
    name: v.name,
    mediaTypes: types,
  };
}

function formatEmbedMeta(v: Embed): MetaOutput {
  return {
    type: 'embed',
    id: v.id,
    rank: v.rank,
    name: v.name,
  };
}

export function getAllSourceMetaSorted(list: ProviderList): MetaOutput[] {
  return list.sources.sort((a, b) => b.rank - a.rank).map(formatSourceMeta);
}

export function getAllEmbedMetaSorted(list: ProviderList): MetaOutput[] {
  return list.embeds.sort((a, b) => b.rank - a.rank).map(formatEmbedMeta);
}

export function getSpecificId(list: ProviderList, id: string): MetaOutput | null {
  const foundSource = list.sources.find((v) => v.id === id);
  if (foundSource) {
    return formatSourceMeta(foundSource);
  }

  const foundEmbed = list.embeds.find((v) => v.id === id);
  if (foundEmbed) {
    return formatEmbedMeta(foundEmbed);
  }

  return null;
}

159 src/main/runner.ts Normal file
@@ -0,0 +1,159 @@
import { UseableFetcher } from '@/fetchers/types';
import { FullScraperEvents } from '@/main/events';
import { ScrapeMedia } from '@/main/media';
import { EmbedOutput, SourcererOutput } from '@/providers/base';
import { ProviderList } from '@/providers/get';
import { Stream } from '@/providers/streams';
import { ScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';
import { reorderOnIdList } from '@/utils/list';

export type RunOutput = {
  sourceId: string;
  embedId?: string;
  stream: Stream;
};

export type SourceRunOutput = {
  sourceId: string;
  stream?: Stream;
  embeds: [];
};

export type EmbedRunOutput = {
  embedId: string;
  stream?: Stream;
};

export type ProviderRunnerOptions = {
  fetcher: UseableFetcher;
  proxiedFetcher: UseableFetcher;
  sourceOrder?: string[];
  embedOrder?: string[];
  events?: FullScraperEvents;
  media: ScrapeMedia;
};

export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOptions): Promise<RunOutput | null> {
  const sources = reorderOnIdList(ops.sourceOrder ?? [], list.sources).filter((v) => {
    if (ops.media.type === 'movie') return !!v.scrapeMovie;
    if (ops.media.type === 'show') return !!v.scrapeShow;
    return false;
  });
  const embeds = reorderOnIdList(ops.embedOrder ?? [], list.embeds);
  const embedIds = embeds.map((v) => v.id);

  const contextBase: ScrapeContext = {
    fetcher: ops.fetcher,
    proxiedFetcher: ops.proxiedFetcher,
    progress(val) {
      ops.events?.update?.({
        percentage: val,
        status: 'pending',
      });
    },
  };

  ops.events?.init?.({
    sourceIds: sources.map((v) => v.id),
  });

  for (const s of sources) {
    ops.events?.start?.(s.id);

    // run source scrapers
    let output: SourcererOutput | null = null;
    try {
      if (ops.media.type === 'movie' && s.scrapeMovie)
        output = await s.scrapeMovie({
          ...contextBase,
          media: ops.media,
        });
      else if (ops.media.type === 'show' && s.scrapeShow)
        output = await s.scrapeShow({
          ...contextBase,
          media: ops.media,
        });
    } catch (err) {
      if (err instanceof NotFoundError) {
        ops.events?.update?.({
          percentage: 100,
          status: 'notfound',
          reason: err.message,
        });
        continue;
      }
      ops.events?.update?.({
        percentage: 100,
        status: 'failure',
        error: err,
      });
      continue;
    }
    if (!output) throw new Error('Invalid media type');

    // return the stream if there is one
    if (output.stream) {
      return {
        sourceId: s.id,
        stream: output.stream,
      };
    }

    if (output.embeds.length > 0) {
      ops.events?.discoverEmbeds?.({
        embeds: output.embeds.map((v, i) => ({
          id: [s.id, i].join('-'),
          embedScraperId: v.embedId,
        })),
        sourceId: s.id,
      });
    }

    // run embed scrapers on listed embeds
    const sortedEmbeds = output.embeds;
    sortedEmbeds.sort((a, b) => embedIds.indexOf(a.embedId) - embedIds.indexOf(b.embedId));

    for (const ind in sortedEmbeds) {
      if (!Object.prototype.hasOwnProperty.call(sortedEmbeds, ind)) continue;
      const e = sortedEmbeds[ind];
      const scraper = embeds.find((v) => v.id === e.embedId);
      if (!scraper) throw new Error('Invalid embed returned');

      // run embed scraper
      const id = [s.id, ind].join('-');
      ops.events?.start?.(id);
      let embedOutput: EmbedOutput;
      try {
        embedOutput = await scraper.scrape({
          ...contextBase,
          url: e.url,
        });
      } catch (err) {
        if (err instanceof NotFoundError) {
          ops.events?.update?.({
            percentage: 100,
            status: 'notfound',
            reason: err.message,
          });
          continue;
        }
        ops.events?.update?.({
          percentage: 100,
          status: 'failure',
          error: err,
        });
        continue;
      }

      return {
        sourceId: s.id,
        embedId: scraper.id,
        stream: embedOutput.stream,
      };
    }
  }

  // no providers or embeds returned streams
  return null;
}

13 src/providers/all.ts Normal file
@@ -0,0 +1,13 @@
import { Embed, Sourcerer } from '@/providers/base';
import { upcloudScraper } from '@/providers/embeds/upcloud';
import { flixhqScraper } from '@/providers/sources/flixhq/index';

export function gatherAllSources(): Array<Sourcerer> {
  // all sources are gathered here
  return [flixhqScraper];
}

export function gatherAllEmbeds(): Array<Embed> {
  // all embeds are gathered here
  return [upcloudScraper];
}

40 src/providers/base.ts Normal file
@@ -0,0 +1,40 @@
import { MovieMedia, ShowMedia } from '@/main/media';
import { Stream } from '@/providers/streams';
import { EmbedScrapeContext, ScrapeContext } from '@/utils/context';

export type SourcererOutput = {
  embeds: {
    embedId: string;
    url: string;
  }[];
  stream?: Stream;
};

export type Sourcerer = {
  id: string;
  name: string; // displayed in the UI
  rank: number; // the higher the number, the earlier it gets put on the queue
  disabled?: boolean;
  scrapeMovie?: (input: ScrapeContext & { media: MovieMedia }) => Promise<SourcererOutput>;
  scrapeShow?: (input: ScrapeContext & { media: ShowMedia }) => Promise<SourcererOutput>;
};

export function makeSourcerer(state: Sourcerer): Sourcerer {
  return state;
}

export type EmbedOutput = {
  stream: Stream;
};

export type Embed = {
  id: string;
  name: string; // displayed in the UI
  rank: number; // the higher the number, the earlier it gets put on the queue
  disabled?: boolean;
  scrape: (input: EmbedScrapeContext) => Promise<EmbedOutput>;
};

export function makeEmbed(state: Embed): Embed {
  return state;
}

73
src/providers/embeds/upcloud.ts
Normal file
@@ -0,0 +1,73 @@
import { AES, enc } from 'crypto-js';

import { makeEmbed } from '@/providers/base';

interface StreamRes {
  server: number;
  sources: string;
  tracks: {
    file: string;
    kind: 'captions' | 'thumbnails';
    label: string;
  }[];
}

function isJSON(json: string) {
  try {
    JSON.parse(json);
    return true;
  } catch {
    return false;
  }
}

export const upcloudScraper = makeEmbed({
  id: 'upcloud',
  name: 'UpCloud',
  rank: 200,
  async scrape(ctx) {
    // Example url: https://dokicloud.one/embed-4/{id}?z=
    const parsedUrl = new URL(ctx.url.replace('embed-5', 'embed-4'));

    const dataPath = parsedUrl.pathname.split('/');
    const dataId = dataPath[dataPath.length - 1];

    const streamRes = await ctx.proxiedFetcher<StreamRes>(`${parsedUrl.origin}/ajax/embed-4/getSources?id=${dataId}`, {
      headers: {
        Referer: parsedUrl.origin,
        'X-Requested-With': 'XMLHttpRequest',
      },
    });

    let sources: { file: string; type: string } | null = null;

    if (!isJSON(streamRes.sources)) {
      const decryptionKey = JSON.parse(
        await ctx.proxiedFetcher<string>(`https://raw.githubusercontent.com/enimax-anime/key/e4/key.txt`),
      ) as [number, number][];

      let extractedKey = '';
      const sourcesArray = streamRes.sources.split('');
      for (const index of decryptionKey) {
        for (let i: number = index[0]; i < index[1]; i += 1) {
          extractedKey += streamRes.sources[i];
          sourcesArray[i] = '';
        }
      }

      const decryptedStream = AES.decrypt(sourcesArray.join(''), extractedKey).toString(enc.Utf8);
      const parsedStream = JSON.parse(decryptedStream)[0];
      if (!parsedStream) throw new Error('No stream found');
      sources = parsedStream;
    }

    if (!sources) throw new Error('upcloud source not found');

    return {
      stream: {
        type: 'hls',
        playlist: sources.file,
      },
    };
  },
});
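The least obvious part of the scraper above is how the AES passphrase is recovered when sources is not plain JSON: key.txt is a list of [start, end) index pairs, the characters at those positions inside the encrypted string are concatenated into the passphrase, and the same positions are blanked out before decrypting the remainder. A self-contained sketch of just that extraction step, using made-up data:

// Standalone illustration of the index-pair key extraction; the string and pairs are invented.
const encrypted = 'abKEYcdefgh'; // pretend ciphertext with key characters embedded at indexes 2..4
const keyIndexes: [number, number][] = [[2, 5]]; // pretend contents of key.txt

let key = '';
const remaining = encrypted.split('');
for (const [start, end] of keyIndexes) {
  for (let i = start; i < end; i += 1) {
    key += encrypted[i]; // collect the embedded key characters
    remaining[i] = ''; // and strip them from the ciphertext
  }
}

console.log(key); // "KEY"
console.log(remaining.join('')); // "abcdefgh", which is what would actually be AES-decrypted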
27
src/providers/get.ts
Normal file
@@ -0,0 +1,27 @@
import { gatherAllEmbeds, gatherAllSources } from '@/providers/all';
import { Embed, Sourcerer } from '@/providers/base';
import { hasDuplicates } from '@/utils/predicates';

export interface ProviderList {
  sources: Sourcerer[];
  embeds: Embed[];
}

export function getProviders(): ProviderList {
  const sources = gatherAllSources().filter((v) => !v?.disabled);
  const embeds = gatherAllEmbeds().filter((v) => !v?.disabled);
  const combined = [...sources, ...embeds];

  const anyDuplicateId = hasDuplicates(combined.map((v) => v.id));
  const anyDuplicateSourceRank = hasDuplicates(sources.map((v) => v.rank));
  const anyDuplicateEmbedRank = hasDuplicates(embeds.map((v) => v.rank));

  if (anyDuplicateId) throw new Error('Duplicate id found in sources/embeds');
  if (anyDuplicateSourceRank) throw new Error('Duplicate rank found in sources');
  if (anyDuplicateEmbedRank) throw new Error('Duplicate rank found in embeds');

  return {
    sources,
    embeds,
  };
}
1
src/providers/sources/flixhq/common.ts
Normal file
@@ -0,0 +1 @@
export const flixHqBase = 'https://flixhq.to';
29
src/providers/sources/flixhq/index.ts
Normal file
@@ -0,0 +1,29 @@
import { makeSourcerer } from '@/providers/base';
import { upcloudScraper } from '@/providers/embeds/upcloud';
import { getFlixhqSourceDetails, getFlixhqSources } from '@/providers/sources/flixhq/scrape';
import { getFlixhqId } from '@/providers/sources/flixhq/search';
import { NotFoundError } from '@/utils/errors';

// TODO tv shows are available in flixHQ, just no scraper yet
export const flixhqScraper = makeSourcerer({
  id: 'flixhq',
  name: 'FlixHQ',
  rank: 100,
  async scrapeMovie(ctx) {
    const id = await getFlixhqId(ctx, ctx.media);
    if (!id) throw new NotFoundError('no search results match');

    const sources = await getFlixhqSources(ctx, id);
    const upcloudStream = sources.find((v) => v.embed.toLowerCase() === 'upcloud');
    if (!upcloudStream) throw new NotFoundError('upcloud stream not found for flixhq');

    return {
      embeds: [
        {
          embedId: upcloudScraper.id,
          url: await getFlixhqSourceDetails(ctx, upcloudStream.episodeId),
        },
      ],
    };
  },
});
37
src/providers/sources/flixhq/scrape.ts
Normal file
@@ -0,0 +1,37 @@
import { load } from 'cheerio';

import { flixHqBase } from '@/providers/sources/flixhq/common';
import { ScrapeContext } from '@/utils/context';

export async function getFlixhqSources(ctx: ScrapeContext, id: string) {
  const type = id.split('/')[0];
  const episodeParts = id.split('-');
  const episodeId = episodeParts[episodeParts.length - 1];

  const data = await ctx.proxiedFetcher<string>(`/ajax/${type}/episodes/${episodeId}`, {
    baseUrl: flixHqBase,
  });
  const doc = load(data);
  const sourceLinks = doc('.nav-item > a')
    .toArray()
    .map((el) => {
      const query = doc(el);
      const embedTitle = query.attr('title');
      const linkId = query.attr('data-linkid');
      if (!embedTitle || !linkId) throw new Error('invalid sources');
      return {
        embed: embedTitle,
        episodeId: linkId,
      };
    });

  return sourceLinks;
}

export async function getFlixhqSourceDetails(ctx: ScrapeContext, sourceId: string): Promise<string> {
  const jsonData = await ctx.proxiedFetcher<Record<string, any>>(`/ajax/sources/${sourceId}`, {
    baseUrl: flixHqBase,
  });

  return jsonData.link;
}
34
src/providers/sources/flixhq/search.ts
Normal file
@@ -0,0 +1,34 @@
import { load } from 'cheerio';

import { MovieMedia } from '@/main/media';
import { flixHqBase } from '@/providers/sources/flixhq/common';
import { compareMedia } from '@/utils/compare';
import { ScrapeContext } from '@/utils/context';

export async function getFlixhqId(ctx: ScrapeContext, media: MovieMedia): Promise<string | null> {
  const searchResults = await ctx.proxiedFetcher<string>(`/search/${media.title.replaceAll(/[^a-z0-9A-Z]/g, '-')}`, {
    baseUrl: flixHqBase,
  });

  const doc = load(searchResults);
  const items = doc('.film_list-wrap > div.flw-item')
    .toArray()
    .map((el) => {
      const query = doc(el);
      const id = query.find('div.film-poster > a').attr('href')?.slice(1);
      const title = query.find('div.film-detail > h2 > a').attr('title');
      const year = query.find('div.film-detail > div.fd-infor > span:nth-child(1)').text();

      if (!id || !title || !year) return null;
      return {
        id,
        title,
        year: +year,
      };
    });

  const matchingItem = items.find((v) => v && compareMedia(media, v.title, v.year));

  if (!matchingItem) return null;
  return matchingItem.id;
}
18
src/providers/streams.ts
Normal file
@@ -0,0 +1,18 @@
export type StreamFile = {
  type: 'mp4';
  url: string;
};

export type Qualities = '360' | '480' | '720' | '1080';

export type FileBasedStream = {
  type: 'file';
  qualities: Partial<Record<Qualities, StreamFile>>;
};

export type HlsBasedStream = {
  type: 'hls';
  playlist: string;
};

export type Stream = FileBasedStream | HlsBasedStream;
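For reference, values matching the two variants of the Stream union would look roughly like this; the URLs are placeholders, not real endpoints:

// Hypothetical stream values, assuming the Stream types from streams.ts above.
const fileStream: Stream = {
  type: 'file',
  qualities: {
    '720': { type: 'mp4', url: 'https://example.com/movie-720.mp4' },
    '1080': { type: 'mp4', url: 'https://example.com/movie-1080.mp4' },
  },
};

const hlsStream: Stream = {
  type: 'hls',
  playlist: 'https://example.com/movie/master.m3u8',
};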
@@ -1 +0,0 @@
-export const LOG = 'hello world';
19
src/utils/compare.ts
Normal file
@@ -0,0 +1,19 @@
import { CommonMedia } from '@/main/media';

export function normalizeTitle(title: string): string {
  return title
    .trim()
    .toLowerCase()
    .replace(/['":]/g, '')
    .replace(/[^a-zA-Z0-9]+/g, '_');
}

export function compareTitle(a: string, b: string): boolean {
  return normalizeTitle(a) === normalizeTitle(b);
}

export function compareMedia(media: CommonMedia, title: string, releaseYear?: number): boolean {
  // if no year is provided, count it as the correct year
  const isSameYear = releaseYear === undefined ? true : media.releaseYear === releaseYear;
  return compareTitle(media.title, title) && isSameYear;
}
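To make the normalization concrete, a few example inputs and the values these helpers would return (the titles are arbitrary examples, not from this commit):

// Assuming normalizeTitle and compareTitle are imported from '@/utils/compare'.
normalizeTitle('Spider-Man: No Way Home'); // "spider_man_no_way_home"
normalizeTitle('  Spider Man - No Way Home  '); // also "spider_man_no_way_home"
compareTitle('Spider-Man: No Way Home', 'spider man no way home'); // true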
13
src/utils/context.ts
Normal file
@@ -0,0 +1,13 @@
import { UseableFetcher } from '@/fetchers/types';

export type ScrapeContext = {
  proxiedFetcher: <T>(...params: Parameters<UseableFetcher<T>>) => ReturnType<UseableFetcher<T>>;
  fetcher: <T>(...params: Parameters<UseableFetcher<T>>) => ReturnType<UseableFetcher<T>>;
  progress(val: number): void;
};

export type EmbedInput = {
  url: string;
};

export type EmbedScrapeContext = EmbedInput & ScrapeContext;
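Since scrapers only ever see this context object, it can be stubbed in tests. The sketch below assumes the fetchers simply resolve to the response payload; UseableFetcher lives in '@/fetchers/types' and is not part of this diff, so the exact call signature here is a guess:

// Hypothetical, loosely typed test stub; the field names follow ScrapeContext/EmbedScrapeContext.
const fakeContext = {
  fetcher: async () => '<html>raw page</html>',
  proxiedFetcher: async () => '<html>proxied page</html>',
  progress: (val: number) => console.log(`progress: ${val}%`),
  url: 'https://example.com/embed-4/abc123', // only needed for embed scrapers
};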
6
src/utils/errors.ts
Normal file
@@ -0,0 +1,6 @@
export class NotFoundError extends Error {
  constructor(reason?: string) {
    super(`Couldn't find a stream: ${reason ?? 'not found'}`);
    this.name = 'NotFoundError';
  }
}
20
src/utils/list.ts
Normal file
@@ -0,0 +1,20 @@
export function reorderOnIdList<T extends { rank: number; id: string }[]>(order: string[], list: T): T {
  const copy = [...list] as T;
  copy.sort((a, b) => {
    const aIndex = order.indexOf(a.id);
    const bIndex = order.indexOf(b.id);

    // both in order list
    if (aIndex >= 0 && bIndex >= 0) return aIndex - bIndex;

    // only one in order list
    // negative means order [a,b]
    // positive means order [b,a]
    if (bIndex >= 0) return 1; // A isn't in the order list but B is, so A goes later in the list
    if (aIndex >= 0) return -1; // B isn't in the order list but A is, so B goes later in the list

    // both not in list, sort on rank
    return b.rank - a.rank;
  });
  return copy;
}
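A worked example of the ordering rules with made-up entries: ids named in the order array come first, in that order, and everything else falls back to descending rank:

// Hypothetical providers, not part of this commit.
const providers = [
  { id: 'alpha', rank: 100 },
  { id: 'beta', rank: 300 },
  { id: 'gamma', rank: 200 },
];

reorderOnIdList(['gamma'], providers).map((p) => p.id);
// => ['gamma', 'beta', 'alpha'] (gamma pinned first, the rest by rank descending)

reorderOnIdList([], providers).map((p) => p.id);
// => ['beta', 'gamma', 'alpha'] (no pins, pure rank order)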
3
src/utils/predicates.ts
Normal file
@@ -0,0 +1,3 @@
export function hasDuplicates<T>(values: Array<T>): boolean {
  return new Set(values).size !== values.length;
}
tsconfig.json
@@ -1,7 +1,7 @@
 {
   "compilerOptions": {
-    "target": "es2018",
-    "lib": ["es2018", "DOM"],
+    "target": "es2021",
+    "lib": ["es2021"],
     "module": "commonjs",
     "declaration": true,
     "outDir": "./lib",
@@ -9,8 +9,10 @@
     "baseUrl": "src",
     "experimentalDecorators": true,
     "isolatedModules": false,
+    "skipLibCheck": true,
     "paths": {
-      "@/*": ["./*"]
+      "@/*": ["./*"],
+      "@entrypoint": ["./index.ts"]
     }
   },
   "include": ["src"],
tslint.json
@@ -1,3 +0,0 @@
-{
-  "extends": ["tslint:recommended", "tslint-config-prettier"]
-}
@@ -6,12 +6,7 @@ const dts = require('vite-plugin-dts');
 const main = path.resolve(__dirname, 'src/index.ts');
 
 module.exports = defineConfig({
-  plugins: [
-    eslint(),
-    dts({
-      include: [main],
-    }),
-  ],
+  plugins: [eslint(), dts({})],
   resolve: {
     alias: {
       '@': path.resolve(__dirname, './src'),
@@ -23,8 +18,8 @@ module.exports = defineConfig({
 
     lib: {
       entry: main,
-      name: 'providers',
-      fileName: 'providers',
+      name: 'index',
+      fileName: 'index',
     },
   },
 });