Mirror of https://github.com/movie-web/providers.git (synced 2025-09-13 13:03:25 +00:00)

Compare commits: docs-updat ... 0a2259b1f4 (596 commits)
[Commit table: 596 commits, 0a2259b1f4 through 3c292baf26. Only the SHA1 column survived in this mirror; the author, date, and message columns are empty, so the full list is not reproduced here.]
@@ -8,7 +8,7 @@ layout: page
 ---
 cta:
   - Get Started
-  - /guide/usage
+  - /get-started/introduction
 secondary:
   - Open on GitHub →
   - https://github.com/movie-web/providers
Deleted (@@ -1,13 +0,0 @@): the old "Targets" page.

# Targets

When making an instance of the library using `makeProviders()`. It will immediately require choosing a target.

::alert{type="info"}
A target is the device where the stream will be played on.
**Where the scraping is run has nothing to do with the target**, only where the stream is finally played in the end is significant in choosing a target.
::

#### Possible targets
- **`targets.BROWSER`** Stream will be played in a browser with CORS
- **`targets.NATIVE`** Stream will be played natively
- **`targets.ALL`** Stream will be played on a device with no restrictions of any kind
Deleted (@@ -1,47 +0,0 @@): the old "Fetchers" page. Its content is carried over almost verbatim into .docs/content/2.essentials/2.fetchers.md, added below; the old version referred to `makeDefaultFetcher` instead of `makeStandardFetcher`, titled the last section "Making a custom fetcher" rather than "Making a derived fetcher", listed the required proxy headers as `Cookie`, `Referer`, `Origin` (without `Set-Cookie`), and had no "Making a fetcher from scratch" section.
Deleted (@@ -1,2 +0,0 @@): the old guide _dir.yml.

icon: ph:book-open-fill
navigation.redirect: /guide/usage
.docs/content/1.get-started/0.introduction.md (new file, +14)
@@ -0,0 +1,14 @@

# Introduction

## What is `@movie-web/providers`?

`@movie-web/providers` is the soul of [movie-web](https://github.com/movie-web/movie-web). It's a collection of scrapers for various streaming sites. It extracts the raw streams from those sites, so you can watch them without any extra fluff from the original sites.

## What can I use this on?

We support many different environments; here are a few examples:
- In a browser, watch streams without needing a server to scrape (does need a proxy)
- In a native app, scrape in the app itself
- In a backend server, scrape on the server and give the streams to the client to watch.

To find out how to configure the library for your environment, you can read [How to use on X](../2.essentials/0.usage-on-x.md).
@@ -1,4 +1,6 @@
-# Usage
+# Quick start
 
+## Installation
+
 Let's get started with `@movie-web/providers`. First lets install the package.
 

@@ -18,11 +20,15 @@ Let's get started with `@movie-web/providers`. First lets install the package.
 
 To get started with scraping on the **server**, first you have to make an instance of the providers.
 
-```ts
-import { makeProviders, makeDefaultFetcher, targets } from '@movie-web/providers';
+::alert{type="warning"}
+This snippet will only work on a **server**. For other environments, check out [Usage on X](../2.essentials/0.usage-on-x.md).
+::
+
+```ts [index.ts (server)]
+import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';
 
 // this is how the library will make http requests
-const myFetcher = makeDefaultFetcher(fetch);
+const myFetcher = makeStandardFetcher(fetch);
 
 // make an instance of the providers library
 const providers = makeProviders({

@@ -33,7 +39,8 @@ const providers = makeProviders({
 })
 ```
 
-Perfect, now we can start scraping a stream:
+Perfect. You now have an instance of the providers you can reuse everywhere.
+Now let's scrape an item:
 
 ```ts [index.ts (server)]
 // fetch some data from TMDB

@@ -47,7 +54,7 @@ const media = {
 const output = await providers.runAll({
   media: media
 })
-
-if (!output) console.log("No stream found")
-console.log(`stream url: ${output.stream.playlist}`)
 ```
+
+Now we have our stream in the output variable. (If the output is `null` then nothing could be found.)
+To find out how to use the streams, check out [Using streams](../2.essentials/4.using-streams.md).
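As an illustration of how the scraped `output` above can be consumed, here is a short sketch (not part of the diff, based only on the stream shape documented in the "Using streams" page added later in this changeset; the `RunOutput` type import is assumed to be exported alongside the other types):

```ts
import type { RunOutput } from '@movie-web/providers';

// Log a usable URL for whichever stream type was scraped.
function logStreamUrl(output: RunOutput | null) {
  if (!output) {
    console.log('No stream found');
    return;
  }
  if (output.stream.type === 'hls') {
    // HLS streams expose an m3u8 playlist URL (see "Using streams")
    console.log(`playlist: ${output.stream.playlist}`);
  } else {
    // file streams expose a quality -> StreamFile map, each entry with a direct .url
    for (const [quality, file] of Object.entries(output.stream.qualities)) {
      console.log(`${quality}: ${file.url}`);
    }
  }
}
```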
.docs/content/1.get-started/3.examples.md (new file, +5)
@@ -0,0 +1,5 @@

# Examples

::alert{type="warning"}
There are no examples yet, stay tuned!
::
.docs/content/1.get-started/4.changelog.md (new file, +123)
@@ -0,0 +1,123 @@

---
title: 'Changelog'
---

# Version 2.2.9
- Fixed VidSrcTo (both Vidplay and Filemoon embeds)
- Added dropload, filelions and vtube embeds to Primewire
- Fixed and enabled Smashystream
- Improved RidoMovies search results

# Version 2.2.8
- Fix package exports for CJS and ESM
- Fixed Mixdrop embed
- Added thumbnailTrack to Vidplay embed

# Version 2.2.7
- Fix showbox

# Version 2.2.6
- Fix febbox
- Validate if a stream is actually playable. Streams that are not responding are no longer returned.

# Version 2.2.5
- Add Primewire provider
- Improve VidSrcTo search results
- Fixed Filemoon embeds
- Fixed febbox
- Disabled non-working providers
- Reordered providers in ranking

# Version 2.2.4
- Hotfix for HDRezka provider

# Version 2.2.3
- Fix VidSrcTo
- Add HDRezka provider
- Fix Goojara causing a crash
- Improve react-native URLSearchParams implementation
- Cover an edge case where the title contains 'the movie' or 'the show'

# Version 2.2.2
- Fix subtitles not appearing if the name of the subtitle is in its native tongue.
- Remove references to the old domain
- Fixed ridomovies not working for some shows and movies
- Fixed Showbox not working in react-native.

# Version 2.2.1
- Fixed Closeload scraper

# Version 2.2.0
- Fixed vidsrc.me URL decoding.
- Added ridomovies with Ridoo and Closeload embed.
- Added Goojara.to source.
- Fixed VidSrcTo crashing if no subtitles are found.
- Added Nepu Provider.
- Added vidcloud to flixhq and zoechip.
- Add thumbnail track option to response (Not supported by any providers yet).
- Disabled Lookmovie and swapped Showbox and VidSrcTo in ranking.

# Version 2.1.1
- Fixed vidplay decryption keys being wrong and switched the domain to one that works

# Version 2.1.0
- Add preferedHeaders to most sources
- Add CF_BLOCKED flag to sources that have blocked cloudflare API's
- Fix vidsrc sometimes having an equal sign where it shouldnt
- Increase ranking of lookmovie
- Re-enabled subtitles for febbox-mp4

# Version 2.0.5
- Disable subtitles for febbox-mp4. As their endpoint doesn't work anymore.

# Version 2.0.4
- Added providers:
  - Add VidSrcTo provider with Vidplay and Filemoon embeds
  - Add VidSrc provider with StreamBucket embeds
- Fixed providers:
  - RemoteStream
  - LookMovie - Fixed captions
  - ShowBox
- Updated documentation to fix spelling + grammar
- User-agent header fix
  - Needs the latest simple-proxy update
- Added utility to not return multiple subs for the same language - Applies to Lookmovie and Showbox

# Version 2.0.3
- Actually remove Febbox HLS

# Version 2.0.2
- Added Lookmovie caption support
- Fix Febbox duplicate subtitle languages
- Remove Febbox HLS

# Version 2.0.1
- Fixed issue where febbox-mp4 would not show all qualities
- Fixed issue where discoverEmbeds event would not show the embeds in the right order

# Version 2.0.0

::alert{type="warning"}
There are breaking changes in this list, make sure to read them thoroughly if you plan on updating.
::

**Development tooling:**
- Added integration test for browser. To make sure the package keeps working in the browser
- Add type checking when building, previously it ignored them
- Refactored the main folder, now called entrypoint.
- Dev-cli code has been split up a bit more, a bit cleaner to navigate
- Dev-cli is now moved to `npm run cli`
- Dev-cli now has support for running in a headless browser using a proxy URL.
- Fetchers can now return a full response with headers and everything

**New features:**
- Added system to allow scraping IP locked sources through the consistentIpforRequests option.
- There is now a `buildProviders()` function that gives a builder for the `ProviderControls`. It's an alternative to `makeProviders()`.
- Streams can now return a headers object and a `preferredHeaders` object, which are the required and optional headers for when using the stream.

**Notable changes:**
- Renamed the NO_CORS flag to CORS_ALLOWED (meaning that resource sharing is allowed)
- Export Fetcher and Stream types with all types related to it
- Providers can now return a list of streams instead of just one.
- Captions now have identifiers returned with them. Just generally useful to have
- New targets and some of them renamed
.docs/content/1.get-started/_dir.yml (new file, +2)
@@ -0,0 +1,2 @@

icon: ph:shooting-star-fill
navigation.redirect: /get-started/introduction
Deleted (@@ -1,20 +0,0 @@): the old `makeStandardFetcher` API page, superseded by .docs/content/5.api-reference/7.makeStandardFetcher.md below. The old page documented the same helper, but its example and type signature still used the `makeDefaultFetcher` name and it targeted `targets.NATIVE` instead of `targets.ANY`.
Deleted (@@ -1,3 +0,0 @@): the old api-reference _dir.yml.

icon: ph:file-code-fill
navigation.redirect: /api/makeproviders
navigation.title: API
.docs/content/2.essentials/0.usage-on-x.md (new file, +67)
@@ -0,0 +1,67 @@

# How to use on X

The library can run in many environments, so it can be tricky to figure out how to set it up.

Here is a checklist. For more specific environments, keep reading below:
- When requests are very restricted (like browser client-side), configure a proxied fetcher.
- When your requests come from the same device on which the stream will be played (not compatible with a proxied fetcher), set `consistentIpForRequests: true`.
- To set a target, consult [Targets](./1.targets.md).

To make use of the examples below, check out the following pages:
- [Quick start](../1.get-started/1.quick-start.md)
- [Using streams](../2.essentials/4.using-streams.md)

## Node.js server
```ts
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  target: chooseYourself, // check out https://movie-web.github.io/providers/essentials/targets
})
```

## Browser client-side

Using the provider package client-side requires a hosted version of simple-proxy.
Read more [about proxy fetchers](./2.fetchers.md#using-fetchers-on-the-browser).

```ts
import { makeProviders, makeStandardFetcher, makeSimpleProxyFetcher, targets } from '@movie-web/providers';

const proxyUrl = "https://your.proxy.workers.dev/";

const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  proxiedFetcher: makeSimpleProxyFetcher(proxyUrl, fetch),
  target: targets.BROWSER,
})
```

## React Native
To use the library in a React Native app, you will also need a couple of polyfills for crypto and base64.

1. First install the polyfills:
```bash
npm install @react-native-anywhere/polyfill-base64 react-native-quick-crypto
```

2. Add the polyfills to your app:
```ts
// Import in your entry file
import '@react-native-anywhere/polyfill-base64';
```

And follow the [react-native-quick-crypto documentation](https://github.com/margelo/react-native-quick-crypto) to set up the crypto polyfill.

3. Then you can use the library like this:

```ts
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  target: targets.NATIVE,
  consistentIpForRequests: true,
})
```
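To illustrate the backend scenario from the checklist above, here is a hedged sketch (not from the repository) of a small server-side helper built only from the documented `makeProviders`, `makeStandardFetcher` and `runAll` calls; the `scrapeMovie` helper name is invented for the example:

```ts
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

// One shared instance, as the docs recommend.
const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  target: targets.ANY, // pick the target that matches where your clients play the stream
});

// Scrape on the server and hand back only what a client needs to start playback.
export async function scrapeMovie(title: string, releaseYear: number, tmdbId: string) {
  const output = await providers.runAll({
    media: { type: 'movie', title, releaseYear, tmdbId },
  });
  if (!output) return null; // no source produced a stream
  return { sourceId: output.sourceId, stream: output.stream };
}
```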
.docs/content/2.essentials/1.targets.md (new file, +14)
@@ -0,0 +1,14 @@

# Targets

When creating provider controls, you will immediately be required to choose a target.

::alert{type="warning"}
A target is the device on which the stream will be played.
**Where the scraping is run has nothing to do with the target**, only where the stream is finally played in the end is significant in choosing a target.
::

#### Possible targets
- **`targets.BROWSER`** Stream will be played in a browser with CORS
- **`targets.BROWSER_EXTENSION`** Stream will be played in a browser using the movie-web extension (WIP)
- **`targets.NATIVE`** Stream will be played on a native video player
- **`targets.ANY`** No restrictions for selecting streams, will just give all of them
.docs/content/2.essentials/2.fetchers.md (new file, +74)
@@ -0,0 +1,74 @@

# Fetchers

When creating provider controls, a fetcher will need to be configured.
Depending on your environment, this can come with some considerations:

## Using `fetch()`
In most cases, you can use the `fetch()` API. This will work in newer versions of Node.js (18 and above) and in the browser.

```ts
const fetcher = makeStandardFetcher(fetch);
```

If you're using an older version of Node.js, you can use the npm package `node-fetch` to polyfill fetch:

```ts
import fetch from "node-fetch";

const fetcher = makeStandardFetcher(fetch);
```

## Using fetchers on the browser
When using this library in a browser, you will need a proxy. Browsers restrict when a web request can be made. To bypass those restrictions, you will need a CORS proxy.

The movie-web team has a proxy pre-made and pre-configured for you to use. For more information, check out [movie-web/simple-proxy](https://github.com/movie-web/simple-proxy). After installing, you can use this proxy like so:

```ts
const fetcher = makeSimpleProxyFetcher("https://your.proxy.workers.dev/", fetch);
```

If you aren't able to use this specific proxy and need to use a different one, you can make your own fetcher in the next section.

## Making a derived fetcher

In some rare cases, a custom fetcher is necessary. This can be quite difficult to make from scratch, so it's recommended to base it off of an existing fetcher and build your own functionality around it.

```ts
export function makeCustomFetcher(): Fetcher {
  const fetcher = makeStandardFetcher(fetch);
  const customFetcher: Fetcher = (url, ops) => {
    // Do something with the options and URL here
    return fetcher(url, ops);
  };

  return customFetcher;
}
```

If you need to make your own fetcher for a proxy, ensure you make it compatible with the following headers: `Set-Cookie`, `Cookie`, `Referer`, `Origin`. Proxied fetchers need to be able to write/read those headers when making a request.

## Making a fetcher from scratch

In some rare cases, you need to make a fetcher from scratch.
This is the list of features it needs:
- Send/read every header
- Parse JSON, otherwise parse as text
- Send JSON, FormData or normal strings
- Get the final destination URL

It's not recommended to do this at all. If you have to, you can base your code on the original implementation of `makeStandardFetcher`. Check out the [source code for it here](https://github.com/movie-web/providers/blob/dev/src/fetchers/standardFetch.ts).

Here is a basic template on how to make your own custom fetcher:

```ts
const myFetcher: Fetcher = (url, ops) => {
  // Do some fetching
  return {
    body: {},
    finalUrl: '',
    headers: new Headers(), // should only contain headers from ops.readHeaders
    statusCode: 200,
  };
}
```
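As one possible use of the derived-fetcher pattern above, here is a hedged sketch (not from the repository) of a wrapper that logs every request before delegating to the standard fetcher; `makeLoggingFetcher` is an invented name, and the `Fetcher` type is assumed to be exported as the changelog states:

```ts
import { makeStandardFetcher, type Fetcher } from '@movie-web/providers';

// Wrap the standard fetcher and log every request before delegating to it.
export function makeLoggingFetcher(fetchApi: typeof fetch): Fetcher {
  const base = makeStandardFetcher(fetchApi);
  const loggingFetcher: Fetcher = (url, ops) => {
    console.log(`[providers] fetching ${url}`);
    return base(url, ops);
  };
  return loggingFetcher;
}
```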
.docs/content/2.essentials/3.customize-providers.md (new file, +74)
@@ -0,0 +1,74 @@

# Customize providers

You can make the provider controls in two ways: either with `makeProviders()` (the simpler option) or with `buildProviders()` (the more elaborate and extensive option).

## `makeProviders()` (simple)

To know what to set the configuration to, you can read [How to use on X](./0.usage-on-x.md) for a detailed guide on how to configure your controls.

```ts
const providers = makeProviders({
  // fetcher, every web request gets called through here
  fetcher: makeStandardFetcher(fetch),

  // proxied fetcher, if the scraper needs to access a CORS proxy, this fetcher will be called instead
  // of the normal fetcher. Defaults to the normal fetcher.
  proxiedFetcher: undefined,

  // target of where the streams will be used
  target: targets.NATIVE,

  // Set this to true if the requests will have the same IP as
  // the device that the stream will be played on.
  consistentIpForRequests: false,
})
```

## `buildProviders()` (advanced)

To know what to set the configuration to, you can read [How to use on X](./0.usage-on-x.md) for a detailed guide on how to configure your controls.

### Standard setup

```ts
const providers = buildProviders()
  .setTarget(targets.NATIVE) // target of where the streams will be used
  .setFetcher(makeStandardFetcher(fetch)) // fetcher, every web request gets called through here
  .addBuiltinProviders() // add all builtin providers, if this is not called, no providers will be added to the controls
  .build();
```

### Adding only a select few providers

Not all providers are great quality, so you can make an instance of the controls with only the providers you want.

```ts
const providers = buildProviders()
  .setTarget(targets.NATIVE) // target of where the streams will be used
  .setFetcher(makeStandardFetcher(fetch)) // fetcher, every web request gets called through here
  .addSource('showbox') // only add the showbox source
  .addEmbed('febbox-hls') // add the febbox-hls embed, which is returned by showbox
  .build();
```

### Adding your own scrapers to the providers

If you have your own scraper and still want to use the nice utilities of the provider library, or just want to add on to the built-in providers, you can add your own custom source.

```ts
const providers = buildProviders()
  .setTarget(targets.NATIVE) // target of where the streams will be used
  .setFetcher(makeStandardFetcher(fetch)) // fetcher, every web request gets called through here
  .addSource({ // add your own source
    id: 'my-scraper',
    name: 'My scraper',
    rank: 800,
    flags: [],
    scrapeMovie(ctx) {
      throw new Error('Not implemented');
    },
  })
  .build();
```
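Whichever builder you pick, the resulting controls object is used the same way afterwards. A short sketch (not from the repository), reusing the `runAll` call documented in the quick start:

```ts
// The controls built above behave exactly like the ones returned by makeProviders():
const output = await providers.runAll({
  media: {
    type: 'movie',
    title: 'Hamilton',
    releaseYear: 2020,
    tmdbId: '556574',
  },
});

if (output) console.log(`scraped via ${output.sourceId}`);
```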
.docs/content/2.essentials/4.using-streams.md (new file, +84)
@@ -0,0 +1,84 @@

# Using streams

Streams can sometimes be quite picky about how they can be used. So here is a guide on how to use them.

## Essentials

All streams have the same common parameters:
- `Stream.type`: The type of stream. Either `hls` or `file`
- `Stream.id`: The id of this stream, unique per scraper output.
- `Stream.flags`: A list of flags that apply to this stream. Most people won't need to use it.
- `Stream.captions`: A list of captions/subtitles for this stream.
- `Stream.headers`: Either undefined or a key-value object of headers you must set to use the stream.
- `Stream.preferredHeaders`: Either undefined or a key-value object of headers you may want to set for optimal playback, but that are not required.

Now let's delve deeper into how to watch these streams!

## Streams with type `hls`

HLS streams can be tough to watch. They're not normal files you can just use.
These streams have an extra property `Stream.playlist` which contains the m3u8 playlist.

Here is a code sample of how to use HLS streams in a web context using hls.js:

```html
<script src="https://cdn.jsdelivr.net/npm/hls.js@1"></script>

<video id="video"></video>
<script>
  const stream = null; // add your stream here

  if (Hls.isSupported()) {
    var video = document.getElementById('video');
    var hls = new Hls();
    hls.loadSource(stream.playlist);
    hls.attachMedia(video);
  }
</script>
```

## Streams with type `file`

File streams are quite easy to use, they just return a new property: `Stream.qualities`.
This property is a map of quality to stream file. So if you want the 1080p quality, you do `stream.qualities["1080"]` to get your stream file. It will return undefined if that quality is absent.

The possible qualities are: `unknown`, `360`, `480`, `720`, `1080`, `4k`.
File-based streams are always guaranteed to have at least one quality.

Once you get a stream file, you have the following parameters:
- `StreamFile.type`: Right now it can only be `mp4`.
- `StreamFile.url`: The URL linking to the video file.

Here is a code sample of how to watch a file-based stream in a browser:

```html
<video id="video"></video>
<script>
  const stream = null; // add your stream here
  const video = document.getElementById('video');

  const qualityEntries = Object.keys(stream.qualities);
  const firstQuality = qualityEntries[0];
  video.src = stream.qualities[firstQuality].url;
</script>
```

## Streams with headers

Streams have both a `Stream.headers` and a `Stream.preferredHeaders`.
The difference between the two is that `Stream.headers` **must** be set in order for the stream to work, while the other is optional and only enhances quality or performance.

If your target is set to `BROWSER`, headers will never be required, as it's not possible to set them there.

## Using captions/subtitles

All streams have a list of captions at `Stream.captions`. The structure looks like this:
```ts
type Caption = {
  type: CaptionType; // Caption format, either "srt" or "vtt"
  id: string; // Unique per stream
  url: string; // The URL pointing to the subtitle file
  hasCorsRestrictions: boolean; // If true, you will need to proxy it if you're running in a browser
  language: string; // Language code of the caption
};
```
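To connect the `Caption` shape above to the file-stream example, here is a hedged sketch (not from the repository); it assumes the `CaptionType` values are the lowercase strings `'srt'` and `'vtt'`, and only wires up VTT because that is what browsers play natively:

```ts
// Attach any VTT captions from a stream to the <video> element used in the examples above.
function attachCaptions(video: HTMLVideoElement, captions: Caption[]) {
  for (const caption of captions) {
    if (caption.type !== 'vtt') continue; // srt would need converting to vtt first
    if (caption.hasCorsRestrictions) continue; // would need proxying when running in a browser
    const track = document.createElement('track');
    track.kind = 'subtitles';
    track.src = caption.url;
    track.srclang = caption.language;
    track.label = caption.language;
    video.appendChild(track);
  }
}
```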
.docs/content/2.essentials/_dir.yml (new file, +3)
@@ -0,0 +1,3 @@

icon: ph:info-fill
navigation.redirect: /essentials/usage
navigation.title: "Get started"
Deleted (@@ -1,31 +0,0 @@): the old "Testing Providers" page.

# Testing Providers

In order to test providers effectively, a built-in tool can be used which allows testing of individual source and embed providers.

There are two ways to use the tool:
- Command Line Mode - For passing in arguments directly to the script. This is useful in non-interactive environments such as CI or to repeatedly test during development.
- Question Mode - Where the script asks you questions about which source you wish to test.

::code-group
```bash [Interactive]
npm run test:dev
```
```bash [Command Line]
npm run test:dev -- [options]

# Example testing FlixHQ with "Spirited Away"
npm run test:dev -- -sid flixhq -tid 129 -t movie
```
::

The following Command Line Mode arguments are available:

| Argument | Alias | Description | Default |
|---------------|--------|--------------------------------------------------------------------------|--------------|
| `--fetcher` | `-f` | Fetcher type. Either `node-fetch` or `native` | `node-fetch` |
| `--source-id` | `-sid` | Source ID for the source to be tested | |
| `--tmdb-id` | `-tid` | TMDB ID for the media to scrape. Only used if source is a provider | |
| `--type` | `-t` | Media type. Either `movie` or `show`. Only used if source is a provider | `movie` |
| `--season` | `-s` | Season number. Only used if type is `show` | `0` |
| `--episode` | `-e` | Episode number. Only used if type is `show` | `0` |
| `--url` | `-u` | URL to a video embed. Only used if source is an embed | |
| `--help` | `-h` | Shows help for the command arguments | |
Deleted (@@ -1,2 +0,0 @@): the old developer _dir.yml.

icon: ph:code
navigation.redirect: /developer/dev-cli
.docs/content/3.in-depth/0.sources-and-embeds.md (new file, +11)
@@ -0,0 +1,11 @@

# Sources vs embeds

::alert{type="warning"}
This page isn't quite done yet, stay tuned!
::

<!--
TODO
- How do sources and embeds differ
- How do sources and embeds interact
-->
.docs/content/3.in-depth/1.new-providers.md (new file, +12)
@@ -0,0 +1,12 @@

# New providers

::alert{type="warning"}
This page isn't quite done yet, stay tuned!
::

<!--
TODO
- How to make new sources or embeds
- Ranking
- Link to flags
-->
.docs/content/3.in-depth/2.flags.md (new file, +10)
@@ -0,0 +1,10 @@

# Flags

Flags are the primary way the library separates entities between different environments.
For example, some sources only give back content that has the CORS headers set to allow anyone, so that source gets the flag `CORS_ALLOWED`. Now if you set your target to `BROWSER`, sources without that flag won't even get listed.

This concept is applied in multiple ways across the library.

## Flag options
- `CORS_ALLOWED`: Headers from the output streams are set to allow any origin.
- `IP_LOCKED`: The streams are locked by IP: requester and watcher must be the same.
.docs/content/3.in-depth/_dir.yml (new file, +3)
@@ -0,0 +1,3 @@

icon: ph:atom-fill
navigation.redirect: /in-depth/sources-and-embeds
navigation.title: "In-depth"
.docs/content/4.extra-topics/0.development.md (new file, +72)
@@ -0,0 +1,72 @@

# Development / contributing

::alert{type="warning"}
This page isn't quite done yet, stay tuned!
::

<!--
TODO
- Development setup
- How to make new sources/embeds (link to the page)
- How to use the fetchers, when to use proxiedFetcher
- How to use the context
-->

## Testing using the CLI

Testing can be quite difficult for this library; unit tests can't really be made because of the unreliable nature of scrapers.
But manually testing by writing an entry-point is also really annoying.

Our solution is to make a CLI that you can use to run the scrapers. For everything else there are unit tests.

### Setup
Make a `.env` file in the root of the repository and add a TMDB API key: `MOVIE_WEB_TMDB_API_KEY=KEY_HERE`.
Then make sure you've run `npm i` to get all the dependencies.

### Mode 1 - interactive

To run the CLI without needing to learn all the arguments, simply run the following command and go with the flow.

```sh
npm run cli
```

### Mode 2 - arguments

For repeatability, it can be useful to specify the arguments one by one.
To see all the arguments, you can run the help command:
```sh
npm run cli -- -h
```

Then just run it with your arguments, for example:
```sh
npm run cli -- -sid showbox -tid 556574
```

### Examples

```sh
# Spirited away - showbox
npm run cli -- -sid showbox -tid 129

# Hamilton - flixhq
npm run cli -- -sid flixhq -tid 556574

# Arcane S1E1 - showbox
npm run cli -- -sid zoechip -tid 94605 -s 1 -e 1

# febbox mp4 - get streams from an embed (gotten from a source output)
npm run cli -- -sid febbox-mp4 -u URL_HERE
```

### Fetcher options

The CLI comes with a few built-in fetchers:
- `node-fetch`: Fetch using the "node-fetch" library.
- `native`: Use the new fetch built into Node.js (undici).
- `browser`: Start up headless Chrome, and run the library in that context using a proxied fetcher.

::alert{type="warning"}
The browser fetcher will require you to run `npm run build` before running the CLI. Otherwise you will get outdated results.
::
.docs/content/4.extra-topics/_dir.yml (new file, +3)
@@ -0,0 +1,3 @@

icon: ph:aperture-fill
navigation.redirect: /extra-topics/development
navigation.title: "Extra topics"
@@ -1,12 +1,12 @@
 # `makeProviders`
 
-Make an instance of providers with configuration.
-This is the main entrypoint of the library. It is recommended to make one instance globally and reuse it throughout your application.
+Make an instance of provider controls with configuration.
+This is the main entry-point of the library. It is recommended to make one instance globally and reuse it throughout your application.
 
 ## Example
 
 ```ts
-import { targets, makeProviders, makeDefaultFetcher } from "@movie-web/providers";
+import { targets, makeProviders, makeDefaultFetcher } from '@movie-web/providers';
 
 const providers = makeProviders({
   fetcher: makeDefaultFetcher(fetch),

@@ -23,9 +23,9 @@ interface ProviderBuilderOptions {
   // instance of a fetcher, all webrequests are made with the fetcher.
   fetcher: Fetcher;
 
-  // instance of a fetcher, in case the request has cors restrictions.
+  // instance of a fetcher, in case the request has CORS restrictions.
   // this fetcher will be called instead of normal fetcher.
-  // if your environment doesnt have cors restrictions (like nodejs), there is no need to set this.
+  // if your environment doesn't have CORS restrictions (like Node.JS), there is no need to set this.
   proxiedFetcher?: Fetcher;
 
   // target to get streams for
@@ -1,7 +1,7 @@
 # `ProviderControls.runAll`
 
 Run all providers one by one in order of their built-in ranking.
-You can attach events if you need to know what is going on while its processing.
+You can attach events if you need to know what is going on while it is processing.
 
 ## Example
 

@@ -9,9 +9,9 @@ You can attach events if you need to know what is going on while its processing.
 // media from TMDB
 const media = {
   type: 'movie',
-  title: "Hamilton",
+  title: 'Hamilton',
   releaseYear: 2020,
-  tmdbId: "556574"
+  tmdbId: '556574'
 }
 
 // scrape a stream

@@ -20,7 +20,7 @@ const stream = await providers.runAll({
 })
 
 // scrape a stream, but prioritize flixhq above all
-// (other scrapers are stil ran if flixhq fails, it just has priority)
+// (other scrapers are still run if flixhq fails, it just has priority)
 const flixhqStream = await providers.runAll({
   media: media,
   sourceOrder: ['flixhq']

@@ -33,12 +33,12 @@ const flixhqStream = await providers.runAll({
 function runAll(runnerOps: RunnerOptions): Promise<RunOutput | null>;
 
 interface RunnerOptions {
-  // overwrite the order of sources to run. list of ids
-  // any omitted ids are in added to the end in order of rank (highest first)
+  // overwrite the order of sources to run. List of IDs
+  // any omitted IDs are added to the end in order of rank (highest first)
   sourceOrder?: string[];
 
-  // overwrite the order of embeds to run. list of ids
-  // any omitted ids are in added to the end in order of rank (highest first)
+  // overwrite the order of embeds to run. List of IDs
+  // any omitted IDs are added to the end in order of rank (highest first)
   embedOrder?: string[];
 
   // object of event functions

@@ -49,13 +49,13 @@ interface RunnerOptions {
 }
 
 type RunOutput = {
-  // source scraper id
+  // source scraper ID
   sourceId: string;
 
-  // if from an embed, this is the embed scraper id
+  // if from an embed, this is the embed scraper ID
   embedId?: string;
 
-  // the outputed stream
+  // the emitted stream
   stream: Stream;
 };
 ```
@@ -1,18 +1,18 @@
 # `ProviderControls.runSourceScraper`
 
-Run a specific source scraper and get its outputted streams.
+Run a specific source scraper and get its emitted streams.
 
 ## Example
 
 ```ts
-import { SourcererOutput, NotFoundError } from "@movie-web/providers";
+import { SourcererOutput, NotFoundError } from '@movie-web/providers';
 
 // media from TMDB
 const media = {
   type: 'movie',
-  title: "Hamilton",
+  title: 'Hamilton',
   releaseYear: 2020,
-  tmdbId: "556574"
+  tmdbId: '556574'
 }
 
 // scrape a stream from flixhq

@@ -24,15 +24,15 @@ try {
   })
 } catch (err) {
   if (err instanceof NotFoundError) {
-    console.log("source doesnt have this media");
+    console.log('source does not have this media');
   } else {
-    console.log("failed to scrape")
+    console.log('failed to scrape')
   }
   return;
 }
 
 if (!output.stream && output.embeds.length === 0) {
-  console.log("no streams found");
+  console.log('no streams found');
 }
 ```
 

@@ -48,13 +48,13 @@ interface SourceRunnerOptions {
   // the media you want to see sources from
   media: ScrapeMedia;
 
-  // id of the source scraper you want to scrape from
+  // ID of the source scraper you want to scrape from
   id: string;
 }
 
 type SourcererOutput = {
   // list of embeds that the source scraper found.
-  // embed id is a reference to an embed scraper
+  // embed ID is a reference to an embed scraper
   embeds: {
     embedId: string;
     url: string;
@@ -1,21 +1,21 @@
 # `ProviderControls.runEmbedScraper`
 
-Run a specific embed scraper and get its outputted streams.
+Run a specific embed scraper and get its emitted streams.
 
 ## Example
 
 ```ts
-import { SourcererOutput } from "@movie-web/providers";
+import { SourcererOutput } from '@movie-web/providers';
 
 // scrape a stream from upcloud
 let output: EmbedOutput;
 try {
-  output = await providers.runSourceScraper({
+  output = await providers.runEmbedScraper({
     id: 'upcloud',
     url: 'https://example.com/123',
   })
 } catch (err) {
-  console.log("failed to scrape")
+  console.log('failed to scrape')
   return;
 }
 

@@ -31,10 +31,10 @@ interface EmbedRunnerOptions {
   // object of event functions
   events?: IndividualScraperEvents;
 
-  // the embed url
+  // the embed URL
   url: string;
 
-  // id of the embed scraper you want to scrape from
+  // ID of the embed scraper you want to scrape from
   id: string;
 }
 
@@ -1,13 +1,13 @@
 # `ProviderControls.listSources`

-List all source scrapers that applicable for the target.
-They are sorted by rank, highest first
+List all source scrapers that are applicable for the target.
+They are sorted by rank; highest first

 ## Example

 ```ts
 const sourceScrapers = providers.listSources();
-// Guaranteed to only return type: 'source'
+// Guaranteed to only return the type: 'source'
 ```

 ## Type
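A quick way to see the ranking described above is simply to print the list. The `id`, `name` and `rank` fields used below are assumptions about the returned metadata shape, not something this page confirms.

```ts
// Minimal sketch: print the ranked source scrapers, highest rank first.
// Field names (id, name, rank) are assumed here.
const sources = providers.listSources();
for (const source of sources) {
  console.log(`${source.rank}\t${source.id}\t${source.name}`);
}
```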
@@ -1,13 +1,13 @@
 # `ProviderControls.listEmbeds`

-List all embed scrapers that applicable for the target.
-They are sorted by rank, highest first
+List all embed scrapers that are applicable for the target.
+They are sorted by rank; highest first

 ## Example

 ```ts
 const embedScrapers = providers.listEmbeds();
-// Guaranteed to only return type: 'embed'
+// Guaranteed to only return the type: 'embed'
 ```

 ## Type
@@ -1,7 +1,7 @@
 # `ProviderControls.getMetadata`

 Get meta data for a scraper, can be either source or embed scraper.
-Returns null if the `id` is not recognized.
+Returns `null` if the `id` is not recognized.

 ## Example

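A minimal sketch of the `null` behaviour described above, assuming the same `providers` instance as in the other examples; `'flixhq'` is an id that appears elsewhere in this diff and `'not-a-real-id'` is made up.

```ts
console.log(providers.getMetadata('flixhq'));        // metadata object for the scraper
console.log(providers.getMetadata('not-a-real-id')); // null, per the description above
```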
.docs/content/5.api-reference/7.makeStandardFetcher.md (new file, 20 lines)
@@ -0,0 +1,20 @@
+# `makeStandardFetcher`
+
+Make a fetcher from a `fetch()` API. It is used for making an instance of provider controls.
+
+## Example
+
+```ts
+import { targets, makeProviders, makeDefaultFetcher } from '@movie-web/providers';
+
+const providers = makeProviders({
+  fetcher: makeStandardFetcher(fetch),
+  target: targets.ANY,
+});
+```
+
+## Type
+
+```ts
+function makeStandardFetcher(fetchApi: typeof fetch): Fetcher;
+```
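Note that the example above imports `makeDefaultFetcher` while calling `makeStandardFetcher`. The sketch below sticks to the `makeStandardFetcher` export named in the Type section and assumes a global `fetch` (Node 18+ or a browser).

```ts
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

// On runtimes without a global fetch, a compatible implementation such as
// node-fetch could be passed in instead of the built-in one.
const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  target: targets.ANY,
});
```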
@@ -5,9 +5,9 @@ Make a fetcher to use with [movie-web/simple-proxy](https://github.com/movie-web
 ## Example

 ```ts
-import { targets, makeProviders, makeDefaultFetcher, makeSimpleProxyFetcher } from "@movie-web/providers";
+import { targets, makeProviders, makeDefaultFetcher, makeSimpleProxyFetcher } from '@movie-web/providers';

-const proxyUrl = "https://your.proxy.workers.dev/"
+const proxyUrl = 'https://your.proxy.workers.dev/'

 const providers = makeProviders({
   fetcher: makeDefaultFetcher(fetch),
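A common setup combines both fetchers: a standard fetcher for ordinary requests and a simple-proxy fetcher for requests that must go through the proxy. A minimal sketch, assuming a `proxiedFetcher` option on `makeProviders` (suggested by the builder's `setProxiedFetcher` used in the test utilities later in this diff); the proxy URL is a placeholder.

```ts
import { makeProviders, makeStandardFetcher, makeSimpleProxyFetcher, targets } from '@movie-web/providers';

const proxyUrl = 'https://your.proxy.workers.dev/'; // placeholder; deploy your own simple-proxy

const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  // assumed option name, mirroring buildProviders().setProxiedFetcher(...) in this diff
  proxiedFetcher: makeSimpleProxyFetcher(proxyUrl, fetch),
  target: targets.ANY,
});
```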
.docs/content/5.api-reference/_dir.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
+icon: ph:code-simple-fill
+navigation.redirect: /api/makeproviders
+navigation.title: "Api reference"
.docs/package-lock.json (generated, 19980 lines): diff suppressed because it is too large
.docs/package.json (5 lines changed, executable file → normal file)
@@ -7,11 +7,12 @@
     "build": "nuxi build",
     "generate": "nuxi generate",
     "preview": "nuxi preview",
-    "lint": "eslint ."
+    "lint": "eslint .",
+    "preinstall": "npx -y only-allow pnpm"
   },
   "devDependencies": {
     "@nuxt-themes/docus": "^1.13.1",
-    "@nuxt/devtools": "^0.6.7",
+    "@nuxt/devtools": "^1.0.1",
     "@nuxt/eslint-config": "^0.1.1",
     "@nuxtjs/plausible": "^0.2.1",
     "@types/node": "^20.4.0",
.docs/pnpm-lock.yaml (generated, new file, 10025 lines): diff suppressed because it is too large
.eslintrc (new file, 72 lines)
@@ -0,0 +1,72 @@
+{
+  "env": {
+    "browser": true
+  },
+  "extends": ["airbnb-base", "plugin:@typescript-eslint/recommended", "plugin:prettier/recommended"],
+  "ignorePatterns": ["lib/*", "tests/*", "/*.js", "/*.ts", "/src/__test__/*", "/**/*.test.ts", "test/*"],
+  "parser": "@typescript-eslint/parser",
+  "parserOptions": {
+    "project": "./tsconfig.json",
+    "tsconfigRootDir": "./"
+  },
+  "settings": {
+    "import/resolver": {
+      "typescript": {
+        "project": "./tsconfig.json"
+      }
+    }
+  },
+  "plugins": ["@typescript-eslint", "import", "prettier"],
+  "rules": {
+    "no-plusplus": "off",
+    "class-methods-use-this": "off",
+    "no-bitwise": "off",
+    "no-underscore-dangle": "off",
+    "@typescript-eslint/no-explicit-any": "off",
+    "no-console": ["error", { "allow": ["warn", "error"] }],
+    "@typescript-eslint/no-this-alias": "off",
+    "import/prefer-default-export": "off",
+    "@typescript-eslint/no-empty-function": "off",
+    "no-shadow": "off",
+    "@typescript-eslint/no-shadow": ["error"],
+    "no-restricted-syntax": "off",
+    "import/no-unresolved": ["error", { "ignore": ["^virtual:"] }],
+    "consistent-return": "off",
+    "no-continue": "off",
+    "no-eval": "off",
+    "no-await-in-loop": "off",
+    "no-nested-ternary": "off",
+    "no-param-reassign": ["error", { "props": false }],
+    "prefer-destructuring": "off",
+    "@typescript-eslint/no-unused-vars": ["warn", { "argsIgnorePattern": "^_" }],
+    "import/extensions": [
+      "error",
+      "ignorePackages",
+      {
+        "ts": "never",
+        "tsx": "never"
+      }
+    ],
+    "import/order": [
+      "error",
+      {
+        "groups": ["builtin", "external", "internal", ["sibling", "parent"], "index", "unknown"],
+        "newlines-between": "always",
+        "alphabetize": {
+          "order": "asc",
+          "caseInsensitive": true
+        }
+      }
+    ],
+    "sort-imports": [
+      "error",
+      {
+        "ignoreCase": false,
+        "ignoreDeclarationSort": true,
+        "ignoreMemberSort": false,
+        "memberSyntaxSortOrder": ["none", "all", "multiple", "single"],
+        "allowSeparatedGroups": true
+      }
+    ]
+  }
+}
.eslintrc.js (deleted, 69 lines)
@@ -1,69 +0,0 @@
-module.exports = {
-  env: {
-    browser: true,
-  },
-  extends: ['airbnb-base', 'plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
-  ignorePatterns: ['lib/*', 'tests/*', '/*.js', '/*.ts', '/**/*.test.ts', 'test/*'],
-  parser: '@typescript-eslint/parser',
-  parserOptions: {
-    project: './tsconfig.json',
-    tsconfigRootDir: './',
-  },
-  settings: {
-    'import/resolver': {
-      typescript: {
-        project: './tsconfig.json',
-      },
-    },
-  },
-  plugins: ['@typescript-eslint', 'import', 'prettier'],
-  rules: {
-    'no-underscore-dangle': 'off',
-    '@typescript-eslint/no-explicit-any': 'off',
-    'no-console': 'off',
-    '@typescript-eslint/no-this-alias': 'off',
-    'import/prefer-default-export': 'off',
-    '@typescript-eslint/no-empty-function': 'off',
-    'no-shadow': 'off',
-    '@typescript-eslint/no-shadow': ['error'],
-    'no-restricted-syntax': 'off',
-    'import/no-unresolved': ['error', { ignore: ['^virtual:'] }],
-    'consistent-return': 'off',
-    'no-continue': 'off',
-    'no-eval': 'off',
-    'no-await-in-loop': 'off',
-    'no-nested-ternary': 'off',
-    'no-param-reassign': ['error', { props: false }],
-    'prefer-destructuring': 'off',
-    '@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
-    'import/extensions': [
-      'error',
-      'ignorePackages',
-      {
-        ts: 'never',
-        tsx: 'never',
-      },
-    ],
-    'import/order': [
-      'error',
-      {
-        groups: ['builtin', 'external', 'internal', ['sibling', 'parent'], 'index', 'unknown'],
-        'newlines-between': 'always',
-        alphabetize: {
-          order: 'asc',
-          caseInsensitive: true,
-        },
-      },
-    ],
-    'sort-imports': [
-      'error',
-      {
-        ignoreCase: false,
-        ignoreDeclarationSort: true,
-        ignoreMemberSort: false,
-        memberSyntaxSortOrder: ['none', 'all', 'multiple', 'single'],
-        allowSeparatedGroups: true,
-      },
-    ],
-  },
-};
.github/CODEOWNERS (vendored, 4 lines changed)
@@ -1,3 +1 @@
-* @movie-web/core
-
-.github @binaryoverload
+* @movie-web/project-leads
.github/SECURITY.md (vendored, 2 lines changed)
@@ -11,4 +11,4 @@ Support is not provided for any forks or mirrors of movie-web.

 There are two ways you can contact the movie-web maintainers to report a vulnerability:
 - Email [security@movie-web.app](mailto:security@movie-web.app)
-- Report the vulnerability in the [movie-web Discord server](https://discord.movie-web.app)
+- Report the vulnerability in the [movie-web Discord server](https://movie-web.github.io/links/discord)
.github/workflows/docs.yml (vendored, 43 lines changed)
@@ -11,26 +11,33 @@ jobs:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
         uses: actions/checkout@v3

-      - name: Install Node.js
-        uses: actions/setup-node@v3
+      - uses: pnpm/action-setup@v3
         with:
-          node-version: 18
+          version: 8
+
+      - name: Install Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "pnpm"

       - name: Install packages
         working-directory: ./.docs
-        run: npm install
+        run: pnpm install

       - name: Build project
         working-directory: ./.docs
-        run: npm run generate
+        run: pnpm run generate
+        env:
+          NUXT_APP_BASE_URL: /providers/

       - name: Upload production-ready build files
         uses: actions/upload-pages-artifact@v1
         with:
           path: ./.docs/.output/public

   deploy:
     name: Deploy
.github/workflows/publish.yml (vendored, 21 lines changed)
@@ -13,17 +13,22 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v3

-      - name: Install Node.js
-        uses: actions/setup-node@v3
+      - uses: pnpm/action-setup@v3
         with:
-          node-version: 18
-          registry-url: 'https://registry.npmjs.org'
+          version: 8
+
+      - name: Install Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "pnpm"
+          registry-url: "https://registry.npmjs.org"

       - name: Install packages
-        run: npm ci
+        run: pnpm install --frozen-lockfile

       - name: Publish
-        run: npm publish --access public
+        run: pnpm publish --access public
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
.github/workflows/tests.yml (vendored, 36 lines changed)
@@ -13,22 +13,30 @@ jobs:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
         uses: actions/checkout@v3

-      - name: Install Node.js
-        uses: actions/setup-node@v3
+      - uses: pnpm/action-setup@v3
         with:
-          node-version: 18
+          version: 8
+
+      - name: Install Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "pnpm"

       - name: Install packages
-        run: npm install
+        run: pnpm install --frozen-lockfile
+
+      - name: Install puppeteer
+        run: node ./node_modules/puppeteer/install.mjs

       - name: Run tests
-        run: npm run test
+        run: pnpm run test

       - name: Run integration tests
-        run: npm run build && npm run test:integration
+        run: pnpm run build && pnpm run test:integration

       - name: Run linting
-        run: npm run lint
+        run: pnpm run lint
.gitignore (vendored, 2 lines changed)
@@ -2,3 +2,5 @@ node_modules/
 /lib
 coverage
 .env
+.eslintcache
+
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 movie-web
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
README.md (27 lines changed)
@@ -7,29 +7,8 @@ features:
 - scrape popular streaming websites
 - works in both browser and server-side

-Visit documentation here: https://providers.docs.movie-web.app/
+Visit documentation here: https://movie-web.github.io/providers/

-## Development
-To make testing scrapers easier during development a CLI tool is available to run specific sources. To run the CLI testing tool, use `npm run test:dev`. The script supports 2 execution modes
-
-- CLI Mode, for passing in arguments directly to the script
-- Question Mode, where the script asks you questions about which source you wish to test
-
-The following CLI Mode arguments are available
-
-| Argument | Alias | Description | Default |
-|---------------|--------|-------------------------------------------------------------------------|--------------|
-| `--fetcher` | `-f` | Fetcher type. Either `node-fetch` or `native` | `node-fetch` |
-| `--source-id` | `-sid` | Source ID for the source to be tested | |
-| `--tmdb-id` | `-tid` | TMDB ID for the media to scrape. Only used if source is a provider | |
-| `--type` | `-t` | Media type. Either `movie` or `show`. Only used if source is a provider | `movie` |
-| `--season` | `-s` | Season number. Only used if type is `show` | `0` |
-| `--episode` | `-e` | Episode number. Only used if type is `show` | `0` |
-| `--url` | `-u` | URL to a video embed. Only used if source is an embed | |
-| `--help` | `-h` | Shows help for the command arguments | |
-
-Example testing the FlixHQ source on the movie "Spirited Away"
-
-```bash
-npm run test:dev -- -sid flixhq -tid 129 -t movie
-```
+## How to run locally or test my changes
+
+These topics are also covered in the documentation, [read about it here](https://movie-web.github.io/providers/extra-topics/development).
package-lock.json (generated, 6752 lines): diff suppressed because it is too large
package.json (84 lines changed)
@@ -1,8 +1,9 @@
 {
   "name": "@movie-web/providers",
-  "version": "1.0.1",
+  "version": "2.2.9",
   "description": "Package that contains all the providers of movie-web",
-  "main": "./lib/index.umd.js",
+  "type": "module",
+  "main": "./lib/index.js",
   "types": "./lib/index.d.ts",
   "files": [
     "./lib"
@@ -10,12 +11,12 @@
   "exports": {
     ".": {
       "import": {
-        "types": "./lib/index.d.mts",
-        "default": "./lib/index.mjs"
+        "types": "./lib/index.d.ts",
+        "default": "./lib/index.js"
       },
       "require": {
         "types": "./lib/index.d.ts",
-        "default": "./lib/index.umd.js"
+        "default": "./lib/index.umd.cjs"
       }
     }
   },
@@ -32,55 +33,64 @@
   "bugs": {
     "url": "https://github.com/movie-web/providers/issues"
   },
-  "homepage": "https://providers.docs.movie-web.app/",
+  "homepage": "https://movie-web.github.io/providers/",
   "scripts": {
-    "build": "vite build",
+    "build": "vite build && tsc --noEmit",
+    "cli": "vite-node ./src/dev-cli/index.ts",
     "test": "vitest run",
-    "test:dev": "ts-node ./src/dev-cli.ts",
     "test:watch": "vitest",
-    "test:integration": "node ./tests/cjs && node ./tests/esm",
+    "test:providers": "cross-env MW_TEST_PROVIDERS=true vitest run --reporter verbose",
+    "test:integration": "node ./tests/cjs && node ./tests/esm && node ./tests/browser",
    "test:coverage": "vitest run --coverage",
     "lint": "eslint --ext .ts,.js src/",
     "lint:fix": "eslint --fix --ext .ts,.js src/",
     "lint:report": "eslint --ext .ts,.js --output-file eslint_report.json --format json src/",
-    "prepare": "npm run build",
-    "prepublishOnly": "npm test && npm run lint"
+    "preinstall": "npx -y only-allow pnpm",
+    "prepare": "pnpm run build",
+    "prepublishOnly": "pnpm test && pnpm run lint"
   },
   "devDependencies": {
-    "@types/crypto-js": "^4.1.1",
-    "@types/node-fetch": "^2.6.6",
-    "@types/randombytes": "^2.0.1",
-    "@types/spinnies": "^0.5.1",
-    "@typescript-eslint/eslint-plugin": "^5.60.0",
-    "@typescript-eslint/parser": "^5.60.0",
-    "@vitest/coverage-v8": "^0.34.3",
-    "commander": "^11.0.0",
-    "dotenv": "^16.3.1",
+    "@nabla/vite-plugin-eslint": "^2.0.2",
+    "@types/cookie": "^0.6.0",
+    "@types/crypto-js": "^4.2.2",
+    "@types/node-fetch": "^2.6.11",
+    "@types/randombytes": "^2.0.3",
+    "@types/set-cookie-parser": "^2.4.7",
+    "@types/spinnies": "^0.5.3",
+    "@typescript-eslint/eslint-plugin": "^7.4.0",
+    "@typescript-eslint/parser": "^7.4.0",
+    "@vitest/coverage-v8": "^1.4.0",
+    "commander": "^12.0.0",
+    "cross-env": "^7.0.3",
+    "dotenv": "^16.4.5",
     "enquirer": "^2.4.1",
-    "eslint": "^8.30.0",
+    "eslint": "^8.57.0",
     "eslint-config-airbnb-base": "^15.0.0",
-    "eslint-config-prettier": "^8.5.0",
-    "eslint-import-resolver-typescript": "^3.5.5",
-    "eslint-plugin-import": "^2.27.5",
-    "eslint-plugin-prettier": "^4.2.1",
-    "node-fetch": "^2.7.0",
-    "prettier": "^2.6.2",
+    "eslint-config-prettier": "^9.1.0",
+    "eslint-import-resolver-typescript": "^3.6.1",
+    "eslint-plugin-import": "^2.29.1",
+    "eslint-plugin-prettier": "^5.1.3",
+    "node-fetch": "^3.3.2",
+    "prettier": "^3.2.5",
+    "puppeteer": "^22.6.1",
     "spinnies": "^0.5.1",
-    "ts-node": "^10.9.1",
-    "tsc-alias": "^1.6.7",
+    "tsc-alias": "^1.8.8",
     "tsconfig-paths": "^4.2.0",
-    "typescript": "^4.6.3",
-    "vite": "^4.0.0",
-    "vite-plugin-dts": "^3.5.3",
-    "vite-plugin-eslint": "^1.8.1",
-    "vitest": "^0.32.2"
+    "typescript": "^5.4.3",
+    "vite": "^5.2.7",
+    "vite-node": "^1.4.0",
+    "vite-plugin-dts": "^3.8.1",
+    "vitest": "^1.4.0"
   },
   "dependencies": {
     "cheerio": "^1.0.0-rc.12",
-    "crypto-js": "^4.1.1",
+    "cookie": "^0.6.0",
+    "crypto-js": "^4.2.0",
     "form-data": "^4.0.0",
-    "nanoid": "^3.3.6",
-    "node-fetch": "^2.7.0",
+    "iso-639-1": "^3.1.2",
+    "nanoid": "^3.3.7",
+    "node-fetch": "^3.3.2",
+    "set-cookie-parser": "^2.6.0",
     "unpacker": "^1.0.1"
   }
 }
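The exports map above means the same package name resolves to different artifacts depending on the consumer's module system. A small illustration (the package name is from this diff; everything else is generic):

```ts
// ESM / TypeScript: resolved via the "import" condition (./lib/index.js, ./lib/index.d.ts)
import { makeProviders } from '@movie-web/providers';

// CommonJS: resolved via the "require" condition (./lib/index.umd.cjs)
// const { makeProviders } = require('@movie-web/providers');

console.log(typeof makeProviders); // 'function'
```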
pnpm-lock.yaml (generated, new file, 4346 lines): diff suppressed because it is too large
@@ -1,39 +0,0 @@
|
|||||||
import { serializeBody } from "@/fetchers/body";
|
|
||||||
import FormData from "form-data";
|
|
||||||
import { describe, expect, it } from "vitest";
|
|
||||||
|
|
||||||
describe("serializeBody()", () => {
|
|
||||||
it('should work with standard text', () => {
|
|
||||||
expect(serializeBody("hello world")).toEqual({
|
|
||||||
headers: {},
|
|
||||||
body: "hello world"
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should work with objects', () => {
|
|
||||||
expect(serializeBody({ hello: "world", a: 42 })).toEqual({
|
|
||||||
headers: {
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
},
|
|
||||||
body: JSON.stringify({ hello: "world", a: 42 })
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should work x-www-form-urlencoded', () => {
|
|
||||||
const obj = new URLSearchParams()
|
|
||||||
obj.set("a", "b");
|
|
||||||
expect(serializeBody(obj)).toEqual({
|
|
||||||
headers: {},
|
|
||||||
body: obj
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should work multipart/form-data', () => {
|
|
||||||
const obj = new FormData()
|
|
||||||
obj.append("a", "b");
|
|
||||||
expect(serializeBody(obj)).toEqual({
|
|
||||||
headers: {},
|
|
||||||
body: obj
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
@@ -1,125 +0,0 @@
|
|||||||
import { makeSimpleProxyFetcher } from "@/fetchers/simpleProxy";
|
|
||||||
import { DefaultedFetcherOptions, FetcherOptions } from "@/fetchers/types";
|
|
||||||
import { Headers } from "node-fetch";
|
|
||||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
|
||||||
|
|
||||||
describe("makeSimpleProxyFetcher()", () => {
|
|
||||||
const fetch = vi.fn();
|
|
||||||
const fetcher = makeSimpleProxyFetcher("https://example.com/proxy", fetch);
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
vi.clearAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
function setResult(type: "text" | "json", value: any) {
|
|
||||||
if (type === 'text') return fetch.mockResolvedValueOnce({
|
|
||||||
headers: new Headers({
|
|
||||||
"content-type": "text/plain",
|
|
||||||
}),
|
|
||||||
text() {
|
|
||||||
return Promise.resolve(value);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
if (type === 'json') return fetch.mockResolvedValueOnce({
|
|
||||||
headers: new Headers({
|
|
||||||
"content-type": "application/json",
|
|
||||||
}),
|
|
||||||
json() {
|
|
||||||
return Promise.resolve(value);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function expectFetchCall(ops: { inputUrl: string, input: DefaultedFetcherOptions, outputUrl?: string, output: any, outputBody: any }) {
|
|
||||||
expect(fetcher(ops.inputUrl, ops.input)).resolves.toEqual(ops.outputBody);
|
|
||||||
expect(fetch).toBeCalledWith(ops.outputUrl ?? ops.inputUrl, ops.output);
|
|
||||||
vi.clearAllMocks();
|
|
||||||
}
|
|
||||||
|
|
||||||
it('should pass options through', () => {
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
method: "GET",
|
|
||||||
query: {},
|
|
||||||
headers: {
|
|
||||||
"X-Hello": "world",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {
|
|
||||||
"X-Hello": "world",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
query: {
|
|
||||||
"a": 'b',
|
|
||||||
}
|
|
||||||
},
|
|
||||||
outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/?a=b')}`,
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
method: "GET",
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should parse response correctly', () => {
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com/",
|
|
||||||
input: {
|
|
||||||
method: "POST",
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
|
|
||||||
output: {
|
|
||||||
method: "POST",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("json", { hello: 42 });
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com/",
|
|
||||||
input: {
|
|
||||||
method: "POST",
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
|
|
||||||
output: {
|
|
||||||
method: "POST",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: { hello: 42 }
|
|
||||||
})
|
|
||||||
});
|
|
||||||
});
|
|
@@ -1,125 +0,0 @@
|
|||||||
import { makeStandardFetcher } from "@/fetchers/standardFetch";
|
|
||||||
import { DefaultedFetcherOptions } from "@/fetchers/types";
|
|
||||||
import { Headers } from "node-fetch";
|
|
||||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
|
||||||
|
|
||||||
describe("makeStandardFetcher()", () => {
|
|
||||||
const fetch = vi.fn();
|
|
||||||
const fetcher = makeStandardFetcher(fetch);
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
vi.clearAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
function setResult(type: "text" | "json", value: any) {
|
|
||||||
if (type === 'text') return fetch.mockResolvedValueOnce({
|
|
||||||
headers: new Headers({
|
|
||||||
"content-type": "text/plain",
|
|
||||||
}),
|
|
||||||
text() {
|
|
||||||
return Promise.resolve(value);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
if (type === 'json') return fetch.mockResolvedValueOnce({
|
|
||||||
headers: new Headers({
|
|
||||||
"content-type": "application/json",
|
|
||||||
}),
|
|
||||||
json() {
|
|
||||||
return Promise.resolve(value);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function expectFetchCall(ops: { inputUrl: string, input: DefaultedFetcherOptions, outputUrl?: string, output: any, outputBody: any }) {
|
|
||||||
expect(fetcher(ops.inputUrl, ops.input)).resolves.toEqual(ops.outputBody);
|
|
||||||
expect(fetch).toBeCalledWith(ops.outputUrl ?? ops.inputUrl, ops.output);
|
|
||||||
vi.clearAllMocks();
|
|
||||||
}
|
|
||||||
|
|
||||||
it('should pass options through', () => {
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
method: "GET",
|
|
||||||
query: {},
|
|
||||||
headers: {
|
|
||||||
"X-Hello": "world",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
outputUrl: "https://google.com/",
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {
|
|
||||||
"X-Hello": "world",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
query: {
|
|
||||||
"a": 'b',
|
|
||||||
}
|
|
||||||
},
|
|
||||||
outputUrl: "https://google.com/?a=b",
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com",
|
|
||||||
input: {
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
method: "GET"
|
|
||||||
},
|
|
||||||
outputUrl: "https://google.com/",
|
|
||||||
output: {
|
|
||||||
method: "GET",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should parse response correctly', () => {
|
|
||||||
setResult("text", "hello world");
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com/",
|
|
||||||
input: {
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
method: "POST"
|
|
||||||
},
|
|
||||||
outputUrl: "https://google.com/",
|
|
||||||
output: {
|
|
||||||
method: "POST",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: "hello world"
|
|
||||||
})
|
|
||||||
setResult("json", { hello: 42 });
|
|
||||||
expectFetchCall({
|
|
||||||
inputUrl: "https://google.com/",
|
|
||||||
input: {
|
|
||||||
query: {},
|
|
||||||
headers: {},
|
|
||||||
method: "POST"
|
|
||||||
},
|
|
||||||
outputUrl: "https://google.com/",
|
|
||||||
output: {
|
|
||||||
method: "POST",
|
|
||||||
headers: {},
|
|
||||||
},
|
|
||||||
outputBody: { hello: 42 }
|
|
||||||
})
|
|
||||||
});
|
|
||||||
});
|
|
src/__test__/providers/embedUtils.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
|
|||||||
|
import { buildProviders } from '@/entrypoint/builder';
|
||||||
|
import { ScrapeMedia } from '@/entrypoint/utils/media';
|
||||||
|
import { targets } from '@/entrypoint/utils/targets';
|
||||||
|
import { makeStandardFetcher } from '@/fetchers/standardFetch';
|
||||||
|
import { Embed, Sourcerer, SourcererEmbed } from '@/providers/base';
|
||||||
|
import { TestTypes } from './providerUtils';
|
||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
import { ProviderControls } from '@/entrypoint/controls';
|
||||||
|
import { makeSimpleProxyFetcher } from '@/fetchers/simpleProxy';
|
||||||
|
|
||||||
|
export interface TestEmbedOptions {
|
||||||
|
embed: Embed;
|
||||||
|
source: Sourcerer;
|
||||||
|
testSuite: ScrapeMedia[];
|
||||||
|
types: TestTypes[];
|
||||||
|
debug?: boolean;
|
||||||
|
expect: {
|
||||||
|
embeds: number;
|
||||||
|
streams?: number;
|
||||||
|
error?: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeBaseEmbedProviders() {
|
||||||
|
const builder = buildProviders().setTarget(targets.ANY).setFetcher(makeStandardFetcher(fetch));
|
||||||
|
return builder;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function testEmbed(ops: TestEmbedOptions) {
|
||||||
|
if (ops.testSuite.length === 0) throw new Error('Test suite must have at least one test');
|
||||||
|
describe(`embed:${ops.source.id}:${ops.embed.id}`, () => {
|
||||||
|
ops.testSuite.forEach((test) => {
|
||||||
|
describe(`test ${test.title}`, async () => {
|
||||||
|
async function gatherEmbeds(providers: ProviderControls): Promise<SourcererEmbed[]> {
|
||||||
|
const results = await providers.runSourceScraper({
|
||||||
|
id: ops.source.id,
|
||||||
|
media: test,
|
||||||
|
});
|
||||||
|
if (results.embeds.length !== ops.expect.embeds)
|
||||||
|
throw new Error(
|
||||||
|
`Embeds don't match expected amount of embeds (${ops.source.id}, ${ops.embed.id}, got ${results.embeds.length} but expected ${ops.expect.embeds})`,
|
||||||
|
);
|
||||||
|
return results.embeds;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runTest(providers: ProviderControls, embedUrl: string) {
|
||||||
|
let hasError = false;
|
||||||
|
let streamCount = 0;
|
||||||
|
try {
|
||||||
|
const result = await providers.runEmbedScraper({
|
||||||
|
id: ops.embed.id,
|
||||||
|
url: embedUrl,
|
||||||
|
});
|
||||||
|
if (ops.debug) console.log(result);
|
||||||
|
streamCount = (result.stream ?? []).length;
|
||||||
|
} catch (err) {
|
||||||
|
if (ops.debug) console.log(err);
|
||||||
|
hasError = true;
|
||||||
|
}
|
||||||
|
expect(ops.expect.error ?? false).toBe(hasError);
|
||||||
|
expect(ops.expect.streams ?? 0).toBe(streamCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const t of ops.types) {
|
||||||
|
const builder = makeBaseEmbedProviders().addSource(ops.source).addEmbed(ops.embed);
|
||||||
|
if (t === 'standard') {
|
||||||
|
} else if (t === 'ip:standard') builder.enableConsistentIpForRequests();
|
||||||
|
else if (t === 'proxied') {
|
||||||
|
if (!process.env.MOVIE_WEB_PROXY_URL)
|
||||||
|
throw new Error('Cant use proxied test without setting MOVIE_WEB_PROXY_URL env');
|
||||||
|
builder.setProxiedFetcher(makeSimpleProxyFetcher(process.env.MOVIE_WEB_PROXY_URL, fetch));
|
||||||
|
}
|
||||||
|
const providers = builder.build();
|
||||||
|
try {
|
||||||
|
const embeds = await gatherEmbeds(providers);
|
||||||
|
embeds.forEach((embed, i) => {
|
||||||
|
it(`${t} - embed ${i}`, async () => {
|
||||||
|
await runTest(providers, embed.url);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
it(`${t} - embed ??`, () => {
|
||||||
|
throw new Error('Failed to get streams: ' + err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
src/__test__/providers/embeds.test.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
|
|||||||
|
import dotenv from 'dotenv';
|
||||||
|
import { febboxMp4Scraper } from '@/providers/embeds/febbox/mp4';
|
||||||
|
import { testEmbed } from './embedUtils';
|
||||||
|
import { showboxScraper } from '@/providers/sources/showbox';
|
||||||
|
import { testMedia } from './testMedia';
|
||||||
|
import { flixhqScraper } from '@/providers/sources/flixhq';
|
||||||
|
import { upcloudScraper } from '@/providers/embeds/upcloud';
|
||||||
|
import { goMoviesScraper } from '@/providers/sources/gomovies';
|
||||||
|
import { smashyStreamScraper } from '@/providers/sources/smashystream';
|
||||||
|
import { smashyStreamDScraper } from '@/providers/embeds/smashystream/dued';
|
||||||
|
import { vidsrcembedScraper } from '@/providers/embeds/vidsrc';
|
||||||
|
import { vidsrcScraper } from '@/providers/sources/vidsrc';
|
||||||
|
import { vidSrcToScraper } from '@/providers/sources/vidsrcto';
|
||||||
|
import { vidplayScraper } from '@/providers/embeds/vidplay';
|
||||||
|
import { fileMoonScraper } from '@/providers/embeds/filemoon';
|
||||||
|
import { zoechipScraper } from '@/providers/sources/zoechip';
|
||||||
|
import { mixdropScraper } from '@/providers/embeds/mixdrop';
|
||||||
|
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: febboxMp4Scraper,
|
||||||
|
source: showboxScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: upcloudScraper,
|
||||||
|
source: flixhqScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: upcloudScraper,
|
||||||
|
source: goMoviesScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: smashyStreamDScraper,
|
||||||
|
source: smashyStreamScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: vidsrcembedScraper,
|
||||||
|
source: vidsrcScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: vidplayScraper,
|
||||||
|
source: vidSrcToScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: fileMoonScraper,
|
||||||
|
source: vidSrcToScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: upcloudScraper,
|
||||||
|
source: zoechipScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 2,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testEmbed({
|
||||||
|
embed: mixdropScraper,
|
||||||
|
source: zoechipScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 2,
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
src/__test__/providers/providerUtils.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
|
|||||||
|
import { ScrapeMedia } from '@/entrypoint/utils/media';
|
||||||
|
import { Embed, Sourcerer, SourcererEmbed } from '@/providers/base';
|
||||||
|
import { buildProviders } from '@/entrypoint/builder';
|
||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
import { makeStandardFetcher } from '@/fetchers/standardFetch';
|
||||||
|
import { ProviderControls } from '@/entrypoint/controls';
|
||||||
|
import { NotFoundError } from '@/utils/errors';
|
||||||
|
import { targets } from '@/entrypoint/utils/targets';
|
||||||
|
import { getBuiltinEmbeds } from '@/entrypoint/providers';
|
||||||
|
import { makeSimpleProxyFetcher } from '@/fetchers/simpleProxy';
|
||||||
|
|
||||||
|
export type TestTypes = 'standard' | 'ip:standard' | 'proxied';
|
||||||
|
|
||||||
|
export interface TestSourceOptions {
|
||||||
|
source: Sourcerer;
|
||||||
|
testSuite: ScrapeMedia[];
|
||||||
|
types: TestTypes[];
|
||||||
|
debug?: boolean;
|
||||||
|
expect: {
|
||||||
|
embeds?: number;
|
||||||
|
streams?: number;
|
||||||
|
error?: boolean;
|
||||||
|
notfound?: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeBaseProviders() {
|
||||||
|
const builder = buildProviders().setTarget(targets.ANY).setFetcher(makeStandardFetcher(fetch));
|
||||||
|
const embeds = getBuiltinEmbeds();
|
||||||
|
embeds.forEach((embed) => builder.addEmbed(embed));
|
||||||
|
return builder;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function testSource(ops: TestSourceOptions) {
|
||||||
|
if (ops.testSuite.length === 0) throw new Error('Test suite must have at least one test');
|
||||||
|
describe(`source:${ops.source.id}`, () => {
|
||||||
|
ops.testSuite.forEach((test) => {
|
||||||
|
describe(`test ${test.title}`, () => {
|
||||||
|
async function runTest(providers: ProviderControls) {
|
||||||
|
let hasNotFound = false;
|
||||||
|
let hasError = false;
|
||||||
|
let streamCount = 0;
|
||||||
|
let embedCount = 0;
|
||||||
|
let embeds = [];
|
||||||
|
try {
|
||||||
|
const result = await providers.runSourceScraper({
|
||||||
|
id: ops.source.id,
|
||||||
|
media: test,
|
||||||
|
});
|
||||||
|
if (ops.debug) console.log(result);
|
||||||
|
streamCount = (result.stream ?? []).length;
|
||||||
|
embedCount = result.embeds.length;
|
||||||
|
} catch (err) {
|
||||||
|
if (ops.debug) console.log(err);
|
||||||
|
if (err instanceof NotFoundError) hasNotFound = true;
|
||||||
|
else hasError = true;
|
||||||
|
}
|
||||||
|
expect(ops.expect.error ?? false).toBe(hasError);
|
||||||
|
expect(ops.expect.notfound ?? false).toBe(hasNotFound);
|
||||||
|
expect(ops.expect.streams ?? 0).toBe(streamCount);
|
||||||
|
expect(ops.expect.embeds ?? 0).toBe(embedCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ops.types.includes('standard')) {
|
||||||
|
it(`standard`, async () => {
|
||||||
|
const providers = makeBaseProviders().addSource(ops.source).build();
|
||||||
|
await runTest(providers);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ops.types.includes('ip:standard')) {
|
||||||
|
it(`standard:ip`, async () => {
|
||||||
|
const providers = makeBaseProviders().addSource(ops.source).enableConsistentIpForRequests().build();
|
||||||
|
await runTest(providers);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ops.types.includes('proxied')) {
|
||||||
|
it(`proxied`, async () => {
|
||||||
|
if (!process.env.MOVIE_WEB_PROXY_URL)
|
||||||
|
throw new Error('Cant use proxied test without setting MOVIE_WEB_PROXY_URL env');
|
||||||
|
const providers = makeBaseProviders()
|
||||||
|
.addSource(ops.source)
|
||||||
|
.setProxiedFetcher(makeSimpleProxyFetcher(process.env.MOVIE_WEB_PROXY_URL, fetch))
|
||||||
|
.build();
|
||||||
|
await runTest(providers);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
src/__test__/providers/providers.test.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
|
|||||||
|
import { testSource } from './providerUtils';
|
||||||
|
import { lookmovieScraper } from '@/providers/sources/lookmovie';
|
||||||
|
import { testMedia } from './testMedia';
|
||||||
|
import { showboxScraper } from '@/providers/sources/showbox';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
import { flixhqScraper } from '@/providers/sources/flixhq';
|
||||||
|
import { goMoviesScraper } from '@/providers/sources/gomovies';
|
||||||
|
import { smashyStreamScraper } from '@/providers/sources/smashystream';
|
||||||
|
import { vidsrcScraper } from '@/providers/sources/vidsrc';
|
||||||
|
import { vidSrcToScraper } from '@/providers/sources/vidsrcto';
|
||||||
|
import { zoechipScraper } from '@/providers/sources/zoechip';
|
||||||
|
import { remotestreamScraper } from '@/providers/sources/remotestream';
|
||||||
|
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: lookmovieScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['ip:standard'],
|
||||||
|
expect: {
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: showboxScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: flixhqScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: goMoviesScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: smashyStreamScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: vidsrcScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: vidSrcToScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 2,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: zoechipScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
embeds: 3,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
testSource({
|
||||||
|
source: remotestreamScraper,
|
||||||
|
testSuite: [testMedia.arcane, testMedia.hamilton],
|
||||||
|
types: ['standard', 'proxied'],
|
||||||
|
expect: {
|
||||||
|
streams: 1,
|
||||||
|
},
|
||||||
|
});
|
src/__test__/providers/testMedia.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
+import { ScrapeMedia } from '@/entrypoint/utils/media';
+
+function makeMedia(media: ScrapeMedia): ScrapeMedia {
+  return media;
+}
+
+export const testMedia = {
+  arcane: makeMedia({
+    type: 'show',
+    title: 'Arcane',
+    tmdbId: '94605',
+    releaseYear: 2021,
+    episode: {
+      number: 1,
+      tmdbId: '1953812',
+    },
+    season: {
+      number: 1,
+      tmdbId: '134187',
+    },
+    imdbId: 'tt11126994',
+  }),
+  hamilton: makeMedia({
+    type: 'movie',
+    tmdbId: '556574',
+    imdbId: 'tt8503618',
+    releaseYear: 2020,
+    title: 'Hamilton',
+  }),
+};
src/__test__/standard/fetchers/body.test.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
|
|||||||
|
import { serializeBody } from '@/fetchers/body';
|
||||||
|
import FormData from 'form-data';
|
||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
|
||||||
|
describe('serializeBody()', () => {
|
||||||
|
it('should work with standard text', () => {
|
||||||
|
expect(serializeBody('hello world')).toEqual({
|
||||||
|
headers: {},
|
||||||
|
body: 'hello world',
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should work with objects', () => {
|
||||||
|
expect(serializeBody({ hello: 'world', a: 42 })).toEqual({
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ hello: 'world', a: 42 }),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should work x-www-form-urlencoded', () => {
|
||||||
|
const obj = new URLSearchParams();
|
||||||
|
obj.set('a', 'b');
|
||||||
|
expect(serializeBody(obj)).toEqual({
|
||||||
|
headers: {},
|
||||||
|
body: obj,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should work multipart/form-data', () => {
|
||||||
|
const obj = new FormData();
|
||||||
|
obj.append('a', 'b');
|
||||||
|
expect(serializeBody(obj)).toEqual({
|
||||||
|
headers: {},
|
||||||
|
body: obj,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@@ -1,48 +1,62 @@
|
|||||||
import { makeFullUrl } from "@/fetchers/common";
|
import { makeFullUrl } from '@/fetchers/common';
|
||||||
import { describe, expect, it } from "vitest";
|
import { describe, expect, it } from 'vitest';
|
||||||
|
|
||||||
describe("makeFullUrl()", () => {
|
describe('makeFullUrl()', () => {
|
||||||
it('should pass normal url if no options', () => {
|
it('should pass normal url if no options', () => {
|
||||||
expect(makeFullUrl('https://example.com/hello/world')).toEqual("https://example.com/hello/world")
|
expect(makeFullUrl('https://example.com/hello/world')).toEqual('https://example.com/hello/world');
|
||||||
expect(makeFullUrl('https://example.com/hello/world?a=b')).toEqual("https://example.com/hello/world?a=b")
|
expect(makeFullUrl('https://example.com/hello/world?a=b')).toEqual('https://example.com/hello/world?a=b');
|
||||||
expect(makeFullUrl('https://example.com/hello/world?a=b#hello')).toEqual("https://example.com/hello/world?a=b#hello")
|
expect(makeFullUrl('https://example.com/hello/world?a=b#hello')).toEqual(
|
||||||
expect(makeFullUrl('https://example.com/hello/world#hello')).toEqual("https://example.com/hello/world#hello")
|
'https://example.com/hello/world?a=b#hello',
|
||||||
})
|
);
|
||||||
|
expect(makeFullUrl('https://example.com/hello/world#hello')).toEqual('https://example.com/hello/world#hello');
|
||||||
|
});
|
||||||
|
|
||||||
it('should append baseurl correctly', () => {
|
it('should append baseurl correctly', () => {
|
||||||
const correctResult = "https://example.com/hello/world";
|
const correctResult = 'https://example.com/hello/world';
|
||||||
expect(makeFullUrl(correctResult, { baseUrl: '' })).toEqual(correctResult)
|
expect(makeFullUrl(correctResult, { baseUrl: '' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult)
|
expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult)
|
expect(makeFullUrl('/hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult)
|
expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com/' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult)
|
expect(makeFullUrl('hello/world', { baseUrl: 'https://example.com' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult)
|
expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult)
|
expect(makeFullUrl('/world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult);
|
||||||
expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult)
|
     expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello/' })).toEqual(correctResult);
-    expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult)
-    expect(makeFullUrl('world?a=b', { baseUrl: 'https://example.com/hello' })).toEqual("https://example.com/hello/world?a=b")
-  })
+    expect(makeFullUrl('world', { baseUrl: 'https://example.com/hello' })).toEqual(correctResult);
+    expect(makeFullUrl('world?a=b', { baseUrl: 'https://example.com/hello' })).toEqual(
+      'https://example.com/hello/world?a=b',
+    );
+  });

   it('should throw with invalid baseurl combinations', () => {
-    expect(() => makeFullUrl('example.com/hello/world', { baseUrl: '' })).toThrowError()
-    expect(() => makeFullUrl('/hello/world', { baseUrl: 'example.com' })).toThrowError()
-    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError()
-    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError()
-  })
+    expect(() => makeFullUrl('example.com/hello/world', { baseUrl: '' })).toThrowError();
+    expect(() => makeFullUrl('/hello/world', { baseUrl: 'example.com' })).toThrowError();
+    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError();
+    expect(() => makeFullUrl('/hello/world', { baseUrl: 'tcp://example.com' })).toThrowError();
+  });

   it('should add/merge query parameters', () => {
-    expect(makeFullUrl('https://example.com/hello/world', { query: { a: 'b' } })).toEqual("https://example.com/hello/world?a=b")
-    expect(makeFullUrl('https://example.com/hello/world/', { query: { a: 'b' } })).toEqual("https://example.com/hello/world/?a=b")
-    expect(makeFullUrl('https://example.com', { query: { a: 'b' } })).toEqual("https://example.com/?a=b")
-    expect(makeFullUrl('https://example.com/', { query: { a: 'b' } })).toEqual("https://example.com/?a=b")
+    expect(makeFullUrl('https://example.com/hello/world', { query: { a: 'b' } })).toEqual(
+      'https://example.com/hello/world?a=b',
+    );
+    expect(makeFullUrl('https://example.com/hello/world/', { query: { a: 'b' } })).toEqual(
+      'https://example.com/hello/world/?a=b',
+    );
+    expect(makeFullUrl('https://example.com', { query: { a: 'b' } })).toEqual('https://example.com/?a=b');
+    expect(makeFullUrl('https://example.com/', { query: { a: 'b' } })).toEqual('https://example.com/?a=b');

-    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: { a: 'b' } })).toEqual("https://example.com/hello/world?c=d&a=b")
-    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: {} })).toEqual("https://example.com/hello/world?c=d")
-    expect(makeFullUrl('https://example.com/hello/world?c=d')).toEqual("https://example.com/hello/world?c=d")
-    expect(makeFullUrl('https://example.com/hello/world?c=d', {})).toEqual("https://example.com/hello/world?c=d")
-  })
+    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: { a: 'b' } })).toEqual(
+      'https://example.com/hello/world?c=d&a=b',
+    );
+    expect(makeFullUrl('https://example.com/hello/world?c=d', { query: {} })).toEqual(
+      'https://example.com/hello/world?c=d',
+    );
+    expect(makeFullUrl('https://example.com/hello/world?c=d')).toEqual('https://example.com/hello/world?c=d');
+    expect(makeFullUrl('https://example.com/hello/world?c=d', {})).toEqual('https://example.com/hello/world?c=d');
+  });

   it('should work with a mix of multiple options', () => {
-    expect(makeFullUrl('/hello/world?c=d', { baseUrl: 'https://example.com/', query: { a: 'b' } })).toEqual("https://example.com/hello/world?c=d&a=b")
-  })
-})
+    expect(makeFullUrl('/hello/world?c=d', { baseUrl: 'https://example.com/', query: { a: 'b' } })).toEqual(
+      'https://example.com/hello/world?c=d&a=b',
+    );
+  });
+});
src/__test__/standard/fetchers/simpleProxy.test.ts (new file, 148 lines)
@@ -0,0 +1,148 @@
import { makeSimpleProxyFetcher } from '@/fetchers/simpleProxy';
import { DefaultedFetcherOptions, FetcherOptions } from '@/fetchers/types';
import { Headers } from 'node-fetch';
import { afterEach, describe, expect, it, vi } from 'vitest';

describe('makeSimpleProxyFetcher()', () => {
  const fetch = vi.fn();
  const fetcher = makeSimpleProxyFetcher('https://example.com/proxy', fetch);

  afterEach(() => {
    vi.clearAllMocks();
  });

  function setResult(type: 'text' | 'json', value: any) {
    if (type === 'text')
      return fetch.mockResolvedValueOnce({
        headers: new Headers({
          'content-type': 'text/plain',
        }),
        status: 204,
        url: 'test123',
        text() {
          return Promise.resolve(value);
        },
      });
    if (type === 'json')
      return fetch.mockResolvedValueOnce({
        headers: new Headers({
          'content-type': 'application/json',
        }),
        status: 204,
        url: 'test123',
        json() {
          return Promise.resolve(value);
        },
      });
  }

  function expectFetchCall(ops: {
    inputUrl: string;
    input: DefaultedFetcherOptions;
    outputUrl?: string;
    output: any;
    outputBody: any;
  }) {
    const prom = fetcher(ops.inputUrl, ops.input);
    expect((async () => (await prom).body)()).resolves.toEqual(ops.outputBody);
    expect((async () => Array.from((await prom).headers.entries()))()).resolves.toEqual(
      Array.from(new Headers().entries()),
    );
    expect((async () => (await prom).statusCode)()).resolves.toEqual(204);
    expect((async () => (await prom).finalUrl)()).resolves.toEqual('test123');
    expect(fetch).toBeCalledWith(ops.outputUrl ?? ops.inputUrl, ops.output);
    vi.clearAllMocks();
  }

  it('should pass options through', () => {
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        method: 'GET',
        query: {},
        readHeaders: [],
        headers: {
          'X-Hello': 'world',
        },
      },
      outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
      output: {
        method: 'GET',
        headers: {
          'X-Hello': 'world',
        },
      },
      outputBody: 'hello world',
    });
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        method: 'GET',
        headers: {},
        readHeaders: [],
        query: {
          a: 'b',
        },
      },
      outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/?a=b')}`,
      output: {
        method: 'GET',
        headers: {},
      },
      outputBody: 'hello world',
    });
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        method: 'GET',
        query: {},
        readHeaders: [],
        headers: {},
      },
      outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
      output: {
        method: 'GET',
        headers: {},
      },
      outputBody: 'hello world',
    });
  });

  it('should parse response correctly', () => {
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com/',
      input: {
        method: 'POST',
        query: {},
        readHeaders: [],
        headers: {},
      },
      outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
      output: {
        method: 'POST',
        headers: {},
      },
      outputBody: 'hello world',
    });
    setResult('json', { hello: 42 });
    expectFetchCall({
      inputUrl: 'https://google.com/',
      input: {
        method: 'POST',
        query: {},
        readHeaders: [],
        headers: {},
      },
      outputUrl: `https://example.com/proxy?destination=${encodeURIComponent('https://google.com/')}`,
      output: {
        method: 'POST',
        headers: {},
      },
      outputBody: { hello: 42 },
    });
  });
});
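For orientation, a minimal usage sketch of the proxy fetcher exercised above. The proxy URL is a placeholder and the published package name in the import is an assumption; the call shape follows the browser dev CLI entry point later in this diff.

import { makeProviders, makeSimpleProxyFetcher, makeStandardFetcher, targets } from '@movie-web/providers';

// Placeholder proxy URL; the fetcher rewrites requests to
// `<proxyUrl>?destination=<encoded target URL>`, which is what the test above asserts.
const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  proxiedFetcher: makeSimpleProxyFetcher('https://my-proxy.example.com/proxy', fetch),
  target: targets.BROWSER,
});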
src/__test__/standard/fetchers/standard.test.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
import { makeStandardFetcher } from '@/fetchers/standardFetch';
import { DefaultedFetcherOptions } from '@/fetchers/types';
import { Headers } from 'node-fetch';
import { afterEach, describe, expect, it, vi } from 'vitest';

describe('makeStandardFetcher()', () => {
  const fetch = vi.fn();
  const fetcher = makeStandardFetcher(fetch);

  afterEach(() => {
    vi.clearAllMocks();
  });

  function setResult(type: 'text' | 'json', value: any) {
    if (type === 'text')
      return fetch.mockResolvedValueOnce({
        headers: new Headers({
          'content-type': 'text/plain',
        }),
        status: 204,
        url: 'test123',
        text() {
          return Promise.resolve(value);
        },
      });
    if (type === 'json')
      return fetch.mockResolvedValueOnce({
        headers: new Headers({
          'content-type': 'application/json',
        }),
        status: 204,
        url: 'test123',
        json() {
          return Promise.resolve(value);
        },
      });
  }

  function expectFetchCall(ops: {
    inputUrl: string;
    input: DefaultedFetcherOptions;
    outputUrl?: string;
    output: any;
    outputBody: any;
  }) {
    const prom = fetcher(ops.inputUrl, ops.input);
    expect((async () => (await prom).body)()).resolves.toEqual(ops.outputBody);
    expect((async () => Array.from((await prom).headers.entries()))()).resolves.toEqual(
      Array.from(new Headers().entries()),
    );
    expect((async () => (await prom).statusCode)()).resolves.toEqual(204);
    expect((async () => (await prom).finalUrl)()).resolves.toEqual('test123');
    expect(fetch).toBeCalledWith(ops.outputUrl ?? ops.inputUrl, ops.output);
    vi.clearAllMocks();
  }

  it('should pass options through', () => {
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        method: 'GET',
        query: {},
        readHeaders: [],
        headers: {
          'X-Hello': 'world',
        },
      },
      outputUrl: 'https://google.com/',
      output: {
        method: 'GET',
        headers: {
          'X-Hello': 'world',
        },
        body: undefined,
      },
      outputBody: 'hello world',
    });
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        method: 'GET',
        headers: {},
        readHeaders: [],
        query: {
          a: 'b',
        },
      },
      outputUrl: 'https://google.com/?a=b',
      output: {
        method: 'GET',
        headers: {},
      },
      outputBody: 'hello world',
    });
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com',
      input: {
        query: {},
        headers: {},
        readHeaders: [],
        method: 'GET',
      },
      outputUrl: 'https://google.com/',
      output: {
        method: 'GET',
        headers: {},
      },
      outputBody: 'hello world',
    });
  });

  it('should parse response correctly', () => {
    setResult('text', 'hello world');
    expectFetchCall({
      inputUrl: 'https://google.com/',
      input: {
        query: {},
        headers: {},
        readHeaders: [],
        method: 'POST',
      },
      outputUrl: 'https://google.com/',
      output: {
        method: 'POST',
        headers: {},
      },
      outputBody: 'hello world',
    });
    setResult('json', { hello: 42 });
    expectFetchCall({
      inputUrl: 'https://google.com/',
      input: {
        query: {},
        headers: {},
        readHeaders: [],
        method: 'POST',
      },
      outputUrl: 'https://google.com/',
      output: {
        method: 'POST',
        headers: {},
      },
      outputBody: { hello: 42 },
    });
  });
});
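Similarly, a short sketch of the standard fetcher in use, mirroring how the dev CLI in this same diff builds its providers (node-fetch handed to makeStandardFetcher); the package import path is an assumption.

import nodeFetch from 'node-fetch';
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

// makeStandardFetcher adapts a fetch implementation (native fetch or node-fetch) into the
// fetcher interface the providers expect; query params are appended to the URL, as the
// test above checks with 'https://google.com/?a=b'.
const providers = makeProviders({
  fetcher: makeStandardFetcher(nodeFetch),
  target: targets.NATIVE,
});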
@@ -2,7 +2,7 @@
 import { vi } from 'vitest';

 import { gatherAllEmbeds, gatherAllSources } from '@/providers/all';
-import { Embed, Sourcerer } from '@/providers/base';
+import { makeEmbed, makeSourcerer } from '@/providers/base';

 export function makeProviderMocks() {
   const embedsMock = vi.fn<Parameters<typeof gatherAllEmbeds>, ReturnType<typeof gatherAllEmbeds>>();
@@ -13,91 +13,104 @@ export function makeProviderMocks() {
   };
 }

-const sourceA = {
+const sourceA = makeSourcerer({
   id: 'a',
+  name: 'A',
   rank: 1,
   disabled: false,
-} as Sourcerer;
-const sourceB = {
+  flags: [],
+});
+const sourceB = makeSourcerer({
   id: 'b',
+  name: 'B',
   rank: 2,
   disabled: false,
-} as Sourcerer;
-const sourceCDisabled = {
+  flags: [],
+});
+const sourceCDisabled = makeSourcerer({
   id: 'c',
+  name: 'C',
   rank: 3,
   disabled: true,
-} as Sourcerer;
-const sourceAHigherRank = {
+  flags: [],
+});
+const sourceAHigherRank = makeSourcerer({
   id: 'a',
+  name: 'A',
   rank: 100,
   disabled: false,
-} as Sourcerer;
-const sourceGSameRankAsA = {
+  flags: [],
+});
+const sourceGSameRankAsA = makeSourcerer({
   id: 'g',
+  name: 'G',
   rank: 1,
   disabled: false,
-} as Sourcerer;
-const fullSourceYMovie = {
+  flags: [],
+});
+const fullSourceYMovie = makeSourcerer({
   id: 'y',
   name: 'Y',
   rank: 105,
   scrapeMovie: vi.fn(),
-} as Sourcerer;
-const fullSourceYShow = {
+  flags: [],
+});
+const fullSourceYShow = makeSourcerer({
   id: 'y',
   name: 'Y',
   rank: 105,
   scrapeShow: vi.fn(),
-} as Sourcerer;
-const fullSourceZBoth = {
+  flags: [],
+});
+const fullSourceZBoth = makeSourcerer({
   id: 'z',
   name: 'Z',
   rank: 106,
   scrapeMovie: vi.fn(),
   scrapeShow: vi.fn(),
-} as Sourcerer;
+  flags: [],
+});

-const embedD = {
+const embedD = makeEmbed({
   id: 'd',
   rank: 4,
   disabled: false,
-} as Embed;
-const embedA = {
+} as any);
+const embedA = makeEmbed({
   id: 'a',
   rank: 5,
   disabled: false,
-} as Embed;
-const embedEDisabled = {
+} as any);
+const embedEDisabled = makeEmbed({
   id: 'e',
   rank: 6,
   disabled: true,
-} as Embed;
-const embedDHigherRank = {
+} as any);
+const embedDHigherRank = makeEmbed({
   id: 'd',
   rank: 4000,
   disabled: false,
-} as Embed;
-const embedFSameRankAsA = {
+} as any);
+const embedFSameRankAsA = makeEmbed({
   id: 'f',
   rank: 5,
   disabled: false,
-} as Embed;
-const embedHSameRankAsSourceA = {
+} as any);
+const embedHSameRankAsSourceA = makeEmbed({
   id: 'h',
   rank: 1,
   disabled: false,
-} as Embed;
-const fullEmbedX = {
+} as any);
+const fullEmbedX = makeEmbed({
   id: 'x',
   name: 'X',
   rank: 104,
-} as Embed;
-const fullEmbedZ = {
+} as any);
+const fullEmbedZ = makeEmbed({
   id: 'z',
   name: 'Z',
   rank: 109,
-} as Embed;
+} as any);

 export const mockSources = {
   sourceA,
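The hunk above replaces object literals cast to Sourcerer/Embed with the makeSourcerer/makeEmbed builders. As a rough sketch of the builder shape, using only the fields that appear in these fixtures; the scraper body is a placeholder, not the library's real output contract.

import { makeSourcerer } from '@/providers/base';

const exampleSource = makeSourcerer({
  id: 'example',
  name: 'Example',
  rank: 50,
  disabled: false,
  flags: [],
  // Placeholder scraper; real sources return their discovered embeds/streams here.
  async scrapeMovie() {
    return { embeds: [] };
  },
});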
@@ -1,13 +1,16 @@
-import { mockEmbeds, mockSources } from '@/__test__/providerTests';
+import { mockEmbeds, mockSources } from '../providerTests';
+import { getBuiltinEmbeds, getBuiltinSources } from '@/entrypoint/providers';
+import { FeatureMap } from '@/entrypoint/utils/targets';
 import { getProviders } from '@/providers/get';
 import { vi, describe, it, expect, afterEach } from 'vitest';

-const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
+const mocks = await vi.hoisted(async () => (await import('../providerTests')).makeProviderMocks());
 vi.mock('@/providers/all', () => mocks);

-const features = {
+const features: FeatureMap = {
   requires: [],
-}
+  disallowed: [],
+};

 describe('getProviders()', () => {
   afterEach(() => {
@@ -17,7 +20,12 @@ describe('getProviders()', () => {
   it('should return providers', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(getProviders(features)).toEqual({
+    expect(
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toEqual({
       sources: [mockSources.sourceA, mockSources.sourceB],
       embeds: [mockEmbeds.embedD],
     });
@@ -26,7 +34,12 @@ describe('getProviders()', () => {
   it('should filter out disabled providers', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedEDisabled]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceCDisabled, mockSources.sourceB]);
-    expect(getProviders(features)).toEqual({
+    expect(
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toEqual({
       sources: [mockSources.sourceA, mockSources.sourceB],
       embeds: [mockEmbeds.embedD],
     });
@@ -35,31 +48,56 @@ describe('getProviders()', () => {
   it('should throw on duplicate ids in sources', () => {
     mocks.gatherAllEmbeds.mockReturnValue([]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceAHigherRank, mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders(features)).toThrowError();
+    expect(() =>
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toThrowError();
   });

   it('should throw on duplicate ids in embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedDHigherRank, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([]);
-    expect(() => getProviders(features)).toThrowError();
+    expect(() =>
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toThrowError();
   });

   it('should throw on duplicate ids between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders(features)).toThrowError();
+    expect(() =>
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toThrowError();
   });

   it('should throw on duplicate rank between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(() => getProviders(features)).toThrowError();
+    expect(() =>
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toThrowError();
   });

   it('should not throw with same rank between sources and embeds', () => {
     mocks.gatherAllEmbeds.mockReturnValue([mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA]);
     mocks.gatherAllSources.mockReturnValue([mockSources.sourceA, mockSources.sourceB]);
-    expect(getProviders(features)).toEqual({
+    expect(
+      getProviders(features, {
+        embeds: getBuiltinEmbeds(),
+        sources: getBuiltinSources(),
+      }),
+    ).toEqual({
       sources: [mockSources.sourceA, mockSources.sourceB],
       embeds: [mockEmbeds.embedD, mockEmbeds.embedHSameRankAsSourceA],
     });
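The updated tests call getProviders with an explicit provider list instead of relying on the built-ins implicitly. A condensed sketch of the new call shape, taken from the assertions above:

import { getBuiltinEmbeds, getBuiltinSources } from '@/entrypoint/providers';
import { getProviders } from '@/providers/get';

// FeatureMap now carries both `requires` and `disallowed`.
const list = getProviders(
  { requires: [], disallowed: [] },
  { embeds: getBuiltinEmbeds(), sources: getBuiltinSources() },
);
// list.sources / list.embeds contain the enabled scrapers; duplicate ids throw.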
@@ -1,6 +1,6 @@
-import { mockEmbeds, mockSources } from '@/__test__/providerTests';
+import { mockEmbeds, mockSources } from '../providerTests.ts';
-import { makeProviders } from '@/main/builder';
+import { makeProviders } from '@/entrypoint/declare';
-import { targets } from '@/main/targets.ts';
+import { targets } from '@/entrypoint/utils/targets';
 import { afterEach, describe, expect, it, vi } from 'vitest';

 const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
@@ -1,6 +1,6 @@
-import { mockEmbeds, mockSources } from '@/__test__/providerTests';
+import { mockEmbeds, mockSources } from '../providerTests.ts';
-import { makeProviders } from '@/main/builder';
+import { makeProviders } from '@/entrypoint/declare';
-import { targets } from '@/main/targets.ts';
+import { targets } from '@/entrypoint/utils/targets';
 import { afterEach, describe, expect, it, vi } from 'vitest';

 const mocks = await vi.hoisted(async () => (await import('../providerTests.ts')).makeProviderMocks());
src/__test__/standard/utils/features.test.ts (new file, 137 lines)
@@ -0,0 +1,137 @@
import { FeatureMap, Flags, flags, flagsAllowedInFeatures } from '@/entrypoint/utils/targets';
import { describe, it, expect } from 'vitest';

describe('flagsAllowedInFeatures()', () => {
  function checkFeatures(featureMap: FeatureMap, flags: Flags[], output: boolean) {
    expect(flagsAllowedInFeatures(featureMap, flags)).toEqual(output);
  }

  it('should check required correctly', () => {
    checkFeatures(
      {
        requires: [],
        disallowed: [],
      },
      [],
      true,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [],
      },
      [flags.CORS_ALLOWED],
      true,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [],
      },
      [],
      false,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED, flags.IP_LOCKED],
        disallowed: [],
      },
      [flags.CORS_ALLOWED, flags.IP_LOCKED],
      true,
    );
    checkFeatures(
      {
        requires: [flags.IP_LOCKED],
        disallowed: [],
      },
      [flags.CORS_ALLOWED],
      false,
    );
    checkFeatures(
      {
        requires: [flags.IP_LOCKED],
        disallowed: [],
      },
      [],
      false,
    );
  });

  it('should check disallowed correctly', () => {
    checkFeatures(
      {
        requires: [],
        disallowed: [],
      },
      [],
      true,
    );
    checkFeatures(
      {
        requires: [],
        disallowed: [flags.CORS_ALLOWED],
      },
      [],
      true,
    );
    checkFeatures(
      {
        requires: [],
        disallowed: [flags.CORS_ALLOWED],
      },
      [flags.CORS_ALLOWED],
      false,
    );
    checkFeatures(
      {
        requires: [],
        disallowed: [flags.CORS_ALLOWED],
      },
      [flags.IP_LOCKED],
      true,
    );
    checkFeatures(
      {
        requires: [],
        disallowed: [flags.CORS_ALLOWED, flags.IP_LOCKED],
      },
      [flags.CORS_ALLOWED],
      false,
    );
  });

  it('should pass mixed tests', () => {
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [flags.IP_LOCKED],
      },
      [],
      false,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [flags.IP_LOCKED],
      },
      [flags.CORS_ALLOWED],
      true,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [flags.IP_LOCKED],
      },
      [flags.IP_LOCKED],
      false,
    );
    checkFeatures(
      {
        requires: [flags.CORS_ALLOWED],
        disallowed: [flags.IP_LOCKED],
      },
      [flags.IP_LOCKED, flags.CORS_ALLOWED],
      false,
    );
  });
});
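In short, flagsAllowedInFeatures() checks a scraper's flags against a feature map: every flag in `requires` must be present, and none of the flags in `disallowed` may be. A tiny sketch built from the cases above:

import { flags, flagsAllowedInFeatures } from '@/entrypoint/utils/targets';

// CORS_ALLOWED is required and present, IP_LOCKED is disallowed and absent -> true
flagsAllowedInFeatures({ requires: [flags.CORS_ALLOWED], disallowed: [flags.IP_LOCKED] }, [flags.CORS_ALLOWED]);
// IP_LOCKED is present but disallowed -> false
flagsAllowedInFeatures({ requires: [flags.CORS_ALLOWED], disallowed: [flags.IP_LOCKED] }, [flags.IP_LOCKED, flags.CORS_ALLOWED]);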
@@ -1,16 +1,16 @@
-import { reorderOnIdList } from "@/utils/list";
-import { describe, it, expect } from "vitest";
+import { reorderOnIdList } from '@/utils/list';
+import { describe, it, expect } from 'vitest';

 function list(def: string) {
-  return def.split(",").map(v=>({
+  return def.split(',').map((v) => ({
     rank: parseInt(v),
     id: v,
-  }))
+  }));
 }

 function expectListToEqual(l1: ReturnType<typeof list>, l2: ReturnType<typeof list>) {
   function flatten(l: ReturnType<typeof list>) {
-    return l.map(v=>v.id).join(",");
+    return l.map((v) => v.id).join(',');
   }
   expect(flatten(l1)).toEqual(flatten(l2));
 }
@@ -18,36 +18,36 @@ function expectListToEqual(l1: ReturnType<typeof list>, l2: ReturnType<typeof li
 describe('reorderOnIdList()', () => {
   it('should reorder based on rank', () => {
     const l = list('2,1,4,3');
-    const sortedList = list('4,3,2,1')
+    const sortedList = list('4,3,2,1');
     expectListToEqual(reorderOnIdList([], l), sortedList);
   });

   it('should work with empty input', () => {
     expectListToEqual(reorderOnIdList([], []), []);
   });

   it('should reorder based on id list', () => {
     const l = list('4,2,1,3');
-    const sortedList = list('4,3,2,1')
-    expectListToEqual(reorderOnIdList(["4","3","2","1"], l), sortedList);
+    const sortedList = list('4,3,2,1');
+    expectListToEqual(reorderOnIdList(['4', '3', '2', '1'], l), sortedList);
   });

   it('should reorder based on id list and rank second', () => {
     const l = list('4,2,1,3');
-    const sortedList = list('4,3,2,1')
-    expectListToEqual(reorderOnIdList(["4","3"], l), sortedList);
+    const sortedList = list('4,3,2,1');
+    expectListToEqual(reorderOnIdList(['4', '3'], l), sortedList);
   });

   it('should work with only one item', () => {
     const l = list('1');
-    const sortedList = list('1')
-    expectListToEqual(reorderOnIdList(["1"], l), sortedList);
+    const sortedList = list('1');
+    expectListToEqual(reorderOnIdList(['1'], l), sortedList);
     expectListToEqual(reorderOnIdList([], l), sortedList);
   });

   it('should not affect original list', () => {
     const l = list('4,3,2,1');
-    const unsortedList = list('4,3,2,1')
+    const unsortedList = list('4,3,2,1');
     reorderOnIdList([], l);
     expectListToEqual(l, unsortedList);
   });
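As the tests spell out, reorderOnIdList() puts items whose ids appear in the given id list first, in that order, sorts the remainder by descending rank, and leaves the input array untouched. A small sketch with made-up items:

import { reorderOnIdList } from '@/utils/list';

const items = [
  { id: 'b', rank: 10 },
  { id: 'a', rank: 99 },
  { id: 'c', rank: 50 },
];

// 'c' is pinned first by the id list; 'a' and 'b' fall back to rank order.
reorderOnIdList(['c'], items); // ids in order: 'c', 'a', 'b'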
src/__test__/standard/utils/valid.test.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
import { isValidStream } from '@/utils/valid';
import { describe, it, expect } from 'vitest';

describe('isValidStream()', () => {
  it('should pass valid streams', () => {
    expect(
      isValidStream({
        type: 'file',
        id: 'a',
        flags: [],
        captions: [],
        qualities: {
          '1080': {
            type: 'mp4',
            url: 'hello-world',
          },
        },
      }),
    ).toBe(true);
    expect(
      isValidStream({
        type: 'hls',
        id: 'a',
        flags: [],
        captions: [],
        playlist: 'hello-world',
      }),
    ).toBe(true);
  });

  it('should detect empty qualities', () => {
    expect(
      isValidStream({
        type: 'file',
        id: 'a',
        flags: [],
        captions: [],
        qualities: {},
      }),
    ).toBe(false);
  });

  it('should detect empty stream urls', () => {
    expect(
      isValidStream({
        type: 'file',
        id: 'a',
        flags: [],
        captions: [],
        qualities: {
          '1080': {
            type: 'mp4',
            url: '',
          },
        },
      }),
    ).toBe(false);
  });

  it('should detect emtpy HLS playlists', () => {
    expect(
      isValidStream({
        type: 'hls',
        id: 'a',
        flags: [],
        captions: [],
        playlist: '',
      }),
    ).toBe(false);
  });
});
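isValidStream() acts as a sanity filter over scraper output: file streams need at least one quality with a non-empty url, and HLS streams need a non-empty playlist. A compact sketch of an accepted HLS stream (values are placeholders):

import { isValidStream } from '@/utils/valid';

isValidStream({
  type: 'hls',
  id: 'example',
  flags: [],
  captions: [],
  playlist: 'https://example.com/playlist.m3u8', // an empty string here would make it invalid
}); // true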
src/dev-cli.ts (deleted, 417 lines)
@@ -1,417 +0,0 @@
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */

import { program } from 'commander';
import dotenv from 'dotenv';
import { prompt } from 'enquirer';
import nodeFetch from 'node-fetch';
import Spinnies from 'spinnies';

import { MetaOutput, MovieMedia, ProviderControls, ShowMedia, makeProviders, makeStandardFetcher, targets } from '.';

dotenv.config();

type ProviderSourceAnswers = {
  id: string;
  type: string;
};

type EmbedSourceAnswers = {
  url: string;
};

type CommonAnswers = {
  fetcher: string;
  source: string;
};

type ShowAnswers = {
  season: string;
  episode: string;
};

type CommandLineArguments = {
  fetcher: string;
  sourceId: string;
  tmdbId: string;
  type: string;
  season: string;
  episode: string;
  url: string;
};

const TMDB_API_KEY = process.env.MOVIE_WEB_TMDB_API_KEY ?? '';

if (!TMDB_API_KEY?.trim()) {
  throw new Error('Missing MOVIE_WEB_TMDB_API_KEY environment variable');
}

function getAllSources() {
  // * The only way to get a list of all sources is to
  // * create all these things. Maybe this should change
  const providers = makeProviders({
    fetcher: makeStandardFetcher(nodeFetch),
    target: targets.NATIVE,
  });

  const combined = [...providers.listSources(), ...providers.listEmbeds()];

  // * Remove dupes
  const map = new Map(combined.map((source) => [source.id, source]));

  return [...map.values()];
}

// * Defined here cuz ESLint didn't like the order these were defined in
const sources = getAllSources();

async function makeTMDBRequest(url: string): Promise<Response> {
  const headers: {
    accept: 'application/json';
    authorization?: string;
  } = {
    accept: 'application/json',
  };

  // * Used to get around ESLint
  // * Assignment to function parameter 'url'. eslint (no-param-reassign)
  let requestURL = url;

  // * JWT keys always start with ey and are ONLY valid as a header.
  // * All other keys are ONLY valid as a query param.
  // * Thanks TMDB.
  if (TMDB_API_KEY.startsWith('ey')) {
    headers.authorization = `Bearer ${TMDB_API_KEY}`;
  } else {
    requestURL += `?api_key=${TMDB_API_KEY}`;
  }

  return fetch(requestURL, {
    method: 'GET',
    headers,
  });
}

async function getMovieMediaDetails(id: string): Promise<MovieMedia> {
  const response = await makeTMDBRequest(`https://api.themoviedb.org/3/movie/${id}`);
  const movie = await response.json();

  if (movie.success === false) {
    throw new Error(movie.status_message);
  }

  if (!movie.release_date) {
    throw new Error(`${movie.title} has no release_date. Assuming unreleased`);
  }

  return {
    type: 'movie',
    title: movie.title,
    releaseYear: Number(movie.release_date.split('-')[0]),
    tmdbId: id,
  };
}

async function getShowMediaDetails(id: string, seasonNumber: string, episodeNumber: string): Promise<ShowMedia> {
  // * TV shows require the TMDB ID for the series, season, and episode
  // * and the name of the series. Needs multiple requests
  let response = await makeTMDBRequest(`https://api.themoviedb.org/3/tv/${id}`);
  const series = await response.json();

  if (series.success === false) {
    throw new Error(series.status_message);
  }

  if (!series.first_air_date) {
    throw new Error(`${series.name} has no first_air_date. Assuming unaired`);
  }

  response = await makeTMDBRequest(`https://api.themoviedb.org/3/tv/${id}/season/${seasonNumber}`);
  const season = await response.json();

  if (season.success === false) {
    throw new Error(season.status_message);
  }

  response = await makeTMDBRequest(
    `https://api.themoviedb.org/3/tv/${id}/season/${seasonNumber}/episode/${episodeNumber}`,
  );
  const episode = await response.json();

  if (episode.success === false) {
    throw new Error(episode.status_message);
  }

  return {
    type: 'show',
    title: series.name,
    releaseYear: Number(series.first_air_date.split('-')[0]),
    tmdbId: id,
    episode: {
      number: episode.episode_number,
      tmdbId: episode.id,
    },
    season: {
      number: season.season_number,
      tmdbId: season.id,
    },
  };
}

function joinMediaTypes(mediaTypes: string[] | undefined) {
  if (mediaTypes) {
    const formatted = mediaTypes
      .map((type: string) => {
        return `${type[0].toUpperCase() + type.substring(1).toLowerCase()}s`;
      })
      .join(' / ');

    return `(${formatted})`;
  }
  return ''; // * Embed sources pass through here too
}

async function runScraper(providers: ProviderControls, source: MetaOutput, options: CommandLineArguments) {
  const spinnies = new Spinnies();

  if (source.type === 'embed') {
    spinnies.add('scrape', { text: `Running ${source.name} scraper on ${options.url}` });
    try {
      const result = await providers.runEmbedScraper({
        url: options.url,
        id: source.id,
      });
      spinnies.succeed('scrape', { text: 'Done!' });
      console.log(result);
    } catch (error) {
      let message = 'Unknown error';
      if (error instanceof Error) {
        message = error.message;
      }

      spinnies.fail('scrape', { text: `ERROR: ${message}` });
    }
  } else {
    let media;

    if (options.type === 'movie') {
      media = await getMovieMediaDetails(options.tmdbId);
    } else {
      media = await getShowMediaDetails(options.tmdbId, options.season, options.episode);
    }

    spinnies.add('scrape', { text: `Running ${source.name} scraper on ${media.title}` });
    try {
      const result = await providers.runSourceScraper({
        media,
        id: source.id,
      });
      spinnies.succeed('scrape', { text: 'Done!' });
      console.log(result);
    } catch (error) {
      let message = 'Unknown error';
      if (error instanceof Error) {
        message = error.message;
      }

      spinnies.fail('scrape', { text: `ERROR: ${message}` });
    }
  }
}

async function processOptions(options: CommandLineArguments) {
  if (options.fetcher !== 'node-fetch' && options.fetcher !== 'native') {
    throw new Error("Fetcher must be either 'native' or 'node-fetch'");
  }

  if (!options.sourceId.trim()) {
    throw new Error('Source ID must be provided');
  }

  const source = sources.find(({ id }) => id === options.sourceId);

  if (!source) {
    throw new Error('Invalid source ID. No source found');
  }

  if (source.type === 'embed' && !options.url.trim()) {
    throw new Error('Must provide an embed URL for embed sources');
  }

  if (source.type === 'source') {
    if (!options.tmdbId.trim()) {
      throw new Error('Must provide a TMDB ID for provider sources');
    }

    if (Number.isNaN(Number(options.tmdbId)) || Number(options.tmdbId) < 0) {
      throw new Error('TMDB ID must be a number greater than 0');
    }

    if (!options.type.trim()) {
      throw new Error('Must provide a type for provider sources');
    }

    if (options.type !== 'movie' && options.type !== 'show') {
      throw new Error("Invalid media type. Must be either 'movie' or 'show'");
    }

    if (options.type === 'show') {
      if (!options.season.trim()) {
        throw new Error('Must provide a season number for TV shows');
      }

      if (!options.episode.trim()) {
        throw new Error('Must provide an episode number for TV shows');
      }

      if (Number.isNaN(Number(options.season)) || Number(options.season) <= 0) {
        throw new Error('Season number must be a number greater than 0');
      }

      if (Number.isNaN(Number(options.episode)) || Number(options.episode) <= 0) {
        throw new Error('Episode number must be a number greater than 0');
      }
    }
  }

  let fetcher;

  if (options.fetcher === 'native') {
    fetcher = makeStandardFetcher(fetch);
  } else {
    fetcher = makeStandardFetcher(nodeFetch);
  }

  const providers = makeProviders({
    fetcher,
    target: targets.NATIVE,
  });

  await runScraper(providers, source, options);
}

async function runQuestions() {
  const options = {
    fetcher: 'node-fetch',
    sourceId: '',
    tmdbId: '',
    type: 'movie',
    season: '0',
    episode: '0',
    url: '',
  };

  const answers = await prompt<CommonAnswers>([
    {
      type: 'select',
      name: 'fetcher',
      message: 'Select a fetcher',
      choices: [
        {
          message: 'Native',
          name: 'native',
        },
        {
          message: 'Node fetch',
          name: 'node-fetch',
        },
      ],
    },
    {
      type: 'select',
      name: 'source',
      message: 'Select a source',
      choices: sources.map((source) => ({
        message: `[${source.type.toLocaleUpperCase()}] ${source.name} ${joinMediaTypes(source.mediaTypes)}`.trim(),
        name: source.id,
      })),
    },
  ]);

  options.fetcher = answers.fetcher;
  options.sourceId = answers.source;

  const source = sources.find(({ id }) => id === answers.source);

  if (!source) {
    throw new Error(`No source with ID ${answers.source} found`);
  }

  if (source.type === 'embed') {
    const sourceAnswers = await prompt<EmbedSourceAnswers>([
      {
        type: 'input',
        name: 'url',
        message: 'Embed URL',
      },
    ]);

    options.url = sourceAnswers.url;
  } else {
    const sourceAnswers = await prompt<ProviderSourceAnswers>([
      {
        type: 'input',
        name: 'id',
        message: 'TMDB ID',
      },
      {
        type: 'select',
        name: 'type',
        message: 'Media type',
        choices: [
          {
            message: 'Movie',
            name: 'movie',
          },
          {
            message: 'TV Show',
            name: 'show',
          },
        ],
      },
    ]);

    options.tmdbId = sourceAnswers.id;
    options.type = sourceAnswers.type;

    if (sourceAnswers.type === 'show') {
      const seriesAnswers = await prompt<ShowAnswers>([
        {
          type: 'input',
          name: 'season',
          message: 'Season',
        },
        {
          type: 'input',
          name: 'episode',
          message: 'Episode',
        },
      ]);

      options.season = seriesAnswers.season;
      options.episode = seriesAnswers.episode;
    }
  }

  await processOptions(options);
}

async function runCommandLine() {
  program
    .option('-f, --fetcher <fetcher>', "Fetcher to use. Either 'native' or 'node-fetch'", 'node-fetch')
    .option('-sid, --source-id <id>', 'ID for the source to use. Either an embed or provider', '')
    .option('-tid, --tmdb-id <id>', 'TMDB ID for the media to scrape. Only used if source is a provider', '')
    .option('-t, --type <type>', "Media type. Either 'movie' or 'show'. Only used if source is a provider", 'movie')
    .option('-s, --season <number>', "Season number. Only used if type is 'show'", '0')
    .option('-e, --episode <number>', "Episode number. Only used if type is 'show'", '0')
    .option('-u, --url <embed URL>', 'URL to a video embed. Only used if source is an embed', '');

  program.parse();

  await processOptions(program.opts());
}

if (process.argv.length === 2) {
  runQuestions();
} else {
  runCommandLine();
}
src/dev-cli/browser/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
dist
src/dev-cli/browser/index.html (new file, 11 lines)
@@ -0,0 +1,11 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Scraper CLI</title>
</head>
<body>
  <script src="./index.ts" type="module"></script>
</body>
</html>
src/dev-cli/browser/index.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { makeProviders, makeSimpleProxyFetcher, makeStandardFetcher, targets } from '../../../lib';

(window as any).scrape = (proxyUrl: string, type: 'source' | 'embed', input: any) => {
  const providers = makeProviders({
    fetcher: makeStandardFetcher(fetch),
    target: targets.BROWSER,
    proxiedFetcher: makeSimpleProxyFetcher(proxyUrl, fetch),
  });
  if (type === 'source') {
    return providers.runSourceScraper(input);
  }
  if (type === 'embed') {
    return providers.runEmbedScraper(input);
  }

  throw new Error('Input input type');
};
src/dev-cli/config.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
export function getConfig() {
  let tmdbApiKey = process.env.MOVIE_WEB_TMDB_API_KEY ?? '';
  tmdbApiKey = tmdbApiKey.trim();

  if (!tmdbApiKey) {
    throw new Error('Missing MOVIE_WEB_TMDB_API_KEY environment variable');
  }

  let proxyUrl: undefined | string = process.env.MOVIE_WEB_PROXY_URL;
  proxyUrl = !proxyUrl ? undefined : proxyUrl;

  return {
    tmdbApiKey,
    proxyUrl,
  };
}
src/dev-cli/index.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */

import { program } from 'commander';
import dotenv from 'dotenv';
import { prompt } from 'enquirer';

import { runScraper } from '@/dev-cli/scraper';
import { processOptions } from '@/dev-cli/validate';

import { getBuiltinEmbeds, getBuiltinSources } from '..';

dotenv.config();

type ProviderSourceAnswers = {
  id: string;
  type: string;
};

type EmbedSourceAnswers = {
  url: string;
};

type CommonAnswers = {
  fetcher: string;
  source: string;
};

type ShowAnswers = {
  season: string;
  episode: string;
};

const sourceScrapers = getBuiltinSources().sort((a, b) => b.rank - a.rank);
const embedScrapers = getBuiltinEmbeds().sort((a, b) => b.rank - a.rank);
const sources = [...sourceScrapers, ...embedScrapers];

function joinMediaTypes(mediaTypes: string[] | undefined) {
  if (mediaTypes) {
    const formatted = mediaTypes
      .map((type: string) => {
        return `${type[0].toUpperCase() + type.substring(1).toLowerCase()}s`;
      })
      .join(' / ');

    return `(${formatted})`;
  }
  return ''; // * Embed sources pass through here too
}

async function runQuestions() {
  const options = {
    fetcher: 'node-fetch',
    sourceId: '',
    tmdbId: '',
    type: 'movie',
    season: '0',
    episode: '0',
    url: '',
  };

  const answers = await prompt<CommonAnswers>([
    {
      type: 'select',
      name: 'fetcher',
      message: 'Select a fetcher mode',
      choices: [
        {
          message: 'Native',
          name: 'native',
        },
        {
          message: 'Node fetch',
          name: 'node-fetch',
        },
        {
          message: 'Browser',
          name: 'browser',
        },
      ],
    },
    {
      type: 'select',
      name: 'source',
      message: 'Select a source',
      choices: sources.map((source) => ({
        message: `[${source.type.toLocaleUpperCase()}] ${source.name} ${joinMediaTypes(source.mediaTypes)}`.trim(),
        name: source.id,
      })),
    },
  ]);

  options.fetcher = answers.fetcher;
  options.sourceId = answers.source;

  const source = sources.find(({ id }) => id === answers.source);

  if (!source) {
    throw new Error(`No source with ID ${answers.source} found`);
  }

  if (source.type === 'embed') {
    const sourceAnswers = await prompt<EmbedSourceAnswers>([
      {
        type: 'input',
        name: 'url',
        message: 'Embed URL',
      },
    ]);

    options.url = sourceAnswers.url;
  } else {
    const sourceAnswers = await prompt<ProviderSourceAnswers>([
      {
        type: 'input',
        name: 'id',
        message: 'TMDB ID',
      },
      {
        type: 'select',
        name: 'type',
        message: 'Media type',
        choices: [
          {
            message: 'Movie',
            name: 'movie',
          },
          {
            message: 'TV Show',
            name: 'show',
          },
        ],
      },
    ]);

    options.tmdbId = sourceAnswers.id;
    options.type = sourceAnswers.type;

    if (sourceAnswers.type === 'show') {
      const seriesAnswers = await prompt<ShowAnswers>([
        {
          type: 'input',
          name: 'season',
          message: 'Season',
        },
        {
          type: 'input',
          name: 'episode',
          message: 'Episode',
        },
      ]);

      options.season = seriesAnswers.season;
      options.episode = seriesAnswers.episode;
    }
  }

  const { providerOptions, source: validatedSource, options: validatedOps } = await processOptions(sources, options);
  await runScraper(providerOptions, validatedSource, validatedOps);
}

async function runCommandLine() {
  program
    .option('-f, --fetcher <fetcher>', "Fetcher to use. Either 'native' or 'node-fetch'", 'node-fetch')
    .option('-sid, --source-id <id>', 'ID for the source to use. Either an embed or provider', '')
    .option('-tid, --tmdb-id <id>', 'TMDB ID for the media to scrape. Only used if source is a provider', '')
    .option('-t, --type <type>', "Media type. Either 'movie' or 'show'. Only used if source is a provider", 'movie')
    .option('-s, --season <number>', "Season number. Only used if type is 'show'", '0')
    .option('-e, --episode <number>', "Episode number. Only used if type is 'show'", '0')
    .option('-u, --url <embed URL>', 'URL to a video embed. Only used if source is an embed', '');

  program.parse();

  const {
    providerOptions,
    source: validatedSource,
    options: validatedOps,
  } = await processOptions(sources, program.opts());
  await runScraper(providerOptions, validatedSource, validatedOps);
}

if (process.argv.length === 2) {
  runQuestions()
    .catch(() => console.error('Exited.'))
    .finally(() => process.exit(0));
} else {
  runCommandLine()
    .catch(() => console.error('Exited.'))
    .finally(() => process.exit(0));
}
src/dev-cli/logging.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { inspect } from 'node:util';

export function logDeepObject(object: Record<any, any>) {
  // This is the dev cli, so we can use console.log
  // eslint-disable-next-line no-console
  console.log(inspect(object, { showHidden: false, depth: null, colors: true }));
}
src/dev-cli/scraper.ts (new file, 141 lines)
@@ -0,0 +1,141 @@
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */

import { existsSync } from 'fs';
import { join } from 'path';

import puppeteer, { Browser } from 'puppeteer';
import Spinnies from 'spinnies';
import { PreviewServer, build, preview } from 'vite';

import { getConfig } from '@/dev-cli/config';
import { logDeepObject } from '@/dev-cli/logging';
import { getMovieMediaDetails, getShowMediaDetails } from '@/dev-cli/tmdb';
import { CommandLineArguments } from '@/dev-cli/validate';

import { MetaOutput, ProviderMakerOptions, makeProviders } from '..';

async function runBrowserScraping(
  providerOptions: ProviderMakerOptions,
  source: MetaOutput,
  options: CommandLineArguments,
) {
  if (!existsSync(join(__dirname, '../../lib/index.js')))
    throw new Error('Please compile before running cli in browser mode');
  const config = getConfig();
  if (!config.proxyUrl)
    throw new Error('Simple proxy url must be set in the environment (MOVIE_WEB_PROXY_URL) for browser mode to work');

  const root = join(__dirname, 'browser');
  let server: PreviewServer | undefined;
  let browser: Browser | undefined;
  try {
    // setup browser
    await build({
      root,
    });
    server = await preview({
      root,
    });
    browser = await puppeteer.launch({
      headless: true,
      args: ['--no-sandbox', '--disable-setuid-sandbox'],
    });
    const page = await browser.newPage();
    // This is the dev cli, so we can use console.log
    // eslint-disable-next-line no-console
    page.on('console', (message) => console.log(`${message.type().slice(0, 3).toUpperCase()} ${message.text()}`));

    if (!server.resolvedUrls?.local.length) throw new Error('Server did not start');
    await page.goto(server.resolvedUrls.local[0]);
    await page.waitForFunction('!!window.scrape', { timeout: 5000 });

    // get input media
    let input: any;
    if (source.type === 'embed') {
      input = {
        url: options.url,
        id: source.id,
      };
    } else if (source.type === 'source') {
      let media;
      if (options.type === 'movie') {
        media = await getMovieMediaDetails(options.tmdbId);
      } else {
        media = await getShowMediaDetails(options.tmdbId, options.season, options.episode);
      }
      input = {
        media,
        id: source.id,
      };
    } else {
      throw new Error('Wrong source input type');
    }

    return await page.evaluate(
      async (proxy, type, inp) => {
        return (window as any).scrape(proxy, type, inp);
      },
      config.proxyUrl,
      source.type,
      input,
    );
  } finally {
    server?.httpServer.close();
    await browser?.close();
  }
}

async function runActualScraping(
  providerOptions: ProviderMakerOptions,
  source: MetaOutput,
  options: CommandLineArguments,
): Promise<any> {
  if (options.fetcher === 'browser') return runBrowserScraping(providerOptions, source, options);
  const providers = makeProviders(providerOptions);

  if (source.type === 'embed') {
    return providers.runEmbedScraper({
      url: options.url,
      id: source.id,
    });
  }

  if (source.type === 'source') {
    let media;

    if (options.type === 'movie') {
      media = await getMovieMediaDetails(options.tmdbId);
    } else {
      media = await getShowMediaDetails(options.tmdbId, options.season, options.episode);
    }

    return providers.runSourceScraper({
      media,
      id: source.id,
    });
  }

  throw new Error('Invalid source type');
}

export async function runScraper(
  providerOptions: ProviderMakerOptions,
  source: MetaOutput,
  options: CommandLineArguments,
) {
  const spinnies = new Spinnies();

  spinnies.add('scrape', { text: `Running ${source.name} scraper` });
  try {
    const result = await runActualScraping(providerOptions, source, options);
    spinnies.succeed('scrape', { text: 'Done!' });
    logDeepObject(result);
  } catch (error) {
    let message = 'Unknown error';
    if (error instanceof Error) {
      message = error.message;
    }
    spinnies.fail('scrape', { text: `ERROR: ${message}` });
    console.error(error);
  }
}
src/dev-cli/tmdb.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import { getConfig } from '@/dev-cli/config';

import { MovieMedia, ShowMedia } from '..';

export async function makeTMDBRequest(url: string, appendToResponse?: string): Promise<Response> {
  const headers: {
    accept: 'application/json';
    authorization?: string;
  } = {
    accept: 'application/json',
  };

  const requestURL = new URL(url);
  const key = getConfig().tmdbApiKey;

  // * JWT keys always start with ey and are ONLY valid as a header.
  // * All other keys are ONLY valid as a query param.
  // * Thanks TMDB.
  if (key.startsWith('ey')) {
    headers.authorization = `Bearer ${key}`;
  } else {
    requestURL.searchParams.append('api_key', key);
  }

  if (appendToResponse) {
    requestURL.searchParams.append('append_to_response', appendToResponse);
  }

  return fetch(requestURL, {
    method: 'GET',
    headers,
  });
}

export async function getMovieMediaDetails(id: string): Promise<MovieMedia> {
  const response = await makeTMDBRequest(`https://api.themoviedb.org/3/movie/${id}`, 'external_ids');
  const movie = await response.json();

  if (movie.success === false) {
    throw new Error(movie.status_message);
  }

  if (!movie.release_date) {
    throw new Error(`${movie.title} has no release_date. Assuming unreleased`);
  }

  return {
    type: 'movie',
    title: movie.title,
    releaseYear: Number(movie.release_date.split('-')[0]),
    tmdbId: id,
    imdbId: movie.imdb_id,
  };
}

export async function getShowMediaDetails(id: string, seasonNumber: string, episodeNumber: string): Promise<ShowMedia> {
  // * TV shows require the TMDB ID for the series, season, and episode
  // * and the name of the series. Needs multiple requests
  let response = await makeTMDBRequest(`https://api.themoviedb.org/3/tv/${id}`, 'external_ids');
  const series = await response.json();

  if (series.success === false) {
    throw new Error(series.status_message);
  }

  if (!series.first_air_date) {
    throw new Error(`${series.name} has no first_air_date. Assuming unaired`);
  }

  response = await makeTMDBRequest(`https://api.themoviedb.org/3/tv/${id}/season/${seasonNumber}`);
  const season = await response.json();

  if (season.success === false) {
    throw new Error(season.status_message);
  }

  response = await makeTMDBRequest(
    `https://api.themoviedb.org/3/tv/${id}/season/${seasonNumber}/episode/${episodeNumber}`,
  );
  const episode = await response.json();

  if (episode.success === false) {
    throw new Error(episode.status_message);
  }

  return {
    type: 'show',
    title: series.name,
    releaseYear: Number(series.first_air_date.split('-')[0]),
    tmdbId: id,
    episode: {
      number: episode.episode_number,
      tmdbId: episode.id,
    },
    season: {
      number: season.season_number,
      tmdbId: season.id,
    },
    imdbId: series.external_ids.imdb_id,
  };
}
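A hedged usage sketch for the TMDB helpers above, not taken from the diff: the numeric IDs are placeholders, and a TMDB API key is assumed to be available through `getConfig()` (the config module itself is not shown in this excerpt).

```ts
import { getMovieMediaDetails, getShowMediaDetails } from '@/dev-cli/tmdb';

async function demo() {
  // '550' and '1396' are placeholder TMDB IDs.
  const movie = await getMovieMediaDetails('550');
  console.log(movie.title, movie.releaseYear, movie.imdbId);

  const show = await getShowMediaDetails('1396', '1', '1');
  console.log(show.title, show.season.number, show.episode.number);
}

demo().catch(console.error);
```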
src/dev-cli/validate.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import nodeFetch from 'node-fetch';

import { Embed, Sourcerer } from '@/providers/base';

import { ProviderMakerOptions, makeStandardFetcher, targets } from '..';

export type CommandLineArguments = {
  fetcher: string;
  sourceId: string;
  tmdbId: string;
  type: string;
  season: string;
  episode: string;
  url: string;
};

export async function processOptions(sources: Array<Embed | Sourcerer>, options: CommandLineArguments) {
  const fetcherOptions = ['node-fetch', 'native', 'browser'];
  if (!fetcherOptions.includes(options.fetcher)) {
    throw new Error(`Fetcher must be any of: ${fetcherOptions.join()}`);
  }

  if (!options.sourceId.trim()) {
    throw new Error('Source ID must be provided');
  }

  const source = sources.find(({ id }) => id === options.sourceId);

  if (!source) {
    throw new Error('Invalid source ID. No source found');
  }

  if (source.type === 'embed' && !options.url.trim()) {
    throw new Error('Must provide an embed URL for embed sources');
  }

  if (source.type === 'source') {
    if (!options.tmdbId.trim()) {
      throw new Error('Must provide a TMDB ID for provider sources');
    }

    if (Number.isNaN(Number(options.tmdbId)) || Number(options.tmdbId) < 0) {
      throw new Error('TMDB ID must be a number greater than 0');
    }

    if (!options.type.trim()) {
      throw new Error('Must provide a type for provider sources');
    }

    if (options.type !== 'movie' && options.type !== 'show') {
      throw new Error("Invalid media type. Must be either 'movie' or 'show'");
    }

    if (options.type === 'show') {
      if (!options.season.trim()) {
        throw new Error('Must provide a season number for TV shows');
      }

      if (!options.episode.trim()) {
        throw new Error('Must provide an episode number for TV shows');
      }

      if (Number.isNaN(Number(options.season)) || Number(options.season) <= 0) {
        throw new Error('Season number must be a number greater than 0');
      }

      if (Number.isNaN(Number(options.episode)) || Number(options.episode) <= 0) {
        throw new Error('Episode number must be a number greater than 0');
      }
    }
  }

  let fetcher;

  if (options.fetcher === 'native') {
    fetcher = makeStandardFetcher(fetch);
  } else {
    fetcher = makeStandardFetcher(nodeFetch);
  }

  const providerOptions: ProviderMakerOptions = {
    fetcher,
    target: targets.ANY,
    consistentIpForRequests: true,
  };

  return {
    providerOptions,
    options,
    source,
  };
}
src/entrypoint/builder.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
import { ProviderControls, makeControls } from '@/entrypoint/controls';
import { getBuiltinEmbeds, getBuiltinSources } from '@/entrypoint/providers';
import { Targets, getTargetFeatures } from '@/entrypoint/utils/targets';
import { Fetcher } from '@/fetchers/types';
import { Embed, Sourcerer } from '@/providers/base';
import { getProviders } from '@/providers/get';

export type ProviderBuilder = {
  setTarget(target: Targets): ProviderBuilder;
  setFetcher(fetcher: Fetcher): ProviderBuilder;
  setProxiedFetcher(fetcher: Fetcher): ProviderBuilder;
  addSource(scraper: Sourcerer): ProviderBuilder;
  addSource(name: string): ProviderBuilder;
  addEmbed(scraper: Embed): ProviderBuilder;
  addEmbed(name: string): ProviderBuilder;
  addBuiltinProviders(): ProviderBuilder;
  enableConsistentIpForRequests(): ProviderBuilder;
  build(): ProviderControls;
};

export function buildProviders(): ProviderBuilder {
  let consistentIpForRequests = false;
  let target: Targets | null = null;
  let fetcher: Fetcher | null = null;
  let proxiedFetcher: Fetcher | null = null;
  const embeds: Embed[] = [];
  const sources: Sourcerer[] = [];
  const builtinSources = getBuiltinSources();
  const builtinEmbeds = getBuiltinEmbeds();

  return {
    enableConsistentIpForRequests() {
      consistentIpForRequests = true;
      return this;
    },
    setFetcher(f) {
      fetcher = f;
      return this;
    },
    setProxiedFetcher(f) {
      proxiedFetcher = f;
      return this;
    },
    setTarget(t) {
      target = t;
      return this;
    },
    addSource(input) {
      if (typeof input !== 'string') {
        sources.push(input);
        return this;
      }

      const matchingSource = builtinSources.find((v) => v.id === input);
      if (!matchingSource) throw new Error('Source not found');
      sources.push(matchingSource);
      return this;
    },
    addEmbed(input) {
      if (typeof input !== 'string') {
        embeds.push(input);
        return this;
      }

      const matchingEmbed = builtinEmbeds.find((v) => v.id === input);
      if (!matchingEmbed) throw new Error('Embed not found');
      embeds.push(matchingEmbed);
      return this;
    },
    addBuiltinProviders() {
      sources.push(...builtinSources);
      embeds.push(...builtinEmbeds);
      return this;
    },
    build() {
      if (!target) throw new Error('Target not set');
      if (!fetcher) throw new Error('Fetcher not set');
      const features = getTargetFeatures(target, consistentIpForRequests);
      const list = getProviders(features, {
        embeds,
        sources,
      });

      return makeControls({
        fetcher,
        proxiedFetcher: proxiedFetcher ?? undefined,
        embeds: list.embeds,
        sources: list.sources,
        features,
      });
    },
  };
}
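A minimal sketch of how the builder above is meant to be chained, assuming the library is consumed under the package name `@movie-web/providers` and that a global `fetch` (Node 18+ or a browser) is available; the source ID in the comment is illustrative only.

```ts
import { buildProviders, makeStandardFetcher, targets } from '@movie-web/providers';

const providers = buildProviders()
  .setTarget(targets.NATIVE)
  .setFetcher(makeStandardFetcher(fetch))
  .enableConsistentIpForRequests()
  .addBuiltinProviders() // or cherry-pick with .addSource('some-source-id') / .addEmbed(someEmbedScraper)
  .build();
```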
@@ -1,24 +1,19 @@
-import { makeFullFetcher } from '@/fetchers/common';
+import { FullScraperEvents, IndividualScraperEvents } from '@/entrypoint/utils/events';
+import { ScrapeMedia } from '@/entrypoint/utils/media';
+import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/entrypoint/utils/meta';
+import { FeatureMap } from '@/entrypoint/utils/targets';
+import { makeFetcher } from '@/fetchers/common';
 import { Fetcher } from '@/fetchers/types';
-import { FullScraperEvents, IndividualScraperEvents } from '@/main/events';
+import { Embed, EmbedOutput, Sourcerer, SourcererOutput } from '@/providers/base';
-import { scrapeIndividualEmbed, scrapeInvidualSource } from '@/main/individualRunner';
+import { scrapeIndividualEmbed, scrapeInvidualSource } from '@/runners/individualRunner';
-import { ScrapeMedia } from '@/main/media';
+import { RunOutput, runAllProviders } from '@/runners/runner';
-import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/main/meta';
-import { RunOutput, runAllProviders } from '@/main/runner';
-import { Targets, getTargetFeatures } from '@/main/targets';
-import { EmbedOutput, SourcererOutput } from '@/providers/base';
-import { getProviders } from '@/providers/get';
 
-export interface ProviderBuilderOptions {
+export interface ProviderControlsInput {
-  // fetcher, every web request gets called through here
   fetcher: Fetcher;
 
-  // proxied fetcher, if the scraper needs to access a CORS proxy. this fetcher will be called instead
-  // of the normal fetcher. Defaults to the normal fetcher.
   proxiedFetcher?: Fetcher;
+  features: FeatureMap;
-  // target of where the streams will be used
+  sources: Sourcerer[];
-  target: Targets;
+  embeds: Embed[];
 }
 
 export interface RunnerOptions {
@@ -80,13 +75,16 @@ export interface ProviderControls {
   listEmbeds(): MetaOutput[];
 }
 
-export function makeProviders(ops: ProviderBuilderOptions): ProviderControls {
+export function makeControls(ops: ProviderControlsInput): ProviderControls {
-  const features = getTargetFeatures(ops.target);
+  const list = {
-  const list = getProviders(features);
+    embeds: ops.embeds,
+    sources: ops.sources,
+  };
 
   const providerRunnerOps = {
-    features,
+    features: ops.features,
-    fetcher: makeFullFetcher(ops.fetcher),
+    fetcher: makeFetcher(ops.fetcher),
-    proxiedFetcher: makeFullFetcher(ops.proxiedFetcher ?? ops.fetcher),
+    proxiedFetcher: makeFetcher(ops.proxiedFetcher ?? ops.fetcher),
   };
 
   return {
src/entrypoint/declare.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { makeControls } from '@/entrypoint/controls';
import { getBuiltinEmbeds, getBuiltinSources } from '@/entrypoint/providers';
import { Targets, getTargetFeatures } from '@/entrypoint/utils/targets';
import { Fetcher } from '@/fetchers/types';
import { getProviders } from '@/providers/get';

export interface ProviderMakerOptions {
  // fetcher, every web request gets called through here
  fetcher: Fetcher;

  // proxied fetcher, if the scraper needs to access a CORS proxy. this fetcher will be called instead
  // of the normal fetcher. Defaults to the normal fetcher.
  proxiedFetcher?: Fetcher;

  // target of where the streams will be used
  target: Targets;

  // Set this to true, if the requests will have the same IP as
  // the device that the stream will be played on
  consistentIpForRequests?: boolean;
}

export function makeProviders(ops: ProviderMakerOptions) {
  const features = getTargetFeatures(ops.target, ops.consistentIpForRequests ?? false);
  const list = getProviders(features, {
    embeds: getBuiltinEmbeds(),
    sources: getBuiltinSources(),
  });

  return makeControls({
    embeds: list.embeds,
    sources: list.sources,
    features,
    fetcher: ops.fetcher,
    proxiedFetcher: ops.proxiedFetcher,
  });
}
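The equivalent one-shot entrypoint, again assuming the `@movie-web/providers` package name and a global `fetch`; `makeProviders` wires all built-in sources and embeds through `makeControls` as shown above.

```ts
import { makeProviders, makeStandardFetcher, targets } from '@movie-web/providers';

const providers = makeProviders({
  fetcher: makeStandardFetcher(fetch),
  target: targets.BROWSER_EXTENSION,
  consistentIpForRequests: true,
});
```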
src/entrypoint/providers.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { gatherAllEmbeds, gatherAllSources } from '@/providers/all';
import { Embed, Sourcerer } from '@/providers/base';

export function getBuiltinSources(): Sourcerer[] {
  return gatherAllSources().filter((v) => !v.disabled);
}

export function getBuiltinEmbeds(): Embed[] {
  return gatherAllEmbeds().filter((v) => !v.disabled);
}
@@ -1,4 +1,4 @@
-import { MediaTypes } from '@/main/media';
+import { MediaTypes } from '@/entrypoint/utils/media';
 import { Embed, Sourcerer } from '@/providers/base';
 import { ProviderList } from '@/providers/get';
 
src/entrypoint/utils/targets.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
export const flags = {
  // CORS are set to allow any origin
  CORS_ALLOWED: 'cors-allowed',

  // the stream is locked on IP, so only works if
  // request maker is same as player (not compatible with proxies)
  IP_LOCKED: 'ip-locked',

  // The source/embed is blocking cloudflare ip's
  // This flag is not compatible with a proxy hosted on cloudflare
  CF_BLOCKED: 'cf-blocked',
} as const;

export type Flags = (typeof flags)[keyof typeof flags];

export const targets = {
  // browser with CORS restrictions
  BROWSER: 'browser',

  // browser, but no CORS restrictions through a browser extension
  BROWSER_EXTENSION: 'browser-extension',

  // native app, so no restrictions in what can be played
  NATIVE: 'native',

  // any target, no target restrictions
  ANY: 'any',
} as const;

export type Targets = (typeof targets)[keyof typeof targets];

export type FeatureMap = {
  requires: Flags[];
  disallowed: Flags[];
};

export const targetToFeatures: Record<Targets, FeatureMap> = {
  browser: {
    requires: [flags.CORS_ALLOWED],
    disallowed: [],
  },
  'browser-extension': {
    requires: [],
    disallowed: [],
  },
  native: {
    requires: [],
    disallowed: [],
  },
  any: {
    requires: [],
    disallowed: [],
  },
};

export function getTargetFeatures(target: Targets, consistentIpForRequests: boolean): FeatureMap {
  const features = targetToFeatures[target];
  if (!consistentIpForRequests) features.disallowed.push(flags.IP_LOCKED);
  return features;
}

export function flagsAllowedInFeatures(features: FeatureMap, inputFlags: Flags[]): boolean {
  const hasAllFlags = features.requires.every((v) => inputFlags.includes(v));
  if (!hasAllFlags) return false;
  const hasDisallowedFlag = features.disallowed.some((v) => inputFlags.includes(v));
  if (hasDisallowedFlag) return false;
  return true;
}
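To illustrate the flag semantics above (a sketch, not from the diff): a browser target requires CORS_ALLOWED, and leaving consistentIpForRequests off marks IP_LOCKED streams as disallowed.

```ts
import { flags, flagsAllowedInFeatures, getTargetFeatures, targets } from '@/entrypoint/utils/targets';

const features = getTargetFeatures(targets.BROWSER, false);

flagsAllowedInFeatures(features, [flags.CORS_ALLOWED]); // true
flagsAllowedInFeatures(features, []); // false, CORS_ALLOWED is required
flagsAllowedInFeatures(features, [flags.CORS_ALLOWED, flags.IP_LOCKED]); // false, IP_LOCKED is disallowed
```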
@@ -1,6 +1,7 @@
 import FormData from 'form-data';
 
 import { FetcherOptions } from '@/fetchers/types';
+import { isReactNative } from '@/utils/native';
 
 export interface SeralizedBody {
   headers: Record<string, string>;
@@ -8,11 +9,20 @@ export interface SeralizedBody {
 }
 
 export function serializeBody(body: FetcherOptions['body']): SeralizedBody {
-  if (body === undefined || typeof body === 'string' || body instanceof URLSearchParams || body instanceof FormData)
+  if (body === undefined || typeof body === 'string' || body instanceof URLSearchParams || body instanceof FormData) {
+    if (body instanceof URLSearchParams && isReactNative()) {
+      return {
+        headers: {
+          'Content-Type': 'application/x-www-form-urlencoded',
+        },
+        body: body.toString(),
+      };
+    }
     return {
       headers: {},
       body,
     };
+  }
 
   // serialize as JSON
   return {
@@ -26,14 +26,18 @@ export function makeFullUrl(url: string, ops?: FullUrlOptions): string {
   return parsedUrl.toString();
 }
 
-export function makeFullFetcher(fetcher: Fetcher): UseableFetcher {
+export function makeFetcher(fetcher: Fetcher): UseableFetcher {
-  return (url, ops) => {
+  const newFetcher = (url: string, ops?: FetcherOptions) => {
     return fetcher(url, {
       headers: ops?.headers ?? {},
       method: ops?.method ?? 'GET',
       query: ops?.query ?? {},
       baseUrl: ops?.baseUrl ?? '',
+      readHeaders: ops?.readHeaders ?? [],
       body: ops?.body,
     });
   };
+  const output: UseableFetcher = async (url, ops) => (await newFetcher(url, ops)).body;
+  output.full = newFetcher;
+  return output;
 }
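The practical effect of the `makeFetcher` change for scraper code, sketched under the assumption that `ctx` is a `ScrapeContext` handed to a scraper and the URL is a placeholder: the plain call resolves to just the body, while `.full()` exposes the whole `FetcherResponse`.

```ts
// Plain call: resolves to the response body only.
const html = await ctx.proxiedFetcher<string>('https://example.com/embed/abc123');

// .full(): resolves to the complete FetcherResponse.
const res = await ctx.proxiedFetcher.full('https://example.com/embed/abc123', {
  readHeaders: ['set-cookie'],
});
res.statusCode; // number
res.finalUrl; // URL after redirects (the proxy fetcher feeds X-Final-Destination into this)
res.headers.get('set-cookie'); // only headers listed in readHeaders are exposed
res.body; // same payload the plain call returns
```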
@@ -11,12 +11,17 @@ export type FetchOps = {
 
 export type FetchHeaders = {
   get(key: string): string | null;
+  set(key: string, value: string): void;
 };
 
 export type FetchReply = {
   text(): Promise<string>;
   json(): Promise<any>;
+  extraHeaders?: FetchHeaders;
+  extraUrl?: string;
   headers: FetchHeaders;
+  url: string;
+  status: number;
 };
 
 export type FetchLike = (url: string, ops?: FetchOps | undefined) => Promise<FetchReply>;
@@ -7,11 +7,32 @@ const headerMap: Record<string, string> = {
   cookie: 'X-Cookie',
   referer: 'X-Referer',
   origin: 'X-Origin',
+  'user-agent': 'X-User-Agent',
+  'x-real-ip': 'X-X-Real-Ip',
+};
 
+const responseHeaderMap: Record<string, string> = {
+  'x-set-cookie': 'Set-Cookie',
 };
 
 export function makeSimpleProxyFetcher(proxyUrl: string, f: FetchLike): Fetcher {
-  const fetcher = makeStandardFetcher(f);
   const proxiedFetch: Fetcher = async (url, ops) => {
+    const fetcher = makeStandardFetcher(async (a, b) => {
+      const res = await f(a, b);
 
+      // set extra headers that cant normally be accessed
+      res.extraHeaders = new Headers();
+      Object.entries(responseHeaderMap).forEach((entry) => {
+        const value = res.headers.get(entry[0]);
+        if (!value) return;
+        res.extraHeaders?.set(entry[0].toLowerCase(), value);
+      });
 
+      // set correct final url
+      res.extraUrl = res.headers.get('X-Final-Destination') ?? res.url;
+      return res;
+    });
 
     const fullUrl = makeFullUrl(url, ops);
 
     const headerEntries = Object.entries(ops.headers).map((entry) => {
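A sketch of how the proxy fetcher is set up and what the header maps above do; the proxy URL is a placeholder and the package name is assumed. Restricted request headers (cookie, referer, origin, user-agent, x-real-ip) are renamed to their X-* forms so the proxy can forward them, and X-Set-Cookie / X-Final-Destination coming back from the proxy are surfaced as extraHeaders / extraUrl.

```ts
import { makeSimpleProxyFetcher } from '@movie-web/providers';

// fetch is assumed to be the global fetch (Node 18+ or a browser).
const proxiedFetcher = makeSimpleProxyFetcher('https://proxy.example.org/', fetch);
// A scraper request with { headers: { cookie: 'a=b' } } is sent to the proxy as X-Cookie: a=b,
// and an upstream Set-Cookie becomes readable through readHeaders: ['set-cookie'].
```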
@@ -1,8 +1,20 @@
 import { serializeBody } from '@/fetchers/body';
 import { makeFullUrl } from '@/fetchers/common';
-import { FetchLike } from '@/fetchers/fetch';
+import { FetchLike, FetchReply } from '@/fetchers/fetch';
 import { Fetcher } from '@/fetchers/types';
 
+function getHeaders(list: string[], res: FetchReply): Headers {
+  const output = new Headers();
+  list.forEach((header) => {
+    const realHeader = header.toLowerCase();
+    const value = res.headers.get(realHeader);
+    const extraValue = res.extraHeaders?.get(realHeader);
+    if (!value) return;
+    output.set(realHeader, extraValue ?? value);
+  });
+  return output;
+}
 
 export function makeStandardFetcher(f: FetchLike): Fetcher {
   const normalFetch: Fetcher = async (url, ops) => {
     const fullUrl = makeFullUrl(url, ops);
@@ -17,9 +29,17 @@ export function makeStandardFetcher(f: FetchLike): Fetcher {
       body: seralizedBody.body,
     });
 
+    let body: any;
     const isJson = res.headers.get('content-type')?.includes('application/json');
-    if (isJson) return res.json();
+    if (isJson) body = await res.json();
-    return res.text();
+    else body = await res.text();
 
+    return {
+      body,
+      finalUrl: res.extraUrl ?? res.url,
+      headers: getHeaders(ops.readHeaders, res),
+      statusCode: res.status,
+    };
   };
 
   return normalFetch;
|
|||||||
baseUrl?: string;
|
baseUrl?: string;
|
||||||
headers?: Record<string, string>;
|
headers?: Record<string, string>;
|
||||||
query?: Record<string, string>;
|
query?: Record<string, string>;
|
||||||
method?: 'GET' | 'POST';
|
method?: 'HEAD' | 'GET' | 'POST';
|
||||||
|
readHeaders?: string[];
|
||||||
body?: Record<string, any> | string | FormData | URLSearchParams;
|
body?: Record<string, any> | string | FormData | URLSearchParams;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Version of the options that always has the defaults set
|
||||||
|
// This is to make making fetchers yourself easier
|
||||||
export type DefaultedFetcherOptions = {
|
export type DefaultedFetcherOptions = {
|
||||||
baseUrl?: string;
|
baseUrl?: string;
|
||||||
body?: Record<string, any> | string | FormData;
|
body?: Record<string, any> | string | FormData;
|
||||||
headers: Record<string, string>;
|
headers: Record<string, string>;
|
||||||
query: Record<string, string>;
|
query: Record<string, string>;
|
||||||
method: 'GET' | 'POST';
|
readHeaders: string[];
|
||||||
|
method: 'HEAD' | 'GET' | 'POST';
|
||||||
};
|
};
|
||||||
|
|
||||||
export type Fetcher<T = any> = {
|
export type FetcherResponse<T = any> = {
|
||||||
(url: string, ops: DefaultedFetcherOptions): Promise<T>;
|
statusCode: number;
|
||||||
|
headers: Headers;
|
||||||
|
finalUrl: string;
|
||||||
|
body: T;
|
||||||
};
|
};
|
||||||
|
|
||||||
// this feature has some quality of life features
|
// This is the version that will be inputted by library users
|
||||||
export type UseableFetcher<T = any> = {
|
export type Fetcher = {
|
||||||
(url: string, ops?: FetcherOptions): Promise<T>;
|
<T = any>(url: string, ops: DefaultedFetcherOptions): Promise<FetcherResponse<T>>;
|
||||||
|
};
|
||||||
|
|
||||||
|
// This is the version that scrapers will be interacting with
|
||||||
|
export type UseableFetcher = {
|
||||||
|
<T = any>(url: string, ops?: FetcherOptions): Promise<T>;
|
||||||
|
full: <T = any>(url: string, ops?: FetcherOptions) => Promise<FetcherResponse<T>>;
|
||||||
};
|
};
|
||||||
|
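The DefaultedFetcherOptions / FetcherResponse pair is what custom fetchers implement. A simplified sketch follows; it ignores the baseUrl, query and body handling that makeStandardFetcher performs, and assumes a global `fetch` and the `@movie-web/providers` package name.

```ts
import { Fetcher } from '@movie-web/providers';

const myFetcher: Fetcher = async (url, ops) => {
  // Minimal: forwards only method and headers, returns the body as text.
  const res = await fetch(url, { method: ops.method, headers: ops.headers });
  return {
    body: await res.text(),
    statusCode: res.status,
    finalUrl: res.url,
    headers: res.headers,
  };
};
```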
src/index.ts (30 changed lines)
@@ -1,19 +1,21 @@
 export type { EmbedOutput, SourcererOutput } from '@/providers/base';
-export type { RunOutput } from '@/main/runner';
+export type { Stream, StreamFile, FileBasedStream, HlsBasedStream, Qualities } from '@/providers/streams';
-export type { MetaOutput } from '@/main/meta';
+export type { Fetcher, DefaultedFetcherOptions, FetcherOptions, FetcherResponse } from '@/fetchers/types';
-export type { FullScraperEvents } from '@/main/events';
+export type { RunOutput } from '@/runners/runner';
-export type { Targets, Flags } from '@/main/targets';
+export type { MetaOutput } from '@/entrypoint/utils/meta';
-export type { MediaTypes, ShowMedia, ScrapeMedia, MovieMedia } from '@/main/media';
+export type { FullScraperEvents } from '@/entrypoint/utils/events';
-export type {
+export type { Targets, Flags } from '@/entrypoint/utils/targets';
-  ProviderBuilderOptions,
+export type { MediaTypes, ShowMedia, ScrapeMedia, MovieMedia } from '@/entrypoint/utils/media';
-  ProviderControls,
+export type { ProviderControls, RunnerOptions, EmbedRunnerOptions, SourceRunnerOptions } from '@/entrypoint/controls';
-  RunnerOptions,
+export type { ProviderBuilder } from '@/entrypoint/builder';
-  EmbedRunnerOptions,
+export type { ProviderMakerOptions } from '@/entrypoint/declare';
-  SourceRunnerOptions,
+export type { MovieScrapeContext, ShowScrapeContext, EmbedScrapeContext, ScrapeContext } from '@/utils/context';
-} from '@/main/builder';
+export type { SourcererOptions, EmbedOptions } from '@/providers/base';
 
 export { NotFoundError } from '@/utils/errors';
-export { makeProviders } from '@/main/builder';
+export { makeProviders } from '@/entrypoint/declare';
+export { buildProviders } from '@/entrypoint/builder';
+export { getBuiltinEmbeds, getBuiltinSources } from '@/entrypoint/providers';
 export { makeStandardFetcher } from '@/fetchers/standardFetch';
 export { makeSimpleProxyFetcher } from '@/fetchers/simpleProxy';
-export { flags, targets } from '@/main/targets';
+export { flags, targets } from '@/entrypoint/utils/targets';
@@ -1,175 +0,0 @@ (file deleted; removed contents follow)
import { UseableFetcher } from '@/fetchers/types';
import { FullScraperEvents } from '@/main/events';
import { ScrapeMedia } from '@/main/media';
import { FeatureMap, flagsAllowedInFeatures } from '@/main/targets';
import { EmbedOutput, SourcererOutput } from '@/providers/base';
import { ProviderList } from '@/providers/get';
import { Stream } from '@/providers/streams';
import { ScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';
import { reorderOnIdList } from '@/utils/list';

export type RunOutput = {
  sourceId: string;
  embedId?: string;
  stream: Stream;
};

export type SourceRunOutput = {
  sourceId: string;
  stream?: Stream;
  embeds: [];
};

export type EmbedRunOutput = {
  embedId: string;
  stream?: Stream;
};

export type ProviderRunnerOptions = {
  fetcher: UseableFetcher;
  proxiedFetcher: UseableFetcher;
  features: FeatureMap;
  sourceOrder?: string[];
  embedOrder?: string[];
  events?: FullScraperEvents;
  media: ScrapeMedia;
};

export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOptions): Promise<RunOutput | null> {
  const sources = reorderOnIdList(ops.sourceOrder ?? [], list.sources).filter((v) => {
    if (ops.media.type === 'movie') return !!v.scrapeMovie;
    if (ops.media.type === 'show') return !!v.scrapeShow;
    return false;
  });
  const embeds = reorderOnIdList(ops.embedOrder ?? [], list.embeds);
  const embedIds = embeds.map((v) => v.id);
  let lastId = '';

  const contextBase: ScrapeContext = {
    fetcher: ops.fetcher,
    proxiedFetcher: ops.proxiedFetcher,
    progress(val) {
      ops.events?.update?.({
        id: lastId,
        percentage: val,
        status: 'pending',
      });
    },
  };

  ops.events?.init?.({
    sourceIds: sources.map((v) => v.id),
  });

  for (const s of sources) {
    ops.events?.start?.(s.id);
    lastId = s.id;

    // run source scrapers
    let output: SourcererOutput | null = null;
    try {
      if (ops.media.type === 'movie' && s.scrapeMovie)
        output = await s.scrapeMovie({
          ...contextBase,
          media: ops.media,
        });
      else if (ops.media.type === 'show' && s.scrapeShow)
        output = await s.scrapeShow({
          ...contextBase,
          media: ops.media,
        });
      if (output?.stream && !flagsAllowedInFeatures(ops.features, output.stream.flags)) {
        throw new NotFoundError("stream doesn't satisfy target feature flags");
      }
    } catch (err) {
      if (err instanceof NotFoundError) {
        ops.events?.update?.({
          id: s.id,
          percentage: 100,
          status: 'notfound',
          reason: err.message,
        });
        continue;
      }
      ops.events?.update?.({
        id: s.id,
        percentage: 100,
        status: 'failure',
        error: err,
      });
      continue;
    }
    if (!output) throw new Error('Invalid media type');

    // return stream is there are any
    if (output.stream) {
      return {
        sourceId: s.id,
        stream: output.stream,
      };
    }

    if (output.embeds.length > 0) {
      ops.events?.discoverEmbeds?.({
        embeds: output.embeds.map((v, i) => ({
          id: [s.id, i].join('-'),
          embedScraperId: v.embedId,
        })),
        sourceId: s.id,
      });
    }

    // run embed scrapers on listed embeds
    const sortedEmbeds = output.embeds;
    sortedEmbeds.sort((a, b) => embedIds.indexOf(a.embedId) - embedIds.indexOf(b.embedId));

    for (const ind in sortedEmbeds) {
      if (!Object.prototype.hasOwnProperty.call(sortedEmbeds, ind)) continue;
      const e = sortedEmbeds[ind];
      const scraper = embeds.find((v) => v.id === e.embedId);
      if (!scraper) throw new Error('Invalid embed returned');

      // run embed scraper
      const id = [s.id, ind].join('-');
      ops.events?.start?.(id);
      lastId = id;
      let embedOutput: EmbedOutput;
      try {
        embedOutput = await scraper.scrape({
          ...contextBase,
          url: e.url,
        });
        if (!flagsAllowedInFeatures(ops.features, embedOutput.stream.flags)) {
          throw new NotFoundError("stream doesn't satisfy target feature flags");
        }
      } catch (err) {
        if (err instanceof NotFoundError) {
          ops.events?.update?.({
            id,
            percentage: 100,
            status: 'notfound',
            reason: err.message,
          });
          continue;
        }
        ops.events?.update?.({
          id,
          percentage: 100,
          status: 'failure',
          error: err,
        });
        continue;
      }

      return {
        sourceId: s.id,
        embedId: scraper.id,
        stream: embedOutput.stream,
      };
    }
  }

  // no providers or embeds returns streams
  return null;
}
@@ -1,39 +0,0 @@ (file deleted; removed contents follow)
export const flags = {
  NO_CORS: 'no-cors',
} as const;

export type Flags = (typeof flags)[keyof typeof flags];

export const targets = {
  BROWSER: 'browser',
  NATIVE: 'native',
  ALL: 'all',
} as const;

export type Targets = (typeof targets)[keyof typeof targets];

export type FeatureMap = {
  requires: readonly Flags[];
};

export const targetToFeatures: Record<Targets, FeatureMap> = {
  browser: {
    requires: [flags.NO_CORS],
  },
  native: {
    requires: [],
  },
  all: {
    requires: [],
  },
} as const;

export function getTargetFeatures(target: Targets): FeatureMap {
  return targetToFeatures[target];
}

export function flagsAllowedInFeatures(features: FeatureMap, inputFlags: Flags[]): boolean {
  const hasAllFlags = features.requires.every((v) => inputFlags.includes(v));
  if (!hasAllFlags) return false;
  return true;
}
@@ -1,22 +1,102 @@
 import { Embed, Sourcerer } from '@/providers/base';
+import { doodScraper } from '@/providers/embeds/dood';
+import { droploadScraper } from '@/providers/embeds/dropload';
+import { febboxHlsScraper } from '@/providers/embeds/febbox/hls';
+import { febboxMp4Scraper } from '@/providers/embeds/febbox/mp4';
+import { filelionsScraper } from '@/providers/embeds/filelions';
 import { mixdropScraper } from '@/providers/embeds/mixdrop';
 import { mp4uploadScraper } from '@/providers/embeds/mp4upload';
+import { streambucketScraper } from '@/providers/embeds/streambucket';
 import { streamsbScraper } from '@/providers/embeds/streamsb';
 import { upcloudScraper } from '@/providers/embeds/upcloud';
 import { upstreamScraper } from '@/providers/embeds/upstream';
+import { vidsrcembedScraper } from '@/providers/embeds/vidsrc';
+import { vTubeScraper } from '@/providers/embeds/vtube';
 import { flixhqScraper } from '@/providers/sources/flixhq/index';
 import { goMoviesScraper } from '@/providers/sources/gomovies/index';
+import { insertunitScraper } from '@/providers/sources/insertunit';
 import { kissAsianScraper } from '@/providers/sources/kissasian/index';
+import { lookmovieScraper } from '@/providers/sources/lookmovie';
 import { remotestreamScraper } from '@/providers/sources/remotestream';
-import { superStreamScraper } from '@/providers/sources/superstream/index';
+import { showboxScraper } from '@/providers/sources/showbox/index';
+import { vidsrcScraper } from '@/providers/sources/vidsrc/index';
 import { zoechipScraper } from '@/providers/sources/zoechip';
 
+import { closeLoadScraper } from './embeds/closeload';
+import { fileMoonScraper } from './embeds/filemoon';
+import { ridooScraper } from './embeds/ridoo';
+import { smashyStreamOScraper } from './embeds/smashystream/opstream';
+import { smashyStreamFScraper } from './embeds/smashystream/video1';
+import { streamtapeScraper } from './embeds/streamtape';
+import { streamvidScraper } from './embeds/streamvid';
+import { vidCloudScraper } from './embeds/vidcloud';
+import { vidplayScraper } from './embeds/vidplay';
+import { voeScraper } from './embeds/voe';
+import { warezcdnembedHlsScraper } from './embeds/warezcdn/hls';
+import { warezcdnembedMp4Scraper } from './embeds/warezcdn/mp4';
+import { wootlyScraper } from './embeds/wootly';
+import { goojaraScraper } from './sources/goojara';
+import { hdRezkaScraper } from './sources/hdrezka';
+import { nepuScraper } from './sources/nepu';
+import { primewireScraper } from './sources/primewire';
+import { ridooMoviesScraper } from './sources/ridomovies';
+import { smashyStreamScraper } from './sources/smashystream';
+import { soaperTvScraper } from './sources/soapertv';
+import { vidSrcToScraper } from './sources/vidsrcto';
+import { warezcdnScraper } from './sources/warezcdn';
 
 export function gatherAllSources(): Array<Sourcerer> {
   // all sources are gathered here
-  return [flixhqScraper, remotestreamScraper, kissAsianScraper, superStreamScraper, goMoviesScraper, zoechipScraper];
+  return [
+    flixhqScraper,
+    remotestreamScraper,
+    kissAsianScraper,
+    showboxScraper,
+    goMoviesScraper,
+    zoechipScraper,
+    vidsrcScraper,
+    lookmovieScraper,
+    smashyStreamScraper,
+    ridooMoviesScraper,
+    vidSrcToScraper,
+    nepuScraper,
+    goojaraScraper,
+    hdRezkaScraper,
+    primewireScraper,
+    warezcdnScraper,
+    insertunitScraper,
+    soaperTvScraper,
+  ];
 }
 
 export function gatherAllEmbeds(): Array<Embed> {
   // all embeds are gathered here
-  return [upcloudScraper, mp4uploadScraper, streamsbScraper, upstreamScraper, mixdropScraper];
+  return [
+    upcloudScraper,
+    vidCloudScraper,
+    mp4uploadScraper,
+    streamsbScraper,
+    upstreamScraper,
+    febboxMp4Scraper,
+    febboxHlsScraper,
+    mixdropScraper,
+    vidsrcembedScraper,
+    streambucketScraper,
+    smashyStreamFScraper,
+    smashyStreamOScraper,
+    ridooScraper,
+    closeLoadScraper,
+    fileMoonScraper,
+    vidplayScraper,
+    wootlyScraper,
+    doodScraper,
+    streamvidScraper,
+    voeScraper,
+    streamtapeScraper,
+    droploadScraper,
+    filelionsScraper,
+    vTubeScraper,
+    warezcdnembedHlsScraper,
+    warezcdnembedMp4Scraper,
+  ];
 }
|
|||||||
import { MovieMedia, ShowMedia } from '@/main/media';
|
import { Flags } from '@/entrypoint/utils/targets';
|
||||||
import { Flags } from '@/main/targets';
|
|
||||||
import { Stream } from '@/providers/streams';
|
import { Stream } from '@/providers/streams';
|
||||||
import { EmbedScrapeContext, ScrapeContext } from '@/utils/context';
|
import { EmbedScrapeContext, MovieScrapeContext, ShowScrapeContext } from '@/utils/context';
|
||||||
|
|
||||||
export type SourcererOutput = {
|
export type MediaScraperTypes = 'show' | 'movie';
|
||||||
embeds: {
|
|
||||||
embedId: string;
|
export type SourcererEmbed = {
|
||||||
url: string;
|
embedId: string;
|
||||||
}[];
|
url: string;
|
||||||
stream?: Stream;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export type Sourcerer = {
|
export type SourcererOutput = {
|
||||||
|
embeds: SourcererEmbed[];
|
||||||
|
stream?: Stream[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type SourcererOptions = {
|
||||||
id: string;
|
id: string;
|
||||||
name: string; // displayed in the UI
|
name: string; // displayed in the UI
|
||||||
rank: number; // the higher the number, the earlier it gets put on the queue
|
rank: number; // the higher the number, the earlier it gets put on the queue
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
flags: Flags[];
|
flags: Flags[];
|
||||||
scrapeMovie?: (input: ScrapeContext & { media: MovieMedia }) => Promise<SourcererOutput>;
|
scrapeMovie?: (input: MovieScrapeContext) => Promise<SourcererOutput>;
|
||||||
scrapeShow?: (input: ScrapeContext & { media: ShowMedia }) => Promise<SourcererOutput>;
|
scrapeShow?: (input: ShowScrapeContext) => Promise<SourcererOutput>;
|
||||||
};
|
};
|
||||||
|
|
||||||
export function makeSourcerer(state: Sourcerer): Sourcerer {
|
export type Sourcerer = SourcererOptions & {
|
||||||
return state;
|
type: 'source';
|
||||||
|
disabled: boolean;
|
||||||
|
mediaTypes: MediaScraperTypes[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export function makeSourcerer(state: SourcererOptions): Sourcerer {
|
||||||
|
const mediaTypes: MediaScraperTypes[] = [];
|
||||||
|
if (state.scrapeMovie) mediaTypes.push('movie');
|
||||||
|
if (state.scrapeShow) mediaTypes.push('show');
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
type: 'source',
|
||||||
|
disabled: state.disabled ?? false,
|
||||||
|
mediaTypes,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export type EmbedOutput = {
|
export type EmbedOutput = {
|
||||||
stream: Stream;
|
stream: Stream[];
|
||||||
};
|
};
|
||||||
|
|
||||||
export type Embed = {
|
export type EmbedOptions = {
|
||||||
id: string;
|
id: string;
|
||||||
name: string; // displayed in the UI
|
name: string; // displayed in the UI
|
||||||
rank: number; // the higher the number, the earlier it gets put on the queue
|
rank: number; // the higher the number, the earlier it gets put on the queue
|
||||||
@@ -37,6 +54,17 @@ export type Embed = {
|
|||||||
scrape: (input: EmbedScrapeContext) => Promise<EmbedOutput>;
|
scrape: (input: EmbedScrapeContext) => Promise<EmbedOutput>;
|
||||||
};
|
};
|
||||||
|
|
||||||
export function makeEmbed(state: Embed): Embed {
|
export type Embed = EmbedOptions & {
|
||||||
return state;
|
type: 'embed';
|
||||||
|
disabled: boolean;
|
||||||
|
mediaTypes: undefined;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function makeEmbed(state: EmbedOptions): Embed {
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
type: 'embed',
|
||||||
|
disabled: state.disabled ?? false,
|
||||||
|
mediaTypes: undefined,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
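A minimal sketch of the new options/derived split for sources: makeSourcerer now derives type, disabled and mediaTypes from what you pass in. The id, rank and URL below are invented for illustration.

```ts
import { flags } from '@/entrypoint/utils/targets';
import { makeSourcerer } from '@/providers/base';

export const exampleScraper = makeSourcerer({
  id: 'example',
  name: 'Example',
  rank: 1,
  flags: [flags.CORS_ALLOWED],
  async scrapeMovie(ctx) {
    return {
      embeds: [{ embedId: 'example-embed', url: `https://example.com/movie/${ctx.media.tmdbId}` }],
    };
  },
});

// exampleScraper.type === 'source'
// exampleScraper.disabled === false
// exampleScraper.mediaTypes is ['movie'] because only scrapeMovie was provided
```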
src/providers/captions.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import ISO6391 from 'iso-639-1';

export const captionTypes = {
  srt: 'srt',
  vtt: 'vtt',
};
export type CaptionType = keyof typeof captionTypes;

export type Caption = {
  type: CaptionType;
  id: string; // only unique per stream
  url: string;
  hasCorsRestrictions: boolean;
  language: string;
};

export function getCaptionTypeFromUrl(url: string): CaptionType | null {
  const extensions = Object.keys(captionTypes) as CaptionType[];
  const type = extensions.find((v) => url.endsWith(`.${v}`));
  if (!type) return null;
  return type;
}

export function labelToLanguageCode(label: string): string | null {
  const code = ISO6391.getCode(label);
  if (code.length === 0) return null;
  return code;
}

export function isValidLanguageCode(code: string | null): boolean {
  if (!code) return false;
  return ISO6391.validate(code);
}

export function removeDuplicatedLanguages(list: Caption[]) {
  const beenSeen: Record<string, true> = {};

  return list.filter((sub) => {
    if (beenSeen[sub.language]) return false;
    beenSeen[sub.language] = true;
    return true;
  });
}
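A short sketch of how the caption helpers above combine; the subtitle URL and label are placeholders.

```ts
import { Caption, getCaptionTypeFromUrl, isValidLanguageCode, labelToLanguageCode } from '@/providers/captions';

function toCaption(url: string, label: string): Caption | null {
  const type = getCaptionTypeFromUrl(url); // 'srt' | 'vtt' | null, based on the file extension
  const language = labelToLanguageCode(label); // e.g. 'English' -> 'en'
  if (!type || !language || !isValidLanguageCode(language)) return null;
  return { type, id: url, url, language, hasCorsRestrictions: false };
}

toCaption('https://example.com/subs/en.vtt', 'English');
```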
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user