SuperStream

commit 87b1763761 (parent 2aa7428711)
2023-09-15 22:09:40 -05:00
8 changed files with 240 additions and 7 deletions

package-lock.json generated

@@ -12,6 +12,7 @@
"cheerio": "^1.0.0-rc.12",
"crypto-js": "^4.1.1",
"form-data": "^4.0.0",
"nanoid": "^5.0.1",
"node-fetch": "^3.3.2"
},
"devDependencies": {
@@ -4560,10 +4561,9 @@
}
},
"node_modules/nanoid": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz",
"integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==",
"dev": true,
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.1.tgz",
"integrity": "sha512-vWeVtV5Cw68aML/QaZvqN/3QQXc6fBfIieAlu05m7FZW2Dgb+3f0xc0TTxuJW+7u30t7iSDTV/j3kVI0oJqIfQ==",
"funding": [
{
"type": "github",
@@ -4571,10 +4571,10 @@
}
],
"bin": {
"nanoid": "bin/nanoid.cjs"
"nanoid": "bin/nanoid.js"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
"node": "^18 || >=20"
}
},
"node_modules/natural-compare": {
@@ -4986,6 +4986,24 @@
"node": "^10 || ^12 || >=14"
}
},
"node_modules/postcss/node_modules/nanoid": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz",
"integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/prelude-ls": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",

package.json

@@ -67,6 +67,7 @@
"cheerio": "^1.0.0-rc.12",
"crypto-js": "^4.1.1",
"form-data": "^4.0.0",
"nanoid": "^5.0.1",
"node-fetch": "^3.3.2"
}
}

src/providers/all.ts

@@ -5,10 +5,11 @@ import { upcloudScraper } from '@/providers/embeds/upcloud';
import { flixhqScraper } from '@/providers/sources/flixhq/index';
import { kissAsianScraper } from '@/providers/sources/kissasian/index';
import { remotestreamScraper } from '@/providers/sources/remotestream';
+ import { superStreamScraper } from '@/providers/sources/superstream/index';
export function gatherAllSources(): Array<Sourcerer> {
// all sources are gathered here
- return [flixhqScraper, remotestreamScraper, kissAsianScraper];
+ return [flixhqScraper, remotestreamScraper, kissAsianScraper, superStreamScraper];
}
export function gatherAllEmbeds(): Array<Embed> {

src/providers/sources/superstream/common.ts

@@ -0,0 +1,13 @@
// We do not want content scanners to notice this scraping going on, so we've hidden all constants
// The source has its origins in China, so I added some extra security with banned words
// Mayhaps a tiny bit unethical, but this source is just too good :)
// If you are copying this code, please take precautions so they do not change their API.
export const iv = atob('d0VpcGhUbiE=');
export const key = atob('MTIzZDZjZWRmNjI2ZHk1NDIzM2FhMXc2');
export const apiUrls = [
atob('aHR0cHM6Ly9zaG93Ym94LnNoZWd1Lm5ldC9hcGkvYXBpX2NsaWVudC9pbmRleC8='),
atob('aHR0cHM6Ly9tYnBhcGkuc2hlZ3UubmV0L2FwaS9hcGlfY2xpZW50L2luZGV4Lw=='),
];
export const appKey = atob('bW92aWVib3g=');
export const appId = atob('Y29tLnRkby5zaG93Ym94');
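
The constants above are plain base64 literals read back with atob at runtime. A minimal sketch of how a new one would be produced, assuming the same btoa/atob globals the file already relies on (the helper name encodeConstant is hypothetical and not part of the commit):

// Hypothetical helper: generate the base64 literal for a new hidden constant,
// then paste the output into common.ts as atob('<literal>').
export function encodeConstant(plain: string): string {
  return btoa(plain); // e.g. btoa('example-value') === 'ZXhhbXBsZS12YWx1ZQ=='
}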

src/providers/sources/superstream/crypto.ts

@@ -0,0 +1,16 @@
import CryptoJS from 'crypto-js';
import { iv, key } from './common';
export function encrypt(str: string) {
return CryptoJS.TripleDES.encrypt(str, CryptoJS.enc.Utf8.parse(key), {
iv: CryptoJS.enc.Utf8.parse(iv),
}).toString();
}
// Request signature: md5(md5(str2) + str3 + str); returns null when str is empty
export function getVerify(str: string, str2: string, str3: string) {
if (str) {
return CryptoJS.MD5(CryptoJS.MD5(str2).toString() + str3 + str).toString();
}
return null;
}
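
A hypothetical decrypt counterpart to the helpers above, handy when inspecting what actually gets posted; it mirrors encrypt with the same key and IV in CryptoJS's default CBC/PKCS7 mode and is not part of the commit:

import CryptoJS from 'crypto-js';
import { iv, key } from './common';

// Hypothetical helper: reverse of encrypt() above.
export function decrypt(cipherText: string): string {
  return CryptoJS.TripleDES.decrypt(cipherText, CryptoJS.enc.Utf8.parse(key), {
    iv: CryptoJS.enc.Utf8.parse(iv),
  }).toString(CryptoJS.enc.Utf8);
}
// Round trip: decrypt(encrypt('{"module":"Search3"}')) === '{"module":"Search3"}'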

src/providers/sources/superstream/getStreamQualities.ts

@@ -0,0 +1,32 @@
import { StreamFile } from '@/providers/streams';
import { ScrapeContext } from '@/utils/context';
import { sendRequest } from './sendRequest';
import { allowedQualities } from '.';
export async function getStreamQualities(ctx: ScrapeContext, apiQuery: object) {
const mediaRes: { list: { path: string; real_quality: string }[] } = (await sendRequest(ctx, apiQuery)).data;
ctx.progress(66);
const qualityMap = mediaRes.list
.filter((file) => allowedQualities.includes(file.real_quality.replace('p', '')))
.map((file) => ({
url: file.path,
quality: file.real_quality.replace('p', ''),
}));
const qualities: Record<string, StreamFile> = {};
allowedQualities.forEach((quality) => {
const foundQuality = qualityMap.find((q) => q.quality === quality);
if (foundQuality) {
qualities[quality] = {
type: 'mp4',
url: foundQuality.url,
};
}
});
return qualities;
}
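
A worked example of the filter and map steps above, with made-up URLs rather than real API output:

// Sample input (hypothetical values):
const sample = [
  { path: 'https://cdn.example/video-720.mp4', real_quality: '720p' },
  { path: 'https://cdn.example/video-4k.mp4', real_quality: '4K' }, // dropped: '4K' is not in allowedQualities
];
// Only the 720p file survives, so the returned record would be:
// { '720': { type: 'mp4', url: 'https://cdn.example/video-720.mp4' } }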

src/providers/sources/superstream/index.ts

@@ -0,0 +1,96 @@
import { flags } from '@/main/targets';
import { makeSourcerer } from '@/providers/base';
import { compareTitle } from '@/utils/compare';
import { NotFoundError } from '@/utils/errors';
import { getStreamQualities } from './getStreamQualities';
import { sendRequest } from './sendRequest';
export const allowedQualities = ['360', '480', '720', '1080'];
export const superStreamScraper = makeSourcerer({
id: 'superstream',
name: 'Superstream',
rank: 300,
flags: [flags.NO_CORS],
async scrapeShow(ctx) {
const searchQuery = {
module: 'Search3',
page: '1',
type: 'all',
keyword: ctx.media.title,
pagelimit: '20',
};
const searchRes = (await sendRequest(ctx, searchQuery, true)).data;
ctx.progress(33);
const superstreamEntry = searchRes.find(
(res: any) => compareTitle(res.title, ctx.media.title) && res.year === Number(ctx.media.releaseYear),
);
if (!superstreamEntry) throw new NotFoundError('No entry found');
const superstreamId = superstreamEntry.id;
// Fetch requested episode
const apiQuery = {
uid: '',
module: 'TV_downloadurl_v3',
tid: superstreamId,
season: ctx.media.season.number,
episode: ctx.media.episode.number,
oss: '1',
group: '',
};
const qualities = await getStreamQualities(ctx, apiQuery);
return {
embeds: [],
stream: {
qualities,
type: 'file',
flags: [flags.NO_CORS],
},
};
},
async scrapeMovie(ctx) {
const searchQuery = {
module: 'Search3',
page: '1',
type: 'all',
keyword: ctx.media.title,
pagelimit: '20',
};
const searchRes = (await sendRequest(ctx, searchQuery, true)).data;
ctx.progress(33);
const superstreamEntry = searchRes.find(
(res: any) => compareTitle(res.title, ctx.media.title) && res.year === Number(ctx.media.releaseYear),
);
if (!superstreamEntry) throw new NotFoundError('No entry found');
const superstreamId = superstreamEntry.id;
// Fetch requested movie
const apiQuery = {
uid: '',
module: 'Movie_downloadurl_v3',
mid: superstreamId,
oss: '1',
group: '',
};
const qualities = await getStreamQualities(ctx, apiQuery);
return {
embeds: [],
stream: {
qualities,
type: 'file',
flags: [flags.NO_CORS],
},
};
},
});
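
Both scrapeShow and scrapeMovie repeat the same search step before building their download query. A possible extraction of that shared logic, using only imports already present in these files (the helper name findSuperstreamId is hypothetical, not part of the commit):

import { compareTitle } from '@/utils/compare';
import type { ScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';
import { sendRequest } from './sendRequest';

// Hypothetical shared search helper: returns the Superstream id for a title/year pair.
async function findSuperstreamId(ctx: ScrapeContext, title: string, releaseYear: number) {
  const searchQuery = {
    module: 'Search3',
    page: '1',
    type: 'all',
    keyword: title,
    pagelimit: '20',
  };
  const searchRes = (await sendRequest(ctx, searchQuery, true)).data;
  ctx.progress(33);
  const entry = searchRes.find(
    (res: any) => compareTitle(res.title, title) && res.year === Number(releaseYear),
  );
  if (!entry) throw new NotFoundError('No entry found');
  return entry.id;
}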

src/providers/sources/superstream/sendRequest.ts

@@ -0,0 +1,56 @@
import CryptoJS from 'crypto-js';
import { customAlphabet } from 'nanoid';
import type { ScrapeContext } from '@/utils/context';
import { apiUrls, appId, appKey, key } from './common';
import { encrypt, getVerify } from './crypto';
// 32-character lowercase-hex ids, appended to each request body as a throwaway token
const nanoid = customAlphabet('0123456789abcdef', 32);
// Requests carry an expiry timestamp 12 hours in the future
const expiry = () => Math.floor(Date.now() / 1000 + 60 * 60 * 12);
export const sendRequest = async (ctx: ScrapeContext, data: object, altApi = false) => {
const defaultData = {
childmode: '0',
app_version: '11.5',
appid: appId,
lang: 'en',
expired_date: `${expiry()}`,
platform: 'android',
channel: 'Website',
};
const encryptedData = encrypt(
JSON.stringify({
...defaultData,
...data,
}),
);
const appKeyHash = CryptoJS.MD5(appKey).toString();
const verify = getVerify(encryptedData, appKey, key);
const body = JSON.stringify({
app_key: appKeyHash,
verify,
encrypt_data: encryptedData,
});
const base64body = btoa(body);
const formatted = new URLSearchParams();
formatted.append('data', base64body);
formatted.append('appid', '27');
formatted.append('platform', 'android');
formatted.append('version', '129');
formatted.append('medium', 'Website');
const requestUrl = altApi ? apiUrls[1] : apiUrls[0];
const response = await ctx.proxiedFetcher<string>(requestUrl, {
method: 'POST',
headers: {
Platform: 'android',
'Content-Type': 'application/x-www-form-urlencoded',
},
body: `${formatted.toString()}&token${nanoid()}`,
});
return JSON.parse(response);
};
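
Illustrative call, mirroring how index.ts uses this helper; it assumes a ScrapeContext ctx from the scraper runtime, and the keyword value is an example only:

// Search against the alternate API host, the same way the scrapers above do.
const searchRes = await sendRequest(
  ctx,
  { module: 'Search3', page: '1', type: 'all', keyword: 'Some Title', pagelimit: '20' },
  true,
);
// The parsed response exposes its payload under `data`, e.g. an array of { id, title, year, ... } for searches.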