All changes done

MemeCornucopia
2024-01-28 15:41:40 -05:00
parent 1d4c556d78
commit 63bbd8b858
5 changed files with 3 additions and 10 deletions

@@ -24,12 +24,9 @@ import { ridooScraper } from './embeds/ridoo';
import { smashyStreamDScraper } from './embeds/smashystream/dued';
import { smashyStreamFScraper } from './embeds/smashystream/video1';
import { vidplayScraper } from './embeds/vidplay';
import { wootlyScraper } from './embeds/wootly';
import { goojaraScraper } from './sources/goojara';
import { ridooMoviesScraper } from './sources/ridomovies';
import { smashyStreamScraper } from './sources/smashystream';
import { vidSrcToScraper } from './sources/vidsrcto';

@@ -32,8 +32,6 @@ export const doodScraper = makeEmbed({
const downloadURL = `${doodPage}${nanoid()}?token=${dataForLater}${Date.now()}`;
- if (!downloadURL) throw new Error('dood source not found');
return {
  stream: [
    {

@@ -66,7 +66,6 @@ export const wootlyScraper = makeEmbed({
{
  id: 'primary',
  type: 'file',
- url,
  flags: [flags.IP_LOCKED],
  captions: [],
  qualities: {

@@ -41,7 +41,7 @@ export async function getEmbeds(ctx: ScrapeContext, id: string): Promise<EmbedsR
);
// Initialize an array to hold the results
- const results = [];
+ const results: EmbedsResult = [];
// Process each page result
for (const result of embedPages) {

@@ -11,9 +11,8 @@ import { EmbedsResult, Result, baseUrl } from './type';
let data;
- // The cookie for this headerData doesn't matter, Goojara just checks it's there. T
+ // The cookie for this headerData doesn't matter, Goojara just checks it's there.
const headersData = {
- 'content-type': 'application/x-www-form-urlencoded',
  cookie: `aGooz=t9pmkdtef1b3lg3pmo1u2re816; bd9aa48e=0d7b89e8c79844e9df07a2; _b414=2151C6B12E2A88379AFF2C0DD65AC8298DEC2BF4; 9d287aaa=8f32ad589e1c4288fe152f`,
  Referer: 'https://www.goojara.to/',
};
@@ -75,7 +74,7 @@ export async function scrapeIds(
const dataId = $1('#seon').attr('data-id');
- if (!dataId) throw NotFoundError;
+ if (!dataId) throw new NotFoundError();
data = await ctx.fetcher<string>(`/xhrc.php`, {
  baseUrl,