mirror of
https://github.com/shafat-96/anime-mapper
synced 2026-04-17 15:51:45 +00:00
fix
This commit is contained in:
@@ -14,7 +14,7 @@ export const ANILIST_QUERY = `
|
||||
}
|
||||
}
|
||||
`;
|
||||
export const HIANIME_URL = 'https://hianimez.to';
|
||||
export const HIANIME_URL = 'https://hianime.to';
|
||||
export const ANIZIP_URL = 'https://api.ani.zip/mappings';
|
||||
|
||||
export default {
|
||||
|
||||
173
src/extractors/kwik.js
Normal file
173
src/extractors/kwik.js
Normal file
@@ -0,0 +1,173 @@
|
||||
import axios from 'axios';
|
||||
|
||||
// User-Agent header (mobile Chrome) sent with every Kwik page request below.
const kwikUserAgent = "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Mobile Safari/537.36";
|
||||
|
||||
/**
 * Extracts the direct video URL from a Kwik embed page.
 *
 * Fetches the page, locates the packed `eval(function(p,a,c,k,e,d)...)`
 * payload, unpacks it, and pulls the `source=` URL out of the result.
 *
 * @param {string} kwikUrl - URL of the Kwik embed page.
 * @param {string} [referer] - Ignored; the Kwik URL's own origin is always used.
 * @returns {Promise<{url: string, isM3U8: boolean}>} Resolved video link info.
 * @throws {Error} If the URL is missing or the page cannot be parsed.
 */
export async function extractKwik(kwikUrl, referer) {
    if (!kwikUrl) {
        throw new Error("missing kwik URL");
    }

    // Always use the origin of the kwik URL as Referer, regardless of passed-in value
    // mimicking: if u, err := url.Parse(kwikURL); err == nil { referer = u.Scheme + "://" + u.Host + "/" }
    const urlObj = new URL(kwikUrl);
    const refinedReferer = `${urlObj.protocol}//${urlObj.host}/`;

    const response = await axios.get(kwikUrl, {
        headers: {
            'User-Agent': kwikUserAgent,
            'Referer': refinedReferer,
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
        }
    });

    const html = response.data;

    // Find the packed eval JS - look for eval(...) containing m3u8.
    // NOTE(review): no `s` flag, so the packed payload is assumed to sit on
    // a single line — confirm against a live Kwik page if this starts failing.
    const jsMatch = html.match(/;(eval\(function\(p,a,c,k,e,d\).*?m3u8.*?\)\))/);
    if (!jsMatch) {
        throw new Error("could not find eval JS pattern in Kwik page");
    }

    const jsCode = jsMatch[1];

    // The packer's argument list sits between the final "}(" and closing "))".
    const lastBraceIdx = jsCode.lastIndexOf("}(");
    if (lastBraceIdx === -1) {
        throw new Error("could not find argument start marker '}('");
    }

    const endIdx = jsCode.lastIndexOf("))");
    if (endIdx === -1 || endIdx <= lastBraceIdx) {
        throw new Error("could not find argument end marker '))'");
    }

    const stripped = jsCode.substring(lastBraceIdx + 2, endIdx);

    const parts = parsePackedArgs(stripped);
    if (parts.length < 4) {
        throw new Error(`invalid packed data: expected at least 4 parts, got ${parts.length}`);
    }

    const p = parts[0];
    const a = Number.parseInt(parts[1], 10);
    const c = Number.parseInt(parts[2], 10);

    // The keyword list may carry a trailing `.split('|')` call; strip it
    // before splitting on '|'.
    let kStr = parts[3];
    kStr = kStr.replace(/\.split\(['"]\|['"]\)$/, "");
    const k = kStr.split("|");

    let decoded = unpackKwik(p, a, c, k);

    // Clean escape artifacts introduced by the packing.
    decoded = decoded.replace(/\\/g, "");
    decoded = decoded.replace("https.split(://", "https://");
    decoded = decoded.replace("http.split(://", "http://");

    const srcMatch = decoded.match(/source=(https?:\/\/[^;]+)/);
    if (!srcMatch) {
        throw new Error("could not find video URL in unpacked code");
    }

    const videoURL = cleanKwikURL(srcMatch[1]);
    return {
        url: videoURL,
        isM3U8: videoURL.includes(".m3u8"),
    };
}
|
||||
|
||||
/**
 * Reverses Dean Edwards style p.a.c.k.e.r. packing.
 *
 * Rebuilds the keyword lookup table for every encoded index and substitutes
 * each word token in the payload with its decoded value.
 *
 * @param {string} p - Packed payload string.
 * @param {number} a - Radix used when the payload was packed.
 * @param {number} c - Number of keywords.
 * @param {string[]} k - Keyword list.
 * @returns {string} The unpacked source text.
 */
function unpackKwik(p, a, c, k) {
    const alphabet = "0123456789abcdefghijklmnopqrstuvwxyz";
    const lookup = {};

    // Encode an index the same way the packer did: base-`a` digits, with
    // remainders above 35 mapped onto ASCII via charCode(rem + 29).
    const encodeIndex = (n) => {
        let out = "";
        do {
            const rem = n % a;
            out = (rem > 35 ? String.fromCharCode(rem + 29) : alphabet[rem]) + out;
            n = Math.floor(n / a);
        } while (n > 0);
        return out;
    };

    for (let idx = c - 1; idx >= 0; idx--) {
        const token = encodeIndex(idx);
        const hasWord = idx < k.length && k[idx] !== "";
        lookup[token] = hasWord ? k[idx] : token;
    }

    // Swap every word token in the payload for its dictionary entry.
    return p.replace(/\b\w+\b/g, (word) =>
        Object.prototype.hasOwnProperty.call(lookup, word) ? lookup[word] : word
    );
}
|
||||
|
||||
/**
 * Splits the packer's argument list on top-level commas.
 *
 * Tracks quote state and bracket depth so commas inside quoted strings or
 * nested brackets do not split an argument. The quote characters themselves
 * are dropped from the output, mirroring the original Go implementation.
 *
 * @param {string} input - Raw argument text taken from between "}(" and "))".
 * @returns {string[]} Trimmed argument strings.
 */
function parsePackedArgs(input) {
    const args = [];
    let buffer = "";
    let activeQuote = null; // quote char we are currently inside, or null
    let bracketDepth = 0;

    for (const ch of input) {
        if (activeQuote !== null) {
            if (ch === activeQuote) {
                // Closing quote — not emitted into the argument text.
                activeQuote = null;
                continue;
            }
            buffer += ch;
            continue;
        }

        if (ch === "'" || ch === '"') {
            // Opening quote — not emitted into the argument text.
            activeQuote = ch;
            continue;
        }

        if (ch === "," && bracketDepth === 0) {
            args.push(buffer.trim());
            buffer = "";
            continue;
        }

        if (ch === "(" || ch === "[" || ch === "{") {
            bracketDepth++;
        } else if ((ch === ")" || ch === "]" || ch === "}") && bracketDepth > 0) {
            bracketDepth--;
        }

        buffer += ch;
    }

    if (buffer !== "") {
        args.push(buffer.trim());
    }
    return args;
}
|
||||
|
||||
/**
 * Normalizes a video URL scraped out of unpacked Kwik code.
 *
 * Unescapes "\/" sequences, strips one surrounding quote at each end,
 * removes whitespace characters anywhere in the string, and drops everything
 * from the first ";" onward.
 *
 * @param {string} u - Raw URL text.
 * @returns {string} Cleaned URL.
 */
function cleanKwikURL(u) {
    const normalized = u
        .replace(/\\\//g, "/")       // "\/" -> "/"
        .replace(/^["']|["']$/g, '') // drop a leading/trailing quote
        .replace(/[\n\r\t ]/g, '');  // strip whitespace characters
    // Anything after the first semicolon is not part of the URL.
    return normalized.split(";")[0];
}
|
||||
124
src/extractors/megacloud.js
Normal file
124
src/extractors/megacloud.js
Normal file
@@ -0,0 +1,124 @@
|
||||
import axios from 'axios';
|
||||
import { client } from '../utils/client.js';
|
||||
|
||||
/**
 * Extractor for MegaCloud-hosted streams (megacloud.blog embeds).
 *
 * Flow: fetch the embed page, derive a nonce from it, call the getSources
 * API, and — when the returned file is not already an m3u8 URL — decrypt it
 * through an external Google Apps Script using a community-maintained key.
 */
class MegaCloudExtractor {
    constructor() {
        this.mainUrl = "https://megacloud.blog";
        // External decryptor endpoint (Google Apps Script).
        this.scriptUrl = "https://script.google.com/macros/s/AKfycbxHbYHbrGMXYD2-bC-C43D3njIbU-wGiYQuJL61H4vyy6YVXkybMNNEPJNPPuZrD1gRVA/exec";
        // Community-maintained key file; only its "mega" entry is used below.
        this.keysUrl = "https://raw.githubusercontent.com/yogesh-hacker/MegacloudKeys/refs/heads/main/keys.json";
    }

    /**
     * Extracts playable sources and subtitle tracks from a MegaCloud embed URL.
     *
     * @param {string} videoUrl - Full embed URL, e.g. ".../embed-2/e-1/<id>?k=1".
     * @returns {Promise<{sources: Array, tracks: Array, intro: Object, outro: Object, headers: Object}>}
     * @throws {Error} When the nonce, sources, or decrypted URL cannot be found.
     */
    async extract(videoUrl) {
        try {
            const embedUrl = new URL(videoUrl);
            const headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:140.0) Gecko/20100101 Firefox/140.0",
                "Accept": "*/*",
                "Accept-Language": "en-US,en;q=0.5",
                "Origin": this.mainUrl,
                "Referer": `${this.mainUrl}/`,
            };

            // 1. Fetch Embed Page
            const { data: html } = await client.get(videoUrl, { headers });

            // 2. Extract Nonce — either one 48-char token, or three 16-char
            // tokens concatenated in page order.
            // NOTE(review): the fallback regex has no `s` flag, so all three
            // 16-char tokens must sit on one line — confirm against live pages.
            let nonce = null;
            const match1 = html.match(/\b[a-zA-Z0-9]{48}\b/);
            if (match1) {
                nonce = match1[0];
            } else {
                const match2 = html.match(/\b([a-zA-Z0-9]{16})\b.*?\b([a-zA-Z0-9]{16})\b.*?\b([a-zA-Z0-9]{16})\b/);
                if (match2) {
                    nonce = match2[1] + match2[2] + match2[3];
                }
            }

            if (!nonce) throw new Error("Nonce not found");

            // 3. Get Sources
            // e.g. https://megacloud.blog/embed-2/e-1/VJq4nDSaJyzH?k=1 -> ID: VJq4nDSaJyzH
            const id = embedUrl.pathname.split('/').pop();

            const apiUrl = `${this.mainUrl}/embed-2/v3/e-1/getSources?id=${id}&_k=${nonce}`;
            const { data: response } = await client.get(apiUrl, {
                headers: {
                    ...headers,
                    "X-Requested-With": "XMLHttpRequest",
                    "Referer": this.mainUrl
                }
            });

            if (!response.sources || response.sources.length === 0) {
                throw new Error("No sources found");
            }

            const encodedFile = response.sources[0].file;
            let m3u8Url = "";

            // A file containing ".m3u8" is already a plain URL; anything else
            // is treated as encrypted payload.
            if (encodedFile.includes(".m3u8")) {
                m3u8Url = encodedFile;
            } else {
                // 4. Decrypt via Google Script
                const { data: keyData } = await axios.get(this.keysUrl);
                const secret = keyData.mega;

                const params = new URLSearchParams();
                params.append("encrypted_data", encodedFile);
                params.append("nonce", nonce);
                params.append("secret", secret);

                const decryptUrl = `${this.scriptUrl}?${params.toString()}`;

                // Fetch text response
                const { data: decryptedResponse } = await axios.get(decryptUrl, { responseType: 'text' });

                // Kotlin Regex: "\"file\":\"(.*?)\""
                // Handling potentially weird JSON structure or escaped strings
                const textContent = typeof decryptedResponse === 'string' ? decryptedResponse : JSON.stringify(decryptedResponse);
                const fileMatch = textContent.match(/"file":"(.*?)"/);

                if (fileMatch && fileMatch[1]) {
                    // Clean up URL if needed (remove escape slashes)
                    m3u8Url = fileMatch[1].replace(/\\/g, '');
                } else {
                    throw new Error("Video URL not found in decrypted response");
                }
            }

            // 5. Build Result — only caption/subtitle tracks are forwarded.
            const tracks = [];
            if (response.tracks) {
                response.tracks.forEach(track => {
                    if (track.kind === "captions" || track.kind === "subtitles") {
                        tracks.push({
                            url: track.file,
                            lang: track.label || track.kind,
                            label: track.label
                        });
                    }
                });
            }

            return {
                sources: [{
                    url: m3u8Url,
                    isM3U8: true
                }],
                tracks: tracks,
                intro: response.intro || { start: 0, end: 0 },
                outro: response.outro || { start: 0, end: 0 },
                headers: {
                    Referer: this.mainUrl,
                    "User-Agent": headers["User-Agent"]
                }
            };

        } catch (error) {
            console.error("MegaCloud extraction failed:", error.message);
            throw error;
        }
    }
}
|
||||
|
||||
export const megaCloudExtractor = new MegaCloudExtractor();
|
||||
14
src/index.js
14
src/index.js
@@ -1,6 +1,6 @@
|
||||
import express from 'express';
|
||||
import { ANIME } from '@consumet/extensions';
|
||||
import { mapAnilistToAnimePahe, mapAnilistToHiAnime, mapAnilistToAnimeKai } from './mappers/index.js';
|
||||
import { AnimePahe } from './providers/animepahe.js';
|
||||
import { AniList } from './providers/anilist.js';
|
||||
import { AnimeKai } from './providers/animekai.js';
|
||||
import { getEpisodeServers, getEpisodeSources } from './providers/hianime-servers.js';
|
||||
@@ -158,10 +158,10 @@ app.get('/animepahe/sources/:session/:episodeId', cache('15 minutes'), async (re
|
||||
const fullEpisodeId = `${session}/${episodeId}`;
|
||||
|
||||
// Initialize a new AnimePahe instance each time
|
||||
const consumetAnimePahe = new ANIME.AnimePahe();
|
||||
const animePahe = new AnimePahe();
|
||||
|
||||
// Directly fetch and return the sources without modification
|
||||
const sources = await consumetAnimePahe.fetchEpisodeSources(fullEpisodeId);
|
||||
const sources = await animePahe.fetchEpisodeSources(fullEpisodeId);
|
||||
|
||||
// Simply return the sources directly as provided by Consumet
|
||||
return res.status(200).json(sources);
|
||||
@@ -186,10 +186,10 @@ app.get('/animepahe/sources/:id', cache('15 minutes'), async (req, res) => {
|
||||
}
|
||||
|
||||
// Initialize a new AnimePahe instance each time
|
||||
const consumetAnimePahe = new ANIME.AnimePahe();
|
||||
const animePahe = new AnimePahe();
|
||||
|
||||
// Directly fetch and return the sources without modification
|
||||
const sources = await consumetAnimePahe.fetchEpisodeSources(episodeId);
|
||||
const sources = await animePahe.fetchEpisodeSources(episodeId);
|
||||
|
||||
// Simply return the sources directly as provided by Consumet
|
||||
return res.status(200).json(sources);
|
||||
@@ -239,8 +239,8 @@ app.get('/animepahe/hls/:anilistId/:episode', cache('15 minutes'), async (req, r
|
||||
}
|
||||
|
||||
// Now fetch the sources for this episode
|
||||
const consumetAnimePahe = new ANIME.AnimePahe();
|
||||
const sources = await consumetAnimePahe.fetchEpisodeSources(targetEpisode.episodeId);
|
||||
const animePahe = new AnimePahe();
|
||||
const sources = await animePahe.fetchEpisodeSources(targetEpisode.episodeId);
|
||||
|
||||
// Return the sources directly
|
||||
return res.status(200).json({
|
||||
|
||||
@@ -1,44 +1,27 @@
|
||||
import { AniList } from '../providers/anilist.js';
|
||||
import { AnimeKai } from '../providers/animekai.js';
|
||||
|
||||
/**
|
||||
* Maps an Anilist anime to AnimeKai
|
||||
* @param {string|number} anilistId - The AniList ID to map
|
||||
* @returns {Promise<Object>} The mapping result with episodes
|
||||
*/
|
||||
export async function mapAnilistToAnimeKai(anilistId) {
|
||||
const mapper = new AnimeKaiMapper();
|
||||
return await mapper.mapAnilistToAnimeKai(anilistId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mapper class that provides mapping between Anilist and AnimeKai
|
||||
*/
|
||||
export class AnimeKaiMapper {
|
||||
constructor() {
|
||||
this.anilist = new AniList();
|
||||
this.animeKai = new AnimeKai();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps an Anilist anime to AnimeKai content
|
||||
* @param {string|number} anilistId - The AniList ID to map
|
||||
*/
|
||||
async mapAnilistToAnimeKai(anilistId) {
|
||||
try {
|
||||
// Get anime info from AniList
|
||||
const animeInfo = await this.anilist.getAnimeInfo(parseInt(anilistId));
|
||||
|
||||
if (!animeInfo) {
|
||||
throw new Error(`Anime with id ${anilistId} not found on AniList`);
|
||||
}
|
||||
|
||||
// Search for the anime on AnimeKai using the title
|
||||
const searchTitle = animeInfo.title.english || animeInfo.title.romaji || animeInfo.title.userPreferred;
|
||||
if (!searchTitle) {
|
||||
throw new Error('No title available for the anime');
|
||||
}
|
||||
|
||||
const searchResults = await this.animeKai.search(searchTitle);
|
||||
if (!searchResults || !searchResults.results || searchResults.results.length === 0) {
|
||||
return {
|
||||
@@ -47,8 +30,6 @@ export class AnimeKaiMapper {
|
||||
animekai: null
|
||||
};
|
||||
}
|
||||
|
||||
// Find the best match from search results
|
||||
const bestMatch = this.findBestMatch(searchTitle, animeInfo, searchResults.results);
|
||||
if (!bestMatch) {
|
||||
return {
|
||||
@@ -57,8 +38,6 @@ export class AnimeKaiMapper {
|
||||
animekai: null
|
||||
};
|
||||
}
|
||||
|
||||
// Get detailed info for the best match
|
||||
const animeDetails = await this.animeKai.fetchAnimeInfo(bestMatch.id);
|
||||
|
||||
return {
|
||||
@@ -86,30 +65,16 @@ export class AnimeKaiMapper {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the best match from search results
|
||||
* @param {string} searchTitle - The search title
|
||||
* @param {Object} animeInfo - The AniList anime info
|
||||
* @param {Array} results - The search results
|
||||
* @returns {Object|null} The best match or null if no good match found
|
||||
*/
|
||||
findBestMatch(searchTitle, animeInfo, results) {
|
||||
if (!results || results.length === 0) return null;
|
||||
|
||||
// Normalize titles for comparison
|
||||
const normalizeTitle = title => title.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
const normalizedSearch = normalizeTitle(searchTitle);
|
||||
|
||||
// Extract year from AniList title if present
|
||||
let year = null;
|
||||
if (animeInfo.startDate && animeInfo.startDate.year) {
|
||||
year = animeInfo.startDate.year;
|
||||
} else if (animeInfo.seasonYear) {
|
||||
year = animeInfo.seasonYear;
|
||||
}
|
||||
|
||||
// First try: find exact title match
|
||||
for (const result of results) {
|
||||
const resultTitle = normalizeTitle(result.title);
|
||||
const japaneseTitle = result.japaneseTitle ? normalizeTitle(result.japaneseTitle) : '';
|
||||
@@ -118,25 +83,19 @@ export class AnimeKaiMapper {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Second try: find partial match with proper episode count match
|
||||
const expectedEpisodes = animeInfo.episodes || 0;
|
||||
for (const result of results) {
|
||||
const resultTitle = normalizeTitle(result.title);
|
||||
const japaneseTitle = result.japaneseTitle ? normalizeTitle(result.japaneseTitle) : '';
|
||||
|
||||
// Check if this is likely the right anime by comparing episode count
|
||||
if (result.episodes === expectedEpisodes && expectedEpisodes > 0) {
|
||||
if (resultTitle.includes(normalizedSearch) ||
|
||||
normalizedSearch.includes(resultTitle) ||
|
||||
japaneseTitle.includes(normalizedSearch) ||
|
||||
normalizedSearch.includes(japaneseTitle)) {
|
||||
normalizedSearch.includes(resultTitle) ||
|
||||
japaneseTitle.includes(normalizedSearch) ||
|
||||
normalizedSearch.includes(japaneseTitle)) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Final fallback: just return the first result
|
||||
return results[0];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +1,25 @@
|
||||
import { AniList } from '../providers/anilist.js';
|
||||
import { AnimePahe } from '../providers/animepahe.js';
|
||||
|
||||
/**
|
||||
* Maps an Anilist anime to AnimePahe content
|
||||
*/
|
||||
export async function mapAnilistToAnimePahe(anilistId) {
|
||||
const mapper = new AnimepaheMapper();
|
||||
return await mapper.mapAnilistToAnimePahe(anilistId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mapper class that provides mapping between Anilist and AnimePahe
|
||||
*/
|
||||
export class AnimepaheMapper {
|
||||
constructor() {
|
||||
this.anilist = new AniList();
|
||||
this.animePahe = new AnimePahe();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps an Anilist anime to AnimePahe content
|
||||
*/
|
||||
async mapAnilistToAnimePahe(anilistId) {
|
||||
try {
|
||||
// Get anime info from AniList
|
||||
const animeInfo = await this.anilist.getAnimeInfo(parseInt(anilistId));
|
||||
|
||||
if (!animeInfo) {
|
||||
throw new Error(`Anime with id ${anilistId} not found on AniList`);
|
||||
}
|
||||
|
||||
// Try to find matching content on AnimePahe
|
||||
const bestMatch = await this.findAnimePaheMatch(animeInfo);
|
||||
|
||||
if (!bestMatch) {
|
||||
@@ -40,10 +29,8 @@ export class AnimepaheMapper {
|
||||
};
|
||||
}
|
||||
|
||||
// Get episode data for the matched anime
|
||||
const episodeData = await this.getAnimePaheEpisodes(bestMatch);
|
||||
|
||||
// Return the mapped result
|
||||
return {
|
||||
id: animeInfo.id,
|
||||
animepahe: {
|
||||
@@ -65,20 +52,12 @@ export class AnimepaheMapper {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the matching AnimePahe content for an AniList anime
|
||||
*/
|
||||
async findAnimePaheMatch(animeInfo) {
|
||||
// Only use one primary title to reduce API calls
|
||||
let bestTitle = animeInfo.title.romaji || animeInfo.title.english || animeInfo.title.userPreferred;
|
||||
const titleType = animeInfo.title.romaji ? 'romaji' : (animeInfo.title.english ? 'english' : 'userPreferred');
|
||||
|
||||
// First search attempt
|
||||
const searchResults = await this.animePahe.scrapeSearchResults(bestTitle);
|
||||
|
||||
// Process results if we found any
|
||||
if (searchResults && searchResults.length > 0) {
|
||||
// First try direct ID match (fastest path)
|
||||
const rawId = animeInfo.id.toString();
|
||||
for (const result of searchResults) {
|
||||
const resultId = (result.id || '').split('-')[0];
|
||||
@@ -87,11 +66,8 @@ export class AnimepaheMapper {
|
||||
}
|
||||
}
|
||||
|
||||
// If no direct ID match, find the best match with our algorithm
|
||||
return this.findBestMatchFromResults(animeInfo, searchResults);
|
||||
}
|
||||
|
||||
// If no results found, try a fallback search with a more generic title
|
||||
const genericTitle = this.getGenericTitle(animeInfo);
|
||||
|
||||
if (genericTitle && genericTitle !== bestTitle) {
|
||||
@@ -105,13 +81,8 @@ export class AnimepaheMapper {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the best match from available search results
|
||||
*/
|
||||
findBestMatchFromResults(animeInfo, results) {
|
||||
if (!results || results.length === 0) return null;
|
||||
|
||||
// Normalize titles just once to avoid repeating work
|
||||
const normalizeTitle = t => t.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
const anilistTitles = [
|
||||
animeInfo.title.romaji,
|
||||
@@ -119,19 +90,15 @@ export class AnimepaheMapper {
|
||||
animeInfo.title.userPreferred
|
||||
].filter(Boolean).map(normalizeTitle);
|
||||
|
||||
// Prepare year information
|
||||
const anilistYear =
|
||||
(animeInfo.startDate && animeInfo.startDate.year) ?
|
||||
animeInfo.startDate.year : animeInfo.seasonYear;
|
||||
animeInfo.startDate.year : animeInfo.seasonYear;
|
||||
|
||||
const animeYear = anilistYear || this.extractYearFromTitle(animeInfo);
|
||||
|
||||
// Process matches sequentially with early returns
|
||||
let bestMatch = null;
|
||||
|
||||
// Try exact title match with year (highest priority)
|
||||
if (animeYear) {
|
||||
// Find matches with exact year
|
||||
const yearMatches = [];
|
||||
for (const result of results) {
|
||||
const resultYear = result.year ? parseInt(result.year) : this.extractYearFromTitle(result);
|
||||
@@ -140,23 +107,20 @@ export class AnimepaheMapper {
|
||||
}
|
||||
}
|
||||
|
||||
// If we have year matches, try to find the best title match among them
|
||||
if (yearMatches.length > 0) {
|
||||
for (const match of yearMatches) {
|
||||
const resultTitle = normalizeTitle(match.title || match.name);
|
||||
|
||||
// First try: exact title match with year
|
||||
for (const title of anilistTitles) {
|
||||
if (!title) continue;
|
||||
|
||||
if (resultTitle === title ||
|
||||
(resultTitle.includes(title) && title.length > 7) ||
|
||||
(title.includes(resultTitle) && resultTitle.length > 7)) {
|
||||
return match; // Early return for best match
|
||||
(resultTitle.includes(title) && title.length > 7) ||
|
||||
(title.includes(resultTitle) && resultTitle.length > 7)) {
|
||||
return match;
|
||||
}
|
||||
}
|
||||
|
||||
// Second try: high similarity title match with year
|
||||
for (const title of anilistTitles) {
|
||||
if (!title) continue;
|
||||
|
||||
@@ -170,15 +134,12 @@ export class AnimepaheMapper {
|
||||
if (bestMatch) break;
|
||||
}
|
||||
|
||||
// If we found a title similarity match with year, return it
|
||||
if (bestMatch) return bestMatch;
|
||||
|
||||
// Otherwise use the first year match as a fallback
|
||||
return yearMatches[0];
|
||||
}
|
||||
}
|
||||
|
||||
// Try exact title match
|
||||
for (const result of results) {
|
||||
const resultTitle = normalizeTitle(result.title || result.name);
|
||||
|
||||
@@ -186,22 +147,17 @@ export class AnimepaheMapper {
|
||||
if (!title) continue;
|
||||
|
||||
if (resultTitle === title) {
|
||||
return result; // Early return for exact title match
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try high similarity title match
|
||||
bestMatch = this.findBestSimilarityMatch(anilistTitles, results);
|
||||
if (bestMatch) return bestMatch;
|
||||
|
||||
// Just use the first result as a fallback
|
||||
return results[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the best match based on title similarity
|
||||
*/
|
||||
findBestSimilarityMatch(titles, results) {
|
||||
const normalizeTitle = t => t.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
let bestMatch = null;
|
||||
@@ -221,13 +177,9 @@ export class AnimepaheMapper {
|
||||
}
|
||||
}
|
||||
|
||||
// Only return if we have a reasonably good match
|
||||
return highestSimilarity > 0.6 ? bestMatch : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the AnimePahe episodes for a match
|
||||
*/
|
||||
async getAnimePaheEpisodes(match) {
|
||||
try {
|
||||
const episodeData = await this.animePahe.scrapeEpisodes(match.id);
|
||||
@@ -241,47 +193,32 @@ export class AnimepaheMapper {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate similarity between two titles
|
||||
*/
|
||||
calculateTitleSimilarity(title1, title2) {
|
||||
if (!title1 || !title2) return 0;
|
||||
|
||||
// Normalize both titles
|
||||
const norm1 = title1.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
const norm2 = title2.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Exact match is best
|
||||
if (norm1 === norm2) return 1;
|
||||
|
||||
// Split into words
|
||||
const words1 = norm1.split(' ').filter(Boolean);
|
||||
const words2 = norm2.split(' ').filter(Boolean);
|
||||
|
||||
// Count common words
|
||||
const commonCount = words1.filter(w => words2.includes(w)).length;
|
||||
|
||||
// Weight by percentage of common words
|
||||
return commonCount * 2 / (words1.length + words2.length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract year from title (e.g., "JoJo's Bizarre Adventure (2012)" -> 2012)
|
||||
*/
|
||||
extractYearFromTitle(item) {
|
||||
if (!item) return null;
|
||||
|
||||
// Extract the title string based on the input type
|
||||
let titleStr = '';
|
||||
if (typeof item === 'string') {
|
||||
titleStr = item;
|
||||
} else if (typeof item === 'object') {
|
||||
// Handle both anime objects and result objects
|
||||
if (item.title) {
|
||||
if (typeof item.title === 'string') {
|
||||
titleStr = item.title;
|
||||
} else if (typeof item.title === 'object') {
|
||||
// AniList title object
|
||||
titleStr = item.title.userPreferred || item.title.english || item.title.romaji || '';
|
||||
}
|
||||
} else if (item.name) {
|
||||
@@ -291,7 +228,6 @@ export class AnimepaheMapper {
|
||||
|
||||
if (!titleStr) return null;
|
||||
|
||||
// Look for year pattern in parentheses or brackets
|
||||
const yearMatches = titleStr.match(/[\(\[](\d{4})[\)\]]/);
|
||||
|
||||
if (yearMatches && yearMatches[1]) {
|
||||
@@ -304,16 +240,12 @@ export class AnimepaheMapper {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a generic title by removing year information and other specific identifiers
|
||||
*/
|
||||
getGenericTitle(animeInfo) {
|
||||
if (!animeInfo || !animeInfo.title) return null;
|
||||
|
||||
const title = animeInfo.title.english || animeInfo.title.romaji || animeInfo.title.userPreferred;
|
||||
if (!title) return null;
|
||||
|
||||
// Remove year information and common specifiers
|
||||
return title.replace(/\([^)]*\d{4}[^)]*\)/g, '').replace(/\[[^\]]*\d{4}[^\]]*\]/g, '').trim();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import axios from 'axios';
|
||||
import * as cheerio from 'cheerio';
|
||||
import { extractKwik } from '../extractors/kwik.js';
|
||||
|
||||
export class AnimePahe {
|
||||
constructor() {
|
||||
this.baseUrl = "https://animepahe.ru";
|
||||
this.baseUrl = "https://animepahe.si";
|
||||
this.sourceName = 'AnimePahe';
|
||||
this.isMulti = false;
|
||||
}
|
||||
@@ -108,7 +109,7 @@ export class AnimePahe {
|
||||
animeTitle = moreEpisodes.title;
|
||||
animeDetails = moreEpisodes.details || animeDetails;
|
||||
} else {
|
||||
const detailUrl = `https://animepahe.ru/a/${jsonResult.data[0].anime_id}`;
|
||||
const detailUrl = `https://animepahe.si/a/${jsonResult.data[0].anime_id}`;
|
||||
const newResponse = await axios.get(detailUrl, {
|
||||
headers: {
|
||||
'Cookie': "__ddg1_=;__ddg2_=;",
|
||||
@@ -119,21 +120,17 @@ export class AnimePahe {
|
||||
const $ = cheerio.load(newResponse.data);
|
||||
animeTitle = $('.title-wrapper span').text().trim() || 'Could not fetch title';
|
||||
|
||||
// Try to extract additional information
|
||||
try {
|
||||
// Parse type
|
||||
const typeText = $('.col-sm-4.anime-info p:contains("Type")').text();
|
||||
if (typeText) {
|
||||
animeDetails.type = typeText.replace('Type:', '').trim();
|
||||
}
|
||||
|
||||
// Parse status
|
||||
const statusText = $('.col-sm-4.anime-info p:contains("Status")').text();
|
||||
if (statusText) {
|
||||
animeDetails.status = statusText.replace('Status:', '').trim();
|
||||
}
|
||||
|
||||
// Parse season and year
|
||||
const seasonText = $('.col-sm-4.anime-info p:contains("Season")').text();
|
||||
if (seasonText) {
|
||||
const seasonMatch = seasonText.match(/Season:\s+(\w+)\s+(\d{4})/);
|
||||
@@ -143,7 +140,6 @@ export class AnimePahe {
|
||||
}
|
||||
}
|
||||
|
||||
// Parse score
|
||||
const scoreText = $('.col-sm-4.anime-info p:contains("Score")').text();
|
||||
if (scoreText) {
|
||||
const scoreMatch = scoreText.match(/Score:\s+([\d.]+)/);
|
||||
@@ -157,7 +153,6 @@ export class AnimePahe {
|
||||
}
|
||||
}
|
||||
|
||||
// Always sort episodes by number in ascending order, regardless of how the API returns them
|
||||
const sortedEpisodes = [...episodes].sort((a, b) => a.number - b.number);
|
||||
|
||||
return {
|
||||
@@ -165,7 +160,7 @@ export class AnimePahe {
|
||||
session: session,
|
||||
totalEpisodes: jsonResult.total,
|
||||
details: animeDetails,
|
||||
episodes: sortedEpisodes, // Return sorted episodes, always in ascending order
|
||||
episodes: sortedEpisodes,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error recursively fetching episodes:', error.message);
|
||||
@@ -173,9 +168,13 @@ export class AnimePahe {
|
||||
}
|
||||
}
|
||||
|
||||
async scrapeEpisodesSrcs(episodeUrl, { category, lang } = {}) {
|
||||
async fetchEpisodeSources(episodeId, options = {}) {
|
||||
return this.scrapeEpisodesSrcs(episodeId, options);
|
||||
}
|
||||
|
||||
async scrapeEpisodesSrcs(episodeId, { category, lang } = {}) {
|
||||
try {
|
||||
const response = await axios.get(`${this.baseUrl}/play/${episodeUrl}`, {
|
||||
const response = await axios.get(`${this.baseUrl}/play/${episodeId}`, {
|
||||
headers: {
|
||||
'Cookie': "__ddg1_=;__ddg2_=;",
|
||||
}
|
||||
@@ -190,20 +189,26 @@ export class AnimePahe {
|
||||
const kwikLink = $(btn).attr('data-src');
|
||||
const quality = $(btn).text();
|
||||
|
||||
// Instead of extracting, just return the link directly
|
||||
videoLinks.push({
|
||||
quality: quality,
|
||||
url: kwikLink,
|
||||
referer: "https://kwik.cx",
|
||||
});
|
||||
try {
|
||||
const extraction = await extractKwik(kwikLink, response.config.url);
|
||||
if (extraction && extraction.url) {
|
||||
videoLinks.push({
|
||||
quality: quality,
|
||||
url: extraction.url,
|
||||
isM3U8: extraction.isM3U8,
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`Error extracting Kwik for ${quality}:`, e.message);
|
||||
}
|
||||
}
|
||||
|
||||
const result = {
|
||||
sources: videoLinks.length > 0 ? [{ url: videoLinks[0].url }] : [],
|
||||
multiSrc: videoLinks,
|
||||
return {
|
||||
headers: {
|
||||
Referer: "https://kwik.cx/"
|
||||
},
|
||||
sources: videoLinks
|
||||
};
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error('Error fetching episode sources:', error.message);
|
||||
throw new Error('Failed to fetch episode sources');
|
||||
@@ -223,7 +228,6 @@ export class AnimePahe {
|
||||
throw new Error(`No results found for title: ${title}`);
|
||||
}
|
||||
|
||||
// First try: Direct ID match if provided and valid
|
||||
if (animeId) {
|
||||
const animeIdMatch = resBody.data.find(anime => String(anime.id) === String(animeId));
|
||||
if (animeIdMatch) {
|
||||
@@ -231,7 +235,6 @@ export class AnimePahe {
|
||||
}
|
||||
}
|
||||
|
||||
// Second try: Normalize titles and find best match
|
||||
const normalizeTitle = t => t.toLowerCase().replace(/[^\w\s]/g, '').replace(/\s+/g, ' ').trim();
|
||||
const normalizedSearchTitle = normalizeTitle(title);
|
||||
|
||||
@@ -240,21 +243,17 @@ export class AnimePahe {
|
||||
|
||||
for (const anime of resBody.data) {
|
||||
const normalizedAnimeTitle = normalizeTitle(anime.title);
|
||||
// Calculate simple similarity (more sophisticated than exact match)
|
||||
let similarity = 0;
|
||||
|
||||
// Exact match
|
||||
if (normalizedAnimeTitle === normalizedSearchTitle) {
|
||||
similarity = 1;
|
||||
}
|
||||
// Contains match
|
||||
else if (normalizedAnimeTitle.includes(normalizedSearchTitle) ||
|
||||
normalizedSearchTitle.includes(normalizedAnimeTitle)) {
|
||||
normalizedSearchTitle.includes(normalizedAnimeTitle)) {
|
||||
const lengthRatio = Math.min(normalizedAnimeTitle.length, normalizedSearchTitle.length) /
|
||||
Math.max(normalizedAnimeTitle.length, normalizedSearchTitle.length);
|
||||
Math.max(normalizedAnimeTitle.length, normalizedSearchTitle.length);
|
||||
similarity = 0.8 * lengthRatio;
|
||||
}
|
||||
// Word match
|
||||
else {
|
||||
const searchWords = normalizedSearchTitle.split(' ');
|
||||
const animeWords = normalizedAnimeTitle.split(' ');
|
||||
@@ -272,7 +271,6 @@ export class AnimePahe {
|
||||
return bestMatch.session;
|
||||
}
|
||||
|
||||
// Default to first result if no good match found
|
||||
return resBody.data[0].session;
|
||||
} catch (error) {
|
||||
console.error('Error getting session:', error.message);
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import { load } from 'cheerio';
|
||||
import { client } from '../utils/client.js';
|
||||
import { HIANIME_URL } from '../constants/api-constants.js';
|
||||
import { megaCloudExtractor } from '../extractors/megacloud.js';
|
||||
|
||||
|
||||
/**
|
||||
* Get all available servers for a HiAnime episode
|
||||
* @param {string} episodeId - Episode ID in format "anime-title-123?ep=456"
|
||||
* @returns {Promise<Object>} Object containing sub, dub, and raw server lists
|
||||
*/
|
||||
export async function getEpisodeServers(episodeId) {
|
||||
const result = {
|
||||
sub: [],
|
||||
@@ -71,14 +68,6 @@ export async function getEpisodeServers(episodeId) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get streaming sources for a HiAnime episode
|
||||
* @param {string} episodeId - Episode ID in format "anime-title-123?ep=456"
|
||||
* @param {string} serverName - Name of the server to get sources from
|
||||
* @param {string} category - Type of episode: 'sub', 'dub', or 'raw'
|
||||
* @returns {Promise<Object>} Object containing sources and related metadata
|
||||
*/
|
||||
export async function getEpisodeSources(episodeId, serverName = 'vidstreaming', category = 'sub') {
|
||||
try {
|
||||
if (!episodeId || episodeId.trim() === "" || episodeId.indexOf("?ep=") === -1) {
|
||||
@@ -112,15 +101,18 @@ export async function getEpisodeSources(episodeId, serverName = 'vidstreaming',
|
||||
|
||||
// If the target is a MegaCloud embed, extract the direct source URL
|
||||
if (data?.link && /megacloud\./i.test(data.link)) {
|
||||
const extracted = await extractFromMegaCloud(data.link);
|
||||
return extracted;
|
||||
try {
|
||||
const extracted = await megaCloudExtractor.extract(data.link);
|
||||
return extracted;
|
||||
} catch (e) {
|
||||
console.warn(`MegaCloud extraction failed for ${data.link}:`, e.message);
|
||||
// Fallback to returning the link as is
|
||||
}
|
||||
}
|
||||
|
||||
// Return sources format similar to the AniWatch package for other hosts
|
||||
return {
|
||||
headers: {
|
||||
Referer: data.link,
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36"
|
||||
},
|
||||
sources: [
|
||||
{
|
||||
@@ -136,108 +128,6 @@ export async function getEpisodeSources(episodeId, serverName = 'vidstreaming',
|
||||
}
|
||||
}
|
||||
|
||||
// --- Helpers ---
|
||||
async function extractFromMegaCloud(embedUrl) {
|
||||
// Parse domain for Referer
|
||||
const urlObj = new URL(embedUrl);
|
||||
const defaultDomain = `${urlObj.protocol}//${urlObj.host}`;
|
||||
|
||||
// Use a mobile UA to match site expectations
|
||||
const mobileUA = "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Mobile Safari/537.36";
|
||||
|
||||
// Load embed page HTML
|
||||
const { data: html } = await client.get(embedUrl, {
|
||||
responseType: 'text',
|
||||
headers: {
|
||||
Accept: '*/*',
|
||||
'X-Requested-With': 'XMLHttpRequest',
|
||||
Referer: defaultDomain,
|
||||
'User-Agent': mobileUA,
|
||||
},
|
||||
});
|
||||
|
||||
const $ = load(html);
|
||||
|
||||
// Get file id from #megacloud-player
|
||||
const videoTag = $('#megacloud-player');
|
||||
const fileId = videoTag?.attr('data-id');
|
||||
if (!fileId) {
|
||||
throw new Error('MegaCloud: missing file id (possibly expired URL)');
|
||||
}
|
||||
|
||||
// Extract nonce - either 48 chars or 3x16 concatenated
|
||||
let nonce = null;
|
||||
const nonceRegex48 = /\b[a-zA-Z0-9]{48}\b/;
|
||||
const match48 = html.match(nonceRegex48);
|
||||
if (match48) {
|
||||
nonce = match48[0];
|
||||
} else {
|
||||
const match3x16 = html.match(/\b([a-zA-Z0-9]{16})\b[\s\S]*?\b([a-zA-Z0-9]{16})\b[\s\S]*?\b([a-zA-Z0-9]{16})\b/);
|
||||
if (match3x16) nonce = `${match3x16[1]}${match3x16[2]}${match3x16[3]}`;
|
||||
}
|
||||
if (!nonce) {
|
||||
throw new Error('MegaCloud: failed to capture nonce');
|
||||
}
|
||||
|
||||
// Get decryption key from public repo
|
||||
const { data: keyJson } = await client.get('https://raw.githubusercontent.com/yogesh-hacker/MegacloudKeys/refs/heads/main/keys.json', {
|
||||
headers: { 'User-Agent': mobileUA }
|
||||
});
|
||||
const secret = keyJson?.mega;
|
||||
|
||||
// Try to get sources JSON
|
||||
const { data: sourcesResp } = await client.get(`${defaultDomain}/embed-2/v3/e-1/getSources`, {
|
||||
params: { id: fileId, _k: nonce },
|
||||
headers: {
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
Referer: defaultDomain,
|
||||
'User-Agent': mobileUA,
|
||||
}
|
||||
});
|
||||
|
||||
let fileUrl = null;
|
||||
if (Array.isArray(sourcesResp?.sources) && sourcesResp.sources[0]?.file) {
|
||||
fileUrl = sourcesResp.sources[0].file;
|
||||
} else if (sourcesResp?.sources) {
|
||||
// Encrypted payload; use remote decoder
|
||||
const decodeBase = 'https://script.google.com/macros/s/AKfycbxHbYHbrGMXYD2-bC-C43D3njIbU-wGiYQuJL61H4vyy6YVXkybMNNEPJNPPuZrD1gRVA/exec';
|
||||
const params = new URLSearchParams({
|
||||
encrypted_data: String(sourcesResp.sources),
|
||||
nonce: nonce, // keep for compatibility if server expects this key
|
||||
secret: String(secret || ''),
|
||||
});
|
||||
// Some servers expect 'nonce' as '_k' or 'nonce'; try both key names
|
||||
if (!params.has('_k')) params.append('_k', nonce);
|
||||
|
||||
const { data: decodedText } = await client.get(`${decodeBase}?${params.toString()}`, {
|
||||
responseType: 'text',
|
||||
headers: { 'User-Agent': mobileUA }
|
||||
});
|
||||
const match = /\"file\":\"(.*?)\"/.exec(decodedText);
|
||||
if (match) fileUrl = match[1].replace(/\\\//g, '/');
|
||||
}
|
||||
|
||||
if (!fileUrl) {
|
||||
throw new Error('MegaCloud: failed to extract file URL');
|
||||
}
|
||||
|
||||
return {
|
||||
headers: {
|
||||
Referer: defaultDomain,
|
||||
'User-Agent': mobileUA,
|
||||
},
|
||||
tracks: [],
|
||||
intro: { start: 0, end: 0 },
|
||||
outro: { start: 0, end: 0 },
|
||||
sources: [
|
||||
{
|
||||
url: fileUrl,
|
||||
isM3U8: /\.m3u8($|\?)/.test(fileUrl),
|
||||
}
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
getEpisodeServers,
|
||||
getEpisodeSources
|
||||
|
||||
Reference in New Issue
Block a user