AnimeKai update

This commit is contained in:
2026-03-30 07:45:10 -05:00
parent 0224d88313
commit ac9d9c42e8

View File

@@ -1,144 +1,260 @@
{
    "name": "AnimeKai Streamer",
    "version": "1.2.1",
    "author": "Animex",
    "description": "Resolves AnimeKai.to streams using backend encrypted APIs. Fully optimized for Cloud Tunneling.",
    "type": ["ANIME_STREAMER"],
    "requirements": ["httpx", "beautifulsoup4"]
}
---
import re
import json
import asyncio
import urllib.parse
from typing import Optional, Dict, List

import httpx

# =========================
# CONSTANTS
# =========================

BASE_URL = "https://animekai.to"
ENC_API = "https://enc-dec.app/api"
DB_URL = "https://enc-dec.app/db/kai/find"

# Browser-like headers; the enc-dec and AnimeKai endpoints are sent a
# desktop Chrome UA and the site Referer on every request.
HEADERS = {
    "User-Agent": (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/137.0.0.0 Safari/537.36"
    ),
    "Referer": BASE_URL
}

# =========================
# ENCRYPT / DECRYPT HELPERS
# =========================
async def enc_kai(client, text: str) -> str:
    """Encrypt *text* via the enc-dec.app "enc-kai" endpoint.

    Issues a GET with the text as a query parameter and returns the
    ``result`` field of the JSON body. Raises for non-2xx responses.
    """
    endpoint = f"{ENC_API}/enc-kai"
    response = await client.get(
        endpoint, params={"text": text}, headers=HEADERS, timeout=10
    )
    response.raise_for_status()
    payload = response.json()
    return payload["result"]
async def dec_kai(client, text: str) -> str:
    """Decrypt *text* via the enc-dec.app "dec-kai" endpoint.

    Posts the text as JSON and returns the ``result`` field of the
    response body. Raises for non-2xx responses.
    """
    endpoint = f"{ENC_API}/dec-kai"
    response = await client.post(
        endpoint, json={"text": text}, headers=HEADERS, timeout=10
    )
    response.raise_for_status()
    payload = response.json()
    return payload["result"]
async def dec_mega(client, text: str) -> Dict:
    """Decrypt a MegaUp payload via the enc-dec.app "dec-mega" endpoint.

    The decryption is UA-dependent, so the same User-Agent used for the
    media fetch is posted alongside the text. Unlike the kai helpers,
    this returns the WHOLE JSON body (callers read ``["result"]``).
    """
    body = {"text": text, "agent": HEADERS["User-Agent"]}
    response = await client.post(
        f"{ENC_API}/dec-mega", json=body, headers=HEADERS, timeout=15
    )
    response.raise_for_status()
    return response.json()
# =========================
# INTERNAL LOGIC
# =========================
async def get_anime_id(client, slug: str) -> tuple:
    """Resolve a MAL id to an AnimeKai id via the enc-dec.app DB.

    Note: despite the name, *slug* is the MAL id (int) — it is passed
    straight through as the ``mal_id`` query parameter.

    Returns:
        ``(kai_id, db_entry)`` where ``db_entry`` is the full first row
        of the JSON response (it also carries the episode tokens).

    Raises:
        RuntimeError: when the response is empty or has no ``kai_id``.
    """
    # BUGFIX (annotation only): this was declared ``-> str`` but has
    # always returned a (kai_id, entry) tuple; callers unpack two values.
    resp = await client.get(f"{DB_URL}?mal_id={slug}", headers=HEADERS)
    resp.raise_for_status()
    data = resp.json()
    if not data or not data[0].get("info", {}).get("kai_id"):
        raise RuntimeError("Anime ID not found in JSON response")
    return data[0]["info"]["kai_id"], data[0]
async def get_episode_token(data: dict, episode: int) -> str:
    """Return the playback token for *episode* from an AnimeKai DB entry.

    Always reads the sub ("1") language group — dub/sub server selection
    happens later in get_server_id, not here.

    Args:
        data: the full DB row returned by get_anime_id.
        episode: 1-based episode number.

    Raises:
        RuntimeError: when the episode is missing from the metadata.
    """
    try:
        return data["episodes"]["1"][str(episode)]["token"]
    except (KeyError, TypeError) as exc:
        # Narrowed from a bare ``except Exception`` — only lookup
        # failures mean "token not found"; anything else should surface.
        raise RuntimeError("Episode token not found in JSON response") from exc
async def get_server_id(client, ep_token: str, dub: bool) -> List[str]:
    """Collect candidate server ids (``data-lid``) for an episode token.

    Queries ``/ajax/links/list`` (with the encrypted token as ``_``) and
    scrapes the returned HTML for server entries, walking language
    groups in preference order (dub first when *dub* is requested).

    Raises:
        RuntimeError: when no servers are found in any language group.
    """
    # BUGFIX (annotation only): was declared ``-> str`` but returns a list.
    enc = await enc_kai(client, ep_token)
    html = await client.get(
        f"{BASE_URL}/ajax/links/list",
        params={"token": ep_token, "_": enc},
        headers=HEADERS
    )

    # The actual HTML is in the 'result' field of the JSON response;
    # fall back to the raw body if the response is not JSON.
    try:
        html_content = html.json()["result"]
    except Exception:
        html_content = html.text

    # Determine which language groups to use.
    # Default: sub (Hard Sub) first, or dub first if requested.
    lang_order = ["dub", "sub"] if dub else ["sub", "softsub", "dub"]

    all_server_ids = []
    for lang in lang_order:
        group_pattern = rf'<div class="server-items lang-group" data-id="{lang}"[^>]*>(.*?)</div>'
        group_match = re.search(group_pattern, html_content, re.DOTALL)
        if group_match:
            group_html = group_match.group(1)
            server_pattern = r'<span class="server"[^>]+data-lid="([^"]+)"[^>]*>Server \d+</span>'
            server_ids = re.findall(server_pattern, group_html)
            if server_ids:
                all_server_ids.extend(server_ids)

    if not all_server_ids:
        raise RuntimeError("No servers found in any language group")
    return all_server_ids
async def resolve_stream_url(client, server_id: str) -> dict:
    """Resolve a single AnimeKai server id to a playable stream.

    Flow: encrypt the id -> ``/ajax/links/view`` -> dec-kai (embed URL +
    skip times) -> fetch the ``/media/`` endpoint -> dec-mega -> pick the
    first source and the caption/thumbnail tracks.

    Returns:
        dict with keys ``url``, ``skip``, ``captions``, ``thumbnails``,
        ``referer``.

    Raises:
        RuntimeError: when no media URL or no stream sources are found.
    """
    enc = await enc_kai(client, server_id)
    view = await client.get(
        f"{BASE_URL}/ajax/links/view",
        params={"id": server_id, "_": enc},
        headers=HEADERS
    )
    decrypted = await dec_kai(client, view.json()["result"])
    print(f"decrypted (dec_kai): {decrypted}")

    skip = {}
    media_url = ""
    if isinstance(decrypted, dict):
        skip = decrypted.get("skip", {})
        # The embed URL (/e/...) maps onto the JSON media API (/media/...).
        media_url = decrypted.get("url", "").replace("/e/", "/media/")

    # BUGFIX: previously an empty media_url was still fetched via
    # client.get(""), producing a confusing downstream error. Fail fast.
    if not media_url:
        raise RuntimeError("No media URL found in decrypted payload")

    # Referer is the scheme+host of the media URL (the embed host).
    parsed = urllib.parse.urlparse(media_url)
    referer = f"{parsed.scheme}://{parsed.netloc}"

    media = await client.get(media_url, headers=HEADERS)
    mega = await dec_mega(client, media.json()["result"])
    print(mega)

    sources = mega.get("result", {}).get("sources", [])
    tracks = mega.get("result", {}).get("tracks", [])

    # Split the track list into captions and preview thumbnails.
    captions = [t for t in tracks if t.get("kind") == "captions"]
    thumbnails = [t for t in tracks if t.get("kind") == "thumbnails"]

    if not sources:
        raise RuntimeError("No stream sources found")

    # Return both the file URL and skip times.
    return {
        "url": sources[0]["file"],
        "skip": skip,
        "captions": captions,
        "thumbnails": thumbnails,
        "referer": referer
    }
# =========================
# PUBLIC MODULE API
# =========================

async def get_iframe_source(
    mal_id: int,
    episode: int,
    dub: bool
) -> Optional[str]:
    """
    Returns resolved m3u8 stream URL using the injected Tunneling HybridClient

    Pipeline: MAL id -> AnimeKai id/metadata -> episode token -> server
    ids -> first server that resolves wins. The result is a relative
    ``/video_player.html`` URL carrying the stream and optional
    skip-times/captions/thumbnails/referer as query parameters.
    Returns None when every server fails.
    """
    # The 'httpx' variable resolves to the injected HybridClient dynamically
    # replaced during module processing in app.py's `get_iframe_source` endpoint.
    client = httpx
    try:
        ani_id, anime_json = await get_anime_id(client, mal_id)
        print(f"ani_id: {ani_id}")

        ep_token = await get_episode_token(anime_json, episode)
        print(f"ep_token: {ep_token}")

        server_ids = await get_server_id(client, ep_token, dub)
        print(f"server_ids: {server_ids}")

        # Iterate through candidate servers until one resolves.
        for sid in server_ids:
            try:
                print(f"Trying server_id: {sid}")
                result = await resolve_stream_url(client, sid)

                if result:
                    url = result["url"]
                    skip = result.get("skip", {})

                    # Build new player URL
                    player_url = "/video_player.html?stream=true&full=true"

                    # Add skip_times, captions, thumbnails as separate params
                    if "intro" in skip or "outro" in skip:
                        skip_times_param = {}
                        if "intro" in skip:
                            skip_times_param["intro"] = skip["intro"]
                        if "outro" in skip:
                            skip_times_param["outro"] = skip["outro"]
                        player_url += f"&skip_times={urllib.parse.quote(json.dumps(skip_times_param))}"

                    if result.get("captions"):
                        player_url += f"&captions={urllib.parse.quote(json.dumps(result['captions']))}"

                    if result.get("thumbnails"):
                        player_url += f"&thumbnails={urllib.parse.quote(json.dumps(result['thumbnails']))}"

                    if result.get("referer"):
                        player_url += f"&referer={urllib.parse.quote(result['referer'])}"

                    player_url += f"&id={mal_id}&episode={episode}&video={urllib.parse.quote(url)}"
                    return player_url
            except Exception as e:
                # Best-effort per-server: log and move on to the next one.
                print(f"Failed to resolve server {sid}: {e}")
                continue

        print("All servers failed.")
        return None
    except Exception as e:
        import traceback
        print(f"Exception in get_iframe_source: {type(e).__name__}: {e}")
        traceback.print_exc()
        return None
async def get_download_link(
    mal_id: int,
    episode: int,
    dub: bool,
    quality: str
) -> Optional[str]:
    """
    Alias for get_iframe_source (AnimeKai uses adaptive m3u8)

    The *quality* argument is accepted for interface compatibility but is
    unused: the resolved stream is an adaptive m3u8 playlist.
    """
    return await get_iframe_source(mal_id, episode, dub)