Merge pull request #669 from ArjixGamer/patch-30

fixed animesuge
Red, 2021-05-10 20:02:22 +01:00, committed by GitHub
commit 6b49b7e06d
2 changed files with 19 additions and 7 deletions

README.md

@@ -67,6 +67,7 @@ Yeah. Me too! That's why this tool exists.
 - animeout
 - Animerush
 - Animesimple
+- AnimeSuge - requires Node.js
 - Animevibe
 - AnimeTake
 - AniTube
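
The new README entry notes that AnimeSuge needs a Node.js runtime on the user's machine. As a rough, illustrative sketch (not part of this commit; the function name is invented), this is how a caller could verify that requirement up front instead of failing mid-download:

```python
import shutil
import subprocess


def node_available() -> bool:
    """Return True if a `node` binary is on PATH and answers --version."""
    node = shutil.which("node")
    if node is None:
        return False
    try:
        result = subprocess.run([node, "--version"], capture_output=True, text=True, timeout=5)
    except (OSError, subprocess.TimeoutExpired):
        return False
    return result.returncode == 0


if __name__ == "__main__":
    print("Node.js found" if node_available() else "Node.js missing: the AnimeSuge provider will not work")
```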

anime_downloader/sites/animesuge.py

@@ -5,12 +5,14 @@ from anime_downloader.sites import helpers
 import re
 import json

 class AnimeSuge(Anime, sitename="animesuge"):
     sitename = "animesuge"

     @classmethod
     def search(cls, query):
-        soup = helpers.soupify(helpers.get("https://animesuge.io/ajax/anime/search", params={"keyword": query}).json()['html'])
+        soup = helpers.soupify(helpers.get(
+            "https://animesuge.io/ajax/anime/search", params={"keyword": query}).json()['html'])

         search_results = [
             SearchResult(
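
For context on the hunk above: the /ajax/anime/search endpoint returns a JSON envelope whose html field holds the rendered result list, which is why the response goes through .json()['html'] before soupify. A minimal standalone sketch of that flow, using plain requests and BeautifulSoup instead of the project's helpers module (the "a" selector and the returned tuple shape are guesses, not the repo's SearchResult handling):

```python
import requests
from bs4 import BeautifulSoup


def search_animesuge(query: str):
    resp = requests.get(
        "https://animesuge.io/ajax/anime/search",
        params={"keyword": query},
        timeout=10,
    )
    resp.raise_for_status()
    # The endpoint wraps an HTML fragment in a JSON envelope: {"html": "..."}
    soup = BeautifulSoup(resp.json()["html"], "html.parser")
    # Result markup is guessed here: grab every anchor's href and text.
    return [(a.get("href"), a.get_text(strip=True)) for a in soup.select("a")]


if __name__ == "__main__":
    for href, title in search_animesuge("naruto"):
        print(href, title)
```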
@@ -27,8 +29,9 @@ class AnimeSuge(Anime, sitename="animesuge"):
         _id = re.search(r".*-(.*)", self.url).group(1)
         soup = helpers.soupify(helpers.get(ep_url, params={'id': _id}))
-        return ['https://animesuge.io' + x.get('href') for x in soup.select('a:not(.more)')]
+        eps = ['https://animesuge.io' + re.search(r"(/anime.*?/ep-\d+)", x.get(
+            'href')).group(1).replace('\\', '') for x in soup.select('a:not(.more)')]
+        return eps

     def _scrape_metadata(self):
         self.title = helpers.soupify(helpers.get(self.url)).find("h1").text
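
The replaced return statement above previously used each href as-is; the new code keeps only the /anime/<slug>/ep-<n> portion and strips backslashes, presumably because the hrefs in the AJAX response can carry JSON-style escaping and extra query parts. A small sketch of that cleanup in isolation (the sample hrefs are invented; the regex and replace are copied from the diff):

```python
import re


def clean_episode_href(href: str) -> str:
    # Keep only the "/anime/<slug>/ep-<n>" part and drop any stray backslashes,
    # exactly as the new list comprehension does.
    return re.search(r"(/anime.*?/ep-\d+)", href).group(1).replace("\\", "")


# Both invented samples normalise to the same path.
print(clean_episode_href("/anime/naruto-xx8z/ep-190"))
print(clean_episode_href(r"\/anime\/naruto-xx8z\/ep-190?extra=1"))
# -> /anime/naruto-xx8z/ep-190 (the diff then prefixes it with https://animesuge.io)
```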
@@ -37,13 +40,17 @@ class AnimeSuge(Anime, sitename="animesuge"):

 class AnimeSugeEpisode(NineAnimeEpisode, sitename='animesuge'):
     def _get_sources(self):
         # Get id and ep no. from url, e.g: https://animesuge.io/anime/naruto-xx8z/ep-190 -> xx8z, 190
-        _id, ep_no = re.search(r".*\/anime\/.*-(.*?)\/.*-(\d+)$", self.url).group(1, 2)
+        _id, ep_no = re.search(
+            r".*\/anime\/.*-(.*?)\/.*-(\d+)$", self.url).group(1, 2)

         # Get sources json from html, e.g:
         """
         <a class="active" data-base="190" data-name-normalized="190" data-sources='{"28":"8e663a9230406b753ba778dba15c723b3bf221b61fbdde6e8adae899adbad7ab","40":"565ff0ca78263f80a8f8a344e06085854f87e3449e321032425498b9d129dbf0","35":"c800a3ec0dfe68265d685792375169007b74c89aa13849869a16a3674d971f45"}' href="/anime/naruto-xx8z/ep-190">190</a>"""
         # data_sources = json.loads(
         data_sources = json.loads(helpers.soupify(helpers.get("https://animesuge.io/ajax/anime/servers",
-                                                              params={"id": _id, "episode": ep_no})).select(f"a[data-base='{ep_no}']")[0].get("data-sources"))
+                                                              params={"id": _id, "episode": ep_no}).json()['html']).select(f"a[data-base='{ep_no}']")[0].get("data-sources"))

         # Only includes supported
         # Unsupported ones {'28': 'openstream'}
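
The fix in this hunk is the added .json()['html']: like the search endpoint, /ajax/anime/servers now wraps its HTML fragment in a JSON envelope. The sample anchor in the comment above shows what that fragment contains. A standalone sketch of the same parsing step, using a trimmed copy of that sample (hash values shortened) and skipping the network call:

```python
import json
from bs4 import BeautifulSoup

# Trimmed copy of the sample anchor quoted in the diff's comment.
SAMPLE_HTML = """
<a class="active" data-base="190" data-name-normalized="190"
   data-sources='{"28":"8e663a92...","40":"565ff0ca...","35":"c800a3ec..."}'
   href="/anime/naruto-xx8z/ep-190">190</a>
"""

ep_no = "190"
soup = BeautifulSoup(SAMPLE_HTML, "html.parser")
# Same selector as the diff: the anchor whose data-base matches the episode number.
anchor = soup.select(f"a[data-base='{ep_no}']")[0]
data_sources = json.loads(anchor.get("data-sources"))
print(data_sources)  # {'28': '8e663a92...', '40': '565ff0ca...', '35': 'c800a3ec...'}
```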
@@ -60,14 +67,18 @@ class AnimeSugeEpisode(NineAnimeEpisode, sitename='animesuge'):
                                        params={"id": _id}).json()['url']
                     break
                 # Makes it more consistent.
-                except HTTPError:
+                except requests.HTTPError:
                     time.sleep(5)
                     continue

             server = id_source_map[key]
+            link = self.decodeString(link)
+
+            if 'mp4upload.com/embed' in link:
+                link = re.search(r"(https://.*?\.html)", link).group(1)

             sources_list.append({
                 'extractor': server,
-                'url': self.decodeString(link),
+                'url': link,
                 'server': server,
                 # This may not be true, can't see the info on page.
                 'version': 'subbed'
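
Two behaviours land in this last hunk: the retry loop now catches requests.HTTPError explicitly, and mp4upload embed links are decoded first and then trimmed down to the bare .html page before being handed to the extractor. A rough standalone sketch of both ideas, with invented helper names and plain requests in place of the project's helpers module:

```python
import re
import time
import requests


def get_json_with_retry(url: str, params: dict, attempts: int = 5) -> dict:
    """Invented helper: retry a flaky endpoint on HTTP errors, like the diff's loop."""
    for _ in range(attempts):
        try:
            resp = requests.get(url, params=params, timeout=10)
            resp.raise_for_status()  # raises requests.HTTPError on 4xx/5xx
            return resp.json()
        except requests.HTTPError:
            time.sleep(5)  # same back-off as the diff
    raise RuntimeError(f"{url} kept failing after {attempts} attempts")


def normalise_mp4upload(link: str) -> str:
    """Trim an mp4upload embed link to the bare .html page, as the diff does."""
    if 'mp4upload.com/embed' in link:
        return re.search(r"(https://.*?\.html)", link).group(1)
    return link


print(normalise_mp4upload("https://www.mp4upload.com/embed-abc123.html?autostart=true"))
# -> https://www.mp4upload.com/embed-abc123.html
```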