fixed animefrenzy
note: apparently it has exactly the same API as shiro.is, but it's just hosted somewhere else... I'm really confused
parent
05118113f6
commit
0ce1f79d4c
|
@ -7,52 +7,68 @@ import logging
|
||||||
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)
|
def get_token():
    """Scrape the API token embedded in animefrenzy's main JS bundle.

    Fetches the site homepage, locates the fingerprinted
    ``/static/js/main.*`` script it references, downloads that script, and
    pulls out the ``token:"..."`` string literal the site uses to
    authenticate calls against its backing API (moo.yare.wtf).

    Returns:
        str: the API token.

    Raises:
        TypeError: if either regex fails to match (``re.search`` returns
            ``None``, and subscripting ``None`` fails) — e.g. after a site
            layout change.
    """
    r = helpers.get('https://animefrenzy.org').text
    # Locate the fingerprinted main JS bundle referenced by the homepage.
    script = 'https://animefrenzy.org' + re.search(r'src\=\"(\/static\/js\/main\..*?)\"', r)[1]  # noqa
    script = helpers.get(script).text
    # The token sits in the bundle as a plain string literal: token:"..."
    token = re.search(r'token\:\"(.*?)\"', script)[1]
    return token
||||||
class AnimeFrenzy(Anime, sitename='animefrenzy'):
    """Anime provider for animefrenzy.org.

    All data calls go through the site's backing API at ``moo.yare.wtf``
    (apparently the same API as shiro.is, hosted elsewhere), authenticated
    with a token scraped from the site's JS bundle by :func:`get_token`.
    """

    sitename = 'animefrenzy'
    # NOTE(review): evaluated at class-definition (import) time, so importing
    # this module performs network I/O — confirm that is intended.
    token = get_token()

    @classmethod
    def search(cls, query):
        """Search the backing API and return results, shortest title first.

        Args:
            query (str): search terms.

        Returns:
            list: ``SearchResult`` objects, or ``[]`` when the API response
            carries no ``'nav'`` payload (no matches).
        """
        results = helpers.get("https://moo.yare.wtf/advanced", params={"search": query, "token": cls.token}).json()['data']  # noqa
        if 'nav' in results:
            results = results['nav']['currentPage']['items']
            search_results = [
                SearchResult(
                    title=i['name'],
                    url='https://animefrenzy.org/anime/' + i['slug'],
                    poster='https://moo.yare.wtf/' + i['image'],
                    meta_info={
                        'version_key_dubbed': '(Sub)' if i['language'] == 'subbed' else '(Dub)'  # noqa
                    }
                )
                for i in results
            ]
            # Shortest titles first: the closest match to a short query is
            # usually the shortest result name.
            search_results = sorted(search_results, key=lambda x: len(x.title))
            return search_results
        else:
            return []

    def _scrape_episodes(self):
        """Return a list of per-episode stream-page URLs for this anime."""
        # Re-scrape the token so it is fresh for this request, rather than
        # relying on the one cached at import time.
        self.token = get_token()
        slug = self.url.split('/')[-1]
        # Episode URLs embed "-episode-N"; strip it to recover the anime slug.
        if 'episode' in slug:
            slug = slug.split('-episode')[0]

        api_link = 'https://moo.yare.wtf/anime/slug/' + slug
        r = helpers.get(api_link, params={'token': self.token}).json()
        if r['status'] == 'Found':
            episodes = r['data']['episodes']
            episodes = [
                'https://moo.yare.wtf/vidstreaming/animefrenzy/' + x['videos'][0]['video_id']  # noqa
                for x in episodes
            ]
            return episodes
        else:
            # Anime not found by the API — nothing to list.
            return []

    def _scrape_metadata(self):
        """Populate ``self.title`` from the API record for this anime."""
        slug = self.url.split('/')[-1]
        if 'episode' in slug:
            slug = slug.split('-episode')[0]

        api_link = 'https://moo.yare.wtf/anime/slug/' + slug
        r = helpers.get(api_link, params={'token': self.token}).json()
        self.title = r['data']['name']
|
||||||
class AnimeFrenzyEpisode(AnimeEpisode, sitename='animefrenzy'):
    """Episode handler for animefrenzy: resolves a direct stream URL."""

    def _get_sources(self):
        """Extract the direct video URL from the episode stream page.

        The page embeds a ``"file":"..."`` JSON fragment whose value is
        already a direct stream link, so no extractor is needed.

        Returns:
            list: a single ``('no_extractor', url)`` tuple.

        Raises:
            TypeError: if the ``"file"`` fragment is absent (``re.search``
                returns ``None``).
        """
        r = helpers.get(self.url).text
        link = re.search(r'\"file\"\:\"(.*?)\"', r)[1]
        return [('no_extractor', link)]
Loading…
Reference in New Issue