Update tenshimoe.py

master
Arjix 2021-07-22 01:06:05 +03:00 committed by GitHub
parent a99a73bedf
commit ebfad498db
1 changed file with 50 additions and 13 deletions

@@ -2,6 +2,18 @@ from anime_downloader.sites.anime import Anime, AnimeEpisode, SearchResult
 from anime_downloader.sites import helpers


+def parse_search_page(soup):
+    results = soup.select('ul.thumb > li > a')
+    return [
+        SearchResult(
+            title=x['title'],
+            url=x['href'],
+            poster=x.find('img')['src']
+        )
+        for x in results
+    ]
+
+
 class TenshiMoe(Anime, sitename='tenshi.moe'):
     sitename = 'tenshi.moe'

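A minimal usage sketch of the new parse_search_page helper, assuming markup shaped like the site's thumbnail list; the HTML below and the import path are illustrative, not taken from this commit:

from bs4 import BeautifulSoup
from anime_downloader.sites.tenshimoe import parse_search_page

# Illustrative markup in the shape the 'ul.thumb > li > a' selector expects.
html = '''
<ul class="thumb">
  <li>
    <a title="Example Anime" href="https://tenshi.moe/anime/example">
      <img src="https://tenshi.moe/img/example.jpg">
    </a>
  </li>
</ul>
'''

soup = BeautifulSoup(html, 'html.parser')
for result in parse_search_page(soup):
    print(result.title, result.url)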
@@ -9,21 +21,34 @@ class TenshiMoe(Anime, sitename='tenshi.moe'):
     @classmethod
     def search(cls, query):
         soup = helpers.soupify(
-            helpers.get('https://tenshi.moe/anime', params={'q': query}))
-        results = soup.select('ul.loop.anime-loop.list > li > a')
-
-        return [
-            SearchResult(
-                title=x['title'],
-                url=x['href'],
-            )
-            for x in results
-        ]
+            helpers.get(
+                'https://tenshi.moe/anime',
+                params={'q': query},
+                cookies={'loop-view': 'thumb'}
+            )
+        )
+
+        results = parse_search_page(soup)
+
+        while soup.select_one(".pagination"):
+            link = soup.select_one('a.page-link[rel="next"]')
+            if link:
+                soup = helpers.soupify(
+                    helpers.get(
+                        link['href'],
+                        cookies={'loop-view': 'thumb'}
+                    )
+                )
+                results.extend(parse_search_page(soup))
+            else:
+                break
+
+        return results

     def _scrape_episodes(self):
         soup = helpers.soupify(helpers.get(self.url))
         eps = soup.select(
-            'li[class^=episode] > a'
+            'li[class*="episode"] > a'
         )
         eps = [x['href'] for x in eps]
         return eps
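For context, a standalone sketch of the pagination pattern the new search() uses, written here with requests and BeautifulSoup instead of the project's helpers module; fetch_all_pages and its parameters are illustrative names, not part of this commit:

import requests
from bs4 import BeautifulSoup


def fetch_all_pages(start_url, params=None):
    # Same 'loop-view' cookie the scraper sets so the site serves the thumbnail layout.
    cookies = {'loop-view': 'thumb'}
    soup = BeautifulSoup(
        requests.get(start_url, params=params, cookies=cookies).text,
        'html.parser'
    )
    pages = [soup]
    # Keep following the rel="next" link while a pagination block is present.
    while soup.select_one('.pagination'):
        link = soup.select_one('a.page-link[rel="next"]')
        if not link:
            break
        soup = BeautifulSoup(
            requests.get(link['href'], cookies=cookies).text,
            'html.parser'
        )
        pages.append(soup)
    return pages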
@@ -34,8 +59,20 @@ class TenshiMoe(Anime, sitename='tenshi.moe'):

 class TenshiMoeEpisode(AnimeEpisode, sitename='tenshi.moe'):
+    QUALITIES = ['360p', '480p', '720p', '1080p']

     def _get_sources(self):
         soup = helpers.soupify(helpers.get(self.url))
-        # Might break with something other than mp4!
-        link = soup.find_all('source', type="video/mp4")[-1]['src']
-        return [('no_extractor', link)]
+        soup = soup.select_one('.embed-responsive > iframe')
+
+        mp4moe = helpers.soupify(helpers.get(soup.get('src'), referer=self.url))
+        mp4moe = mp4moe.select_one('video#player')
+        qualities_ = [x.get("title") for x in mp4moe.select('source')]
+        sources = [
+            ('no_extractor', x.get('src'))
+            for x in mp4moe.select('source')
+        ]
+
+        if self.quality in qualities_:
+            return [sources[qualities_.index(self.quality)]]
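The quality handling at the end of _get_sources() can be read in isolation as the small selection step sketched below; pick_source is an illustrative name, and the fallback to the last listed source is an assumption added for the sketch, not part of this commit:

def pick_source(sources, qualities, wanted):
    # sources: list of ('no_extractor', url) tuples; qualities: parallel list of labels such as '720p'.
    if wanted in qualities:
        return [sources[qualities.index(wanted)]]
    # Illustrative fallback: return the last advertised source if the exact quality is missing.
    return [sources[-1]] if sources else []


print(pick_source([('no_extractor', 'a.mp4'), ('no_extractor', 'b.mp4')],
                  ['480p', '720p'], '720p'))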