anime-downloader/anime_downloader/sites/twistmoe.py

from Crypto.Cipher import AES
import base64
from hashlib import md5
import warnings
import requests_cache
import requests
import logging

from anime_downloader.sites.anime import Anime, AnimeEpisode, SearchResult
from anime_downloader.sites import helpers
from anime_downloader.util import eval_in_node

logger = logging.getLogger(__name__)

# Don't warn if not using fuzzywuzzy[speedup]
with warnings.catch_warnings():
    warnings.simplefilter('ignore')
    from fuzzywuzzy import process
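
# AES block size in bytes; the pad()/unpad() helpers below implement the
# PKCS#7-style padding that CryptoJS applies to the encrypted source strings.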
BLOCK_SIZE = 16


class TwistMoeEpisode(AnimeEpisode, sitename='twist.moe'):
    def _get_sources(self):
        self.headers["Referer"] = self._parent.url + str(self.ep_no)
        # 'no_extractor' means the stream URL is used as-is, without a site-specific extractor.
        return [('no_extractor', self.url)]


class TwistMoe(Anime, sitename='twist.moe'):
    """
    Twist.moe
    """
    sitename = 'twist.moe'
    QUALITIES = ['360p', '480p', '720p', '1080p']
    _api_url = "https://twist.moe/api/anime/{}/sources"
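    # Illustrative only (the slug is hypothetical): _api_url.format('some-show')
    # yields "https://twist.moe/api/anime/some-show/sources".
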
    @classmethod
    def search(cls, query):
        headers = {
            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/605.1.15 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/605.1.15',
            'x-access-token': '0df14814b9e590a1f26d3071a4ed7974'
        }
        # soup = helpers.soupify(helpers.get('https://twist.moe/', allow_redirects=True, headers=headers))
        req = helpers.get('https://twist.moe/api/anime', headers=headers)
        if 'being redirected' in req.text:
            logger.debug('Trying to extract cookie')
            # get_cookie() works on soup, so soupify the interstitial page first.
            cookie = get_cookie(helpers.soupify(req))
            logger.debug('Got cookie: ' + cookie)
            headers['cookie'] = cookie
            # XXX: Can't use helpers.get here because that one is cached. Investigate
            req = helpers.get('https://twist.moe/api/anime', headers=headers)
        all_anime = req.json()

        animes = []
        for anime in all_anime:
            animes.append(SearchResult(
                title=anime['title'],
                url='https://twist.moe/a/' + anime['slug']['slug'] + '/',
            ))
        # process.extract returns (match, score) pairs (best five by default);
        # keep just the SearchResult objects.
        animes = [ani[0] for ani in process.extract(query, animes)]
        return animes

    def get_data(self):
        anime_name = self.url.split('/a/')[-1].split('/')[0]
        url = self._api_url.format(anime_name)
        episodes = helpers.get(
            url,
            headers={
                'x-access-token': '0df14814b9e590a1f26d3071a4ed7974'
            }
        )
        episodes = episodes.json()
        logger.debug(episodes)

        self.title = anime_name

        # Each 'source' is an AES-encrypted path; decrypt() turns it into the
        # path component of the CDN URL.
        episode_urls = ['https://cdn.twist.moe' +
                        decrypt(episode['source'].encode('utf-8')).decode('utf-8')
                        for episode in episodes]
        self._episode_urls = [(i + 1, episode_url)
                              for i, episode_url in enumerate(episode_urls)]
        self._len = len(self._episode_urls)
        return self._episode_urls


# From stackoverflow https://stackoverflow.com/questions/36762098/how-to-decrypt-password-from-javascript-cryptojs-aes-encryptpassword-passphras
def pad(data):
    length = BLOCK_SIZE - (len(data) % BLOCK_SIZE)
    return data + (chr(length) * length).encode()


def unpad(data):
    # The last byte records how many padding bytes were added (PKCS#7).
    return data[:-(data[-1] if isinstance(data[-1], int) else ord(data[-1]))]


def bytes_to_key(salt, output=48):
    # extended from https://gist.github.com/gsakkis/4546068
    # MD5-based EVP_BytesToKey: stretch the fixed passphrase + salt until
    # `output` bytes of key material (key + IV) are available.
    assert len(salt) == 8, len(salt)
    data = b"267041df55ca2b36f2e322d05ee2c9cf"
    data += salt
    key = md5(data).digest()
    final_key = key
    while len(final_key) < output:
        key = md5(key + data).digest()
        final_key += key
    return final_key[:output]
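

# The encrypted 'source' strings use CryptoJS's OpenSSL-compatible layout:
#   b"Salted__" + 8-byte salt + AES-256-CBC ciphertext,
# with the key and IV derived from the fixed passphrase above via bytes_to_key().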
def decrypt(encrypted):
    encrypted = base64.b64decode(encrypted)
    assert encrypted[0:8] == b"Salted__"
    salt = encrypted[8:16]
    key_iv = bytes_to_key(salt, 32 + 16)
    key = key_iv[:32]
    iv = key_iv[32:]
    aes = AES.new(key, AES.MODE_CBC, iv)
    return unpad(aes.decrypt(encrypted[16:]))
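
# Illustrative note: any valid input starts with 'U2FsdGVkX1' (base64 of b"Salted__"),
# and the decrypted bytes are the path that get_data() appends to https://cdn.twist.moe.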


def get_cookie(soup):
    # Evaluate the interstitial page's inline script in Node with minimal
    # location/document stubs, then read back the cookie it sets.
    js = soup.select_one('script').text
    js = "location = {'reload': ()=>true};document = {}; \n" + js + "console.log(document.cookie)"
    cookie = eval_in_node(js).strip()
    return cookie
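

# Minimal usage sketch (the query and any output are illustrative; assumes the live
# twist.moe API, its access token, and Node for eval_in_node are still reachable):
#
#   results = TwistMoe.search('fullmetal')
#   print(results[0].title, results[0].url)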