2015-05-14 00:41:45 -07:00
|
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
# -*- encoding: utf-8 -*-
|
|
|
|
|
|
2017-12-10 06:43:07 -08:00
|
|
|
|
"""scdl allows you to download music from Soundcloud
|
2014-10-20 13:23:46 -07:00
|
|
|
|
|
2014-10-14 10:43:30 -07:00
|
|
|
|
Usage:
|
2017-08-15 06:20:42 -07:00
|
|
|
|
scdl -l <track_url> [-a | -f | -C | -t | -p][-c][-o <offset>]\
|
2017-12-26 03:05:15 -08:00
|
|
|
|
[--hidewarnings][--debug | --error][--path <path>][--addtofile][--addtimestamp]
|
|
|
|
|
[--onlymp3][--hide-progress][--min-size <size>][--max-size <size>][--remove]
|
2018-04-10 04:10:08 -07:00
|
|
|
|
[--no-playlist-folder][--download-archive <file>][--extract-artist][--flac]
|
2016-09-27 08:42:13 -07:00
|
|
|
|
scdl me (-s | -a | -f | -t | -p | -m)[-c][-o <offset>]\
|
2017-12-26 03:05:15 -08:00
|
|
|
|
[--hidewarnings][--debug | --error][--path <path>][--addtofile][--addtimestamp]
|
|
|
|
|
[--onlymp3][--hide-progress][--min-size <size>][--max-size <size>][--remove]
|
2018-04-10 04:10:08 -07:00
|
|
|
|
[--no-playlist-folder][--download-archive <file>][--extract-artist][--flac]
|
2015-01-14 08:55:14 -08:00
|
|
|
|
scdl -h | --help
|
|
|
|
|
scdl --version
|
2014-10-14 10:43:30 -07:00
|
|
|
|
|
2014-10-20 13:23:46 -07:00
|
|
|
|
|
2014-10-14 10:43:30 -07:00
|
|
|
|
Options:
|
2017-12-26 03:05:15 -08:00
|
|
|
|
-h --help Show this screen
|
|
|
|
|
--version Show version
|
|
|
|
|
me Use the user profile from the auth_token
|
|
|
|
|
-l [url] URL can be track/playlist/user
|
|
|
|
|
-s Download the stream of a user (token needed)
|
|
|
|
|
-a Download all tracks of user (including reposts)
|
|
|
|
|
-t Download all uploads of a user (no reposts)
|
|
|
|
|
-f Download all favorites of a user
|
|
|
|
|
-C Download all commented by a user
|
|
|
|
|
-p Download all playlists of a user
|
|
|
|
|
-m Download all liked and owned playlists of user
|
|
|
|
|
-c Continue if a downloaded file already exists
|
|
|
|
|
-o [offset] Begin with a custom offset
|
|
|
|
|
--addtimestamp Add track creation timestamp to filename,
|
|
|
|
|
which allows for chronological sorting
|
|
|
|
|
--addtofile Add artist to filename if missing
|
|
|
|
|
--debug Set log level to DEBUG
|
|
|
|
|
--download-archive [file] Keep track of track IDs in an archive file,
|
|
|
|
|
and skip already-downloaded files
|
|
|
|
|
--error Set log level to ERROR
|
2018-01-08 05:54:04 -08:00
|
|
|
|
--extract-artist Set artist tag from title instead of username
|
2017-12-26 03:05:15 -08:00
|
|
|
|
--hide-progress Hide the wget progress bar
|
|
|
|
|
--hidewarnings Hide Warnings. (use with precaution)
|
|
|
|
|
--max-size [max-size] Skip tracks larger than size (k/m/g)
|
|
|
|
|
--min-size [min-size] Skip tracks smaller than size (k/m/g)
|
|
|
|
|
--no-playlist-folder Download playlist tracks into main directory,
|
|
|
|
|
instead of making a playlist subfolder
|
|
|
|
|
--onlymp3 Download only the streamable mp3 file,
|
|
|
|
|
even if track has a Downloadable file
|
|
|
|
|
--path [path] Use a custom path for downloaded files
|
|
|
|
|
--remove Remove any files not downloaded from execution
|
2018-04-10 04:10:08 -07:00
|
|
|
|
--flac Convert downloaded .wav files to .flac
|
2014-10-14 10:43:30 -07:00
|
|
|
|
"""
|
2015-05-09 04:01:49 -07:00
|
|
|
|
|
|
|
|
|
import logging
|
2014-10-20 13:23:46 -07:00
|
|
|
|
import os
|
|
|
|
|
import signal
|
|
|
|
|
import sys
|
2014-10-23 08:12:24 -07:00
|
|
|
|
import time
|
2015-05-09 04:10:15 -07:00
|
|
|
|
import warnings
|
2015-08-26 14:21:15 -07:00
|
|
|
|
import math
|
2016-02-07 17:12:43 -08:00
|
|
|
|
import shutil
|
|
|
|
|
import requests
|
2016-02-07 17:35:51 -08:00
|
|
|
|
import re
|
2016-03-01 14:12:34 -08:00
|
|
|
|
import tempfile
|
2016-04-24 07:31:45 -07:00
|
|
|
|
import codecs
|
2018-04-10 04:10:08 -07:00
|
|
|
|
import shlex
|
2015-01-19 13:11:55 -08:00
|
|
|
|
|
2015-05-09 04:10:15 -07:00
|
|
|
|
import configparser
|
|
|
|
|
import mutagen
|
|
|
|
|
from docopt import docopt
|
2016-02-07 17:12:43 -08:00
|
|
|
|
from clint.textui import progress
|
2014-11-12 08:00:27 -08:00
|
|
|
|
|
2016-08-27 07:33:16 -07:00
|
|
|
|
from scdl import __version__, CLIENT_ID, ALT_CLIENT_ID
|
2016-04-17 06:00:53 -07:00
|
|
|
|
from scdl import client, utils
|
2015-05-09 04:01:49 -07:00
|
|
|
|
|
2017-08-30 16:19:37 -07:00
|
|
|
|
from datetime import datetime
|
2016-08-27 07:33:16 -07:00
|
|
|
|
|
2015-06-28 13:24:38 -07:00
|
|
|
|
# Log bare messages; silence requests' chatter below WARNING.
logging.basicConfig(level=logging.INFO, format='%(message)s')
logging.getLogger('requests').setLevel(logging.WARNING)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addFilter(utils.ColorizeFilter())

# Module-level state shared by the functions below.
arguments = None  # docopt result dict, populated in main()
token = ''        # Soundcloud OAuth token, read from scdl.cfg
path = ''         # download directory, read from scdl.cfg
offset = 1        # 1-based index of the first item to download (-o)

# Soundcloud API endpoint templates, keyed by download type ({0} is the
# user/track id or the url to resolve).
url = {
    'playlists-liked': ('https://api-v2.soundcloud.com/users/{0}/playlists'
                        '/liked_and_owned?limit=200'),
    'favorites': ('https://api.soundcloud.com/users/{0}/favorites?'
                  'limit=200'),
    'commented': ('https://api.soundcloud.com/users/{0}/comments'),
    'tracks': ('https://api.soundcloud.com/users/{0}/tracks?'
               'limit=200'),
    'all': ('https://api-v2.soundcloud.com/profile/soundcloud:users:{0}?'
            'limit=200'),
    'playlists': ('https://api.soundcloud.com/users/{0}/playlists?'
                  'limit=200'),
    'resolve': ('https://api.soundcloud.com/resolve?url={0}'),
    'trackinfo': ('https://api.soundcloud.com/tracks/{0}'),
    'user': ('https://api.soundcloud.com/users/{0}'),
    'me': ('https://api.soundcloud.com/me?oauth_token={0}')
}
client = client.Client()

# Filenames written during this run; --remove deletes everything else.
fileToKeep = []
|
2014-10-12 15:16:18 -07:00
|
|
|
|
|
2017-12-26 03:05:15 -08:00
|
|
|
|
|
2014-10-14 10:43:30 -07:00
|
|
|
|
def main():
    """
    Main function, parses the URL from command line arguments.

    Validates -o/--min-size/--max-size, applies --path, then dispatches to
    parse_url (for -l) or to the per-user download helpers (for ``me``).
    Exits the process on invalid arguments.
    """
    signal.signal(signal.SIGINT, signal_handler)
    global offset
    global arguments

    # Parse argument
    arguments = docopt(__doc__, version=__version__)

    if arguments['--debug']:
        logger.level = logging.DEBUG
    elif arguments['--error']:
        logger.level = logging.ERROR

    # import conf file
    get_config()

    logger.info('Soundcloud Downloader')
    logger.debug(arguments)

    if arguments['-o'] is not None:
        try:
            offset = int(arguments['-o'])
            if offset < 0:
                # Negative offsets are as invalid as non-numeric ones.
                raise ValueError('offset must be positive')
        except ValueError:
            logger.error('Offset should be a positive integer...')
            sys.exit()
        logger.debug('offset: %d', offset)

    if arguments['--min-size'] is not None:
        try:
            arguments['--min-size'] = utils.size_in_bytes(
                arguments['--min-size']
            )
        except Exception:
            logger.error(
                'Min size should be an integer with a possible unit suffix'
            )
            sys.exit()
        logger.debug('min-size: %d', arguments['--min-size'])

    if arguments['--max-size'] is not None:
        try:
            arguments['--max-size'] = utils.size_in_bytes(
                arguments['--max-size']
            )
        except Exception:
            logger.error(
                'Max size should be an integer with a possible unit suffix'
            )
            sys.exit()
        logger.debug('max-size: %d', arguments['--max-size'])

    if arguments['--hidewarnings']:
        warnings.filterwarnings('ignore')

    if arguments['--path'] is not None:
        if os.path.exists(arguments['--path']):
            os.chdir(arguments['--path'])
        else:
            logger.error('Invalid path in arguments...')
            sys.exit()
    logger.debug('Downloading to '+os.getcwd()+'...')

    if arguments['-l']:
        parse_url(arguments['-l'])
    elif arguments['me']:
        if arguments['-f']:
            download(who_am_i(), 'favorites', 'likes')
        if arguments['-C']:
            download(who_am_i(), 'commented', 'commented tracks')
        elif arguments['-t']:
            download(who_am_i(), 'tracks', 'uploaded tracks')
        elif arguments['-a']:
            download(who_am_i(), 'all', 'tracks and reposts')
        elif arguments['-p']:
            download(who_am_i(), 'playlists', 'playlists')
        elif arguments['-m']:
            download(who_am_i(), 'playlists-liked', 'my and liked playlists')

    if arguments['--remove']:
        remove_files()
|
2017-10-27 14:47:25 -07:00
|
|
|
|
|
2014-10-14 10:43:30 -07:00
|
|
|
|
|
2014-10-20 13:23:46 -07:00
|
|
|
|
def get_config():
    """
    Reads the auth token and the music download filepath from scdl.cfg
    ($HOME/.config/scdl/scdl.cfg) and chdirs into that path.

    Exits the process when the config is missing/incomplete or the
    configured path does not exist.
    """
    global token
    # Declare path global too: the original only declared token, so the
    # module-level `path` was shadowed by a local and never updated.
    global path
    config = configparser.ConfigParser()
    config.read(os.path.join(os.path.expanduser('~'), '.config/scdl/scdl.cfg'))
    try:
        token = config['scdl']['auth_token']
        path = config['scdl']['path']
    except KeyError:
        # Missing section or missing key — anything else should surface.
        logger.error('Are you sure scdl.cfg is in $HOME/.config/scdl/ ?')
        logger.error('Are both "auth_token" and "path" defined there?')
        sys.exit()

    if os.path.exists(path):
        os.chdir(path)
    else:
        logger.error('Invalid path in scdl.cfg...')
        sys.exit()
|
|
|
|
|
|
2014-10-20 13:23:46 -07:00
|
|
|
|
|
2016-08-27 07:33:16 -07:00
|
|
|
|
def get_item(track_url, client_id=CLIENT_ID):
    """
    Fetches metadata for a track or playlist.

    Resolves `track_url` through the Soundcloud /resolve endpoint and
    returns the decoded JSON item, or None on unrecoverable failure.
    Retries once with ALT_CLIENT_ID when the primary client id is
    rejected (403) or returns an empty playlist.
    """
    try:
        item_url = url['resolve'].format(track_url)

        r = requests.get(item_url, params={'client_id': client_id})
        logger.debug(r.url)
        # 403: this client id is refused — retry with the alternate id.
        if r.status_code == 403:
            return get_item(track_url, ALT_CLIENT_ID)

        item = r.json()
        # Some client ids get playlists with an empty track list; retry
        # with the alternate id (unless we are already using it).
        no_tracks = item['kind'] == 'playlist' and not item['tracks']
        if no_tracks and client_id != ALT_CLIENT_ID:
            return get_item(track_url, ALT_CLIENT_ID)
    except Exception:
        # Already on the fallback id: give up and return None.
        if client_id == ALT_CLIENT_ID:
            logger.error('Failed to get item...')
            return
        logger.error('Error resolving url, retrying...')
        time.sleep(5)
        try:
            return get_item(track_url, ALT_CLIENT_ID)
        except Exception as e:
            logger.error('Could not resolve url {0}'.format(track_url))
            logger.exception(e)
            sys.exit(0)
    return item
|
2014-10-23 08:22:58 -07:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_url(track_url):
    """
    Detects if a URL is a track or a playlist, and parses the track(s)
    to the track downloader
    """
    global arguments
    item = get_item(track_url)
    logger.debug(item)
    if not item:
        return

    kind = item['kind']
    if kind == 'track':
        logger.info('Found a track')
        download_track(item)
    elif kind == 'playlist':
        logger.info('Found a playlist')
        download_playlist(item)
    elif kind == 'user':
        logger.info('Found a user profile')
        # First matching CLI flag wins, mirroring the original elif chain.
        dispatch = (
            ('-f', 'favorites', 'likes'),
            ('-C', 'commented', 'commented tracks'),
            ('-t', 'tracks', 'uploaded tracks'),
            ('-a', 'all', 'tracks and reposts'),
            ('-p', 'playlists', 'playlists'),
            ('-m', 'playlists-liked', 'my and liked playlists'),
        )
        for flag, dl_type, label in dispatch:
            if arguments[flag]:
                download(item, dl_type, label)
                break
        else:
            logger.error('Please provide a download type...')
    else:
        logger.error('Unknown item type {0}'.format(item['kind']))
|
2014-11-16 09:19:42 -08:00
|
|
|
|
|
2014-10-23 08:22:58 -07:00
|
|
|
|
|
2014-10-23 07:14:29 -07:00
|
|
|
|
def who_am_i():
    """
    Display username from current token and check for validity
    """
    me_url = url['me'].format(token)
    response = requests.get(me_url, params={'client_id': CLIENT_ID})
    # An invalid/expired token surfaces here as an HTTPError.
    response.raise_for_status()
    current_user = response.json()
    logger.debug(me_url)

    logger.info('Hello {0}!'.format(current_user['username']))
    return current_user
|
|
|
|
|
|
2017-10-27 14:47:25 -07:00
|
|
|
|
|
2017-12-10 06:43:07 -08:00
|
|
|
|
def remove_files():
    """
    Removes any pre-existing tracks that were not just downloaded
    """
    logger.info("Removing local track files that were not downloaded...")
    # Delete every regular file in the cwd that this run did not produce.
    for entry in os.listdir('.'):
        if os.path.isfile(entry) and entry not in fileToKeep:
            os.remove(entry)
|
|
|
|
|
|
2017-12-26 03:05:15 -08:00
|
|
|
|
|
2017-12-10 06:43:07 -08:00
|
|
|
|
def get_track_info(track_id):
    """
    Fetches track info from Soundcloud, given a track_id
    """
    logger.info('Retrieving more info on the track')
    response = requests.get(
        url["trackinfo"].format(track_id),
        params={'client_id': CLIENT_ID},
        stream=True,
    )
    track = response.json()
    logger.debug(track)
    return track
|
2014-10-23 07:14:29 -07:00
|
|
|
|
|
2017-12-26 03:05:15 -08:00
|
|
|
|
|
2016-02-29 09:08:41 -08:00
|
|
|
|
def download(user, dl_type, name):
    """
    Download user items of dl_type (ie. all, playlists, liked, commented, etc.)

    `user` is a Soundcloud user dict (needs 'username' and 'id'); `dl_type`
    selects the endpoint template from the module-level `url` dict; `name`
    is a human-readable label used only for logging.
    """
    username = user['username']
    user_id = user['id']
    logger.info(
        'Retrieving all {0} of user {1}...'.format(name, username)
    )
    dl_url = url[dl_type].format(user_id)
    logger.debug(dl_url)
    # client.get_collection presumably follows the paginated collection —
    # returns a flat list of items (verify against scdl.client).
    resources = client.get_collection(dl_url, token)
    # Honor the -o offset: drop everything before the requested start index.
    del resources[:offset - 1]
    logger.debug(resources)
    total = len(resources)
    logger.info('Retrieved {0} {1}'.format(total, name))
    for counter, item in enumerate(resources, offset):
        try:
            logger.debug(item)
            logger.info('{0} n°{1} of {2}'.format(
                name.capitalize(), counter, total)
            )
            if dl_type == 'all':
                item_name = item['type'].split('-')[0]  # remove the '-repost'
                uri = item[item_name]['uri']
                parse_url(uri)
            elif dl_type == 'playlists':
                download_playlist(item)
            elif dl_type == 'playlists-liked':
                parse_url(item['playlist']['uri'])
            elif dl_type == 'commented':
                # Comment entries only carry a track_id; fetch the full track.
                item = get_track_info(item['track_id'])
                download_track(item)
            else:
                download_track(item)
        except Exception as e:
            # One failed item must not abort the whole batch.
            logger.exception(e)
    logger.info('Downloaded all {0} {1} of user {2}!'.format(
        total, name, username)
    )
|
2014-11-16 09:19:42 -08:00
|
|
|
|
|
2014-10-12 15:16:18 -07:00
|
|
|
|
|
2014-10-23 07:14:29 -07:00
|
|
|
|
def download_playlist(playlist):
    """
    Downloads a playlist.

    Writes an .m3u file named after the (sanitized) playlist title and
    downloads each track via download_track. Unless --no-playlist-folder
    is set, tracks go into a subfolder named after the playlist.
    """
    global arguments
    invalid_chars = '\/:*?|<>"'
    playlist_name = playlist['title'].encode('utf-8', 'ignore')
    playlist_name = playlist_name.decode('utf8')
    playlist_name = ''.join(c for c in playlist_name if c not in invalid_chars)

    use_folder = not arguments['--no-playlist-folder']
    if use_folder:
        if not os.path.exists(playlist_name):
            os.makedirs(playlist_name)
        os.chdir(playlist_name)

    try:
        with codecs.open(playlist_name + '.m3u', 'w+', 'utf8') as playlist_file:
            playlist_file.write('#EXTM3U' + os.linesep)
            # Honor the -o offset within the playlist as well.
            del playlist['tracks'][:offset - 1]
            for counter, track_raw in enumerate(playlist['tracks'], offset):
                logger.debug(track_raw)
                logger.info('Track n°{0}'.format(counter))
                download_track(track_raw, playlist['title'], playlist_file)
    finally:
        # Bug fix: only step back out if we actually entered a playlist
        # folder. The original chdir'd to '..' unconditionally, escaping
        # the download directory when --no-playlist-folder was used.
        if use_folder:
            os.chdir('..')
|
2014-11-16 09:19:42 -08:00
|
|
|
|
|
2014-10-23 07:14:29 -07:00
|
|
|
|
|
2016-02-13 09:14:30 -08:00
|
|
|
|
def download_my_stream():
    """
    Download the stream of the current user.

    NOTE: not implemented — currently a no-op placeholder (would require
    the auth token).
    """
    # TODO
    # Use Token
|
2016-02-13 09:14:30 -08:00
|
|
|
|
|
2017-12-26 03:05:15 -08:00
|
|
|
|
|
2017-08-30 16:19:37 -07:00
|
|
|
|
def try_utime(path, filetime):
    """
    Best-effort update of a file's timestamps.

    Sets the modification time of `path` to `filetime` (a POSIX timestamp)
    and the access time to now. Failures (missing file, permissions) are
    logged and ignored.
    """
    try:
        os.utime(path, (time.time(), filetime))
    except OSError:
        # Narrowed from a bare except; logger.warn() is the deprecated
        # alias of warning().
        logger.warning("Cannot update utime of file")
|
|
|
|
|
|
|
|
|
|
|
2018-01-24 05:01:15 -08:00
|
|
|
|
def get_filename(track, original_filename=None):
    """
    Build the sanitized local filename for a track.

    Applies --addtofile (prefix the uploader's name) and --addtimestamp
    (prefix the creation timestamp), keeps the extension of
    `original_filename` when given (else .mp3), truncates the title and
    strips characters that are invalid in filenames.
    """
    forbidden = '\/:*?|<>"'
    artist = track['user']['username']
    name = track['title'].encode('utf-8', 'ignore').decode('utf8')

    if arguments['--addtofile'] and artist not in name and '-' not in name:
        name = '{0} - {1}'.format(artist, name)
        logger.debug('Adding "{0}" to filename'.format(artist))

    if arguments['--addtimestamp']:
        # created_at sample: 2017/03/03 09:29:33 +0000
        stamp = datetime.strptime(
            track['created_at'], "%Y/%m/%d %H:%M:%S %z"
        ).timestamp()
        name = str(int(stamp)) + "_" + name

    extension = ".mp3"
    if original_filename is not None:
        original_filename.encode('utf-8', 'ignore').decode('utf8')
        extension = os.path.splitext(original_filename)[1]

    candidate = name[:251] + extension.lower()
    return ''.join(ch for ch in candidate if ch not in forbidden)
|
2017-01-27 08:01:31 -08:00
|
|
|
|
|
2015-01-05 14:22:14 -08:00
|
|
|
|
|
2015-08-26 14:21:15 -07:00
|
|
|
|
def download_track(track, playlist_name=None, playlist_file=None):
    """
    Downloads a track.

    Prefers the original downloadable file (unless --onlymp3), falling
    back to the streamable mp3. Optionally appends an entry to an open
    m3u `playlist_file`, converts .wav to .flac (--flac), tags the file,
    restores its creation date, and records it in the download archive.
    """
    global arguments

    title = track['title']
    title = title.encode('utf-8', 'ignore').decode('utf8')
    logger.info('Downloading {0}'.format(title))

    # Not streamable
    if not track['streamable']:
        logger.error('{0} is not streamable...'.format(title))
        return

    r = None
    # Downloadable track
    if track['downloadable'] and not arguments['--onlymp3']:
        logger.info('Downloading the original file.')
        original_url = track['download_url']
        r = requests.get(
            original_url, params={'client_id': CLIENT_ID}, stream=True
        )
        if r.status_code == 401:
            # Download quota exhausted: fall back to the stream below.
            logger.info('The original file has no download left.')
            filename = get_filename(track)
        else:
            # Take the server-provided name (quoted in the header),
            # then normalize it through get_filename.
            d = r.headers.get('content-disposition')
            filename = re.findall("filename=(.+)", d)[0][1:-1]
            filename = get_filename(track, filename)

    else:
        filename = get_filename(track)
    logger.debug("filename : {0}".format(filename))

    # Skip if file ID or filename already exists
    if already_downloaded(track, title, filename):
        return

    # Add the track to the generated m3u playlist file
    if playlist_file:
        duration = math.floor(track['duration'] / 1000)
        playlist_file.write(
            '#EXTINF:{0},{1}{3}{2}{3}'.format(
                duration, title, filename, os.linesep
            )
        )

    if arguments['--remove']:
        fileToKeep.append(filename)

    # Streamable track download (also the fallback when the original
    # download returned 401 above).
    if r is None or r.status_code == 401:
        url = track['stream_url']
        r = requests.get(url, params={'client_id': CLIENT_ID}, stream=True)
        logger.debug(r.url)
        # 401/429: retry the stream with the alternate client id.
        if r.status_code == 401 or r.status_code == 429:
            r = requests.get(
                url, params={'client_id': ALT_CLIENT_ID}, stream=True
            )
            logger.debug(r.url)
    r.raise_for_status()
    # Download into a temp file first; moved into place only on success.
    temp = tempfile.NamedTemporaryFile(delete=False)

    total_length = int(r.headers.get('content-length'))

    min_size = arguments.get('--min-size')
    max_size = arguments.get('--max-size')

    if min_size is not None and total_length < min_size:
        logging.info('{0} not large enough, skipping'.format(title))
        return

    if max_size is not None and total_length > max_size:
        logging.info('{0} too large, skipping'.format(title))
        return

    received=0
    with temp as f:
        for chunk in progress.bar(
            r.iter_content(chunk_size=1024),
            expected_size=(total_length/1024) + 1,
            hide=True if arguments["--hide-progress"] else False
        ):
            if chunk:
                received+=len(chunk)
                f.write(chunk)
                f.flush()

    # Guard against truncated downloads before moving the file in place.
    if received != total_length:
        logger.error('connection closed prematurely, download incomplete')
        sys.exit()

    shutil.move(temp.name, os.path.join(os.getcwd(), filename))
    if arguments['--flac'] and filename.endswith('.wav'):
        logger.info('Converting from .wav to .flac...')
        newfilename = filename[:-4] + ".flac"
        # shlex.quote protects against shell-special characters in titles.
        new = shlex.quote(newfilename)
        old = shlex.quote(filename)
        logger.debug("ffmpeg -i {0} {1} -loglevel fatal".format(old, new))
        os.system("ffmpeg -i {0} {1} -loglevel fatal".format(old, new))
        os.remove(filename)
        filename = newfilename

    if not filename.endswith('.wav'):
        try:
            set_metadata(track, filename, playlist_name)
        except Exception as e:
            # Tagging failure is non-fatal; the audio itself is fine.
            logger.error('Error trying to set the tags...')
            logger.debug(e)
    else:
        logger.error("This type of audio doesn't support tagging...")

    # Try to change the real creation date
    created_at = track['created_at']
    timestamp = datetime.strptime(created_at, '%Y/%m/%d %H:%M:%S %z')
    filetime = int(time.mktime(timestamp.timetuple()))
    try_utime(filename, filetime)

    logger.info('{0} Downloaded.\n'.format(filename))
    record_download_archive(track)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def already_downloaded(track, title, filename=None):
    """
    Returns True if the file has already been downloaded
    """
    global arguments
    found = False

    if filename and os.path.isfile(filename):
        found = True
    if arguments['--download-archive'] and in_download_archive(track):
        found = True

    if not found:
        return False

    # Existing track: either skip it (-c / --remove) or abort the run.
    if arguments['-c'] or arguments['--remove']:
        logger.info('Track "{0}" already downloaded.'.format(title))
        return True
    logger.error('Track "{0}" already exists!'.format(title))
    logger.error('Exiting... (run again with -c to continue)')
    sys.exit(0)
|
2014-11-16 09:19:42 -08:00
|
|
|
|
|
2017-12-24 02:35:27 -08:00
|
|
|
|
|
|
|
|
|
def in_download_archive(track):
    """
    Returns True if a track_id exists in the download archive
    """
    global arguments
    if not arguments['--download-archive']:
        return

    archive_filename = arguments.get('--download-archive')
    wanted = '{0}'.format(track['id'])
    try:
        # 'a+' creates the archive on first use; seek back to read it.
        with open(archive_filename, 'a+', encoding='utf-8') as archive:
            logger.debug('Contents of {0}:'.format(archive_filename))
            archive.seek(0)
            for entry in archive:
                logger.debug('"'+entry.strip()+'"')
                if entry.strip() == wanted:
                    return True
    except IOError as ioe:
        logger.error('Error trying to read download archive...')
        logger.debug(ioe)

    return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def record_download_archive(track):
    """
    Write the track_id in the download archive.

    No-op when no archive is configured. A write failure is logged and
    swallowed so a bad archive path does not abort the download run.
    """
    global arguments
    if not arguments['--download-archive']:
        return

    archive_filename = arguments.get('--download-archive')
    try:
        # `archive` instead of `file` — avoid shadowing the builtin name.
        with open(archive_filename, 'a', encoding='utf-8') as archive:
            archive.write('{0}'.format(track['id']) + '\n')
    except IOError as ioe:
        logger.error('Error trying to write to download archive...')
        logger.debug(ioe)
def set_metadata(track, filename, album=None):
    """
    Sets the mp3 file metadata using the Python module Mutagen.

    Writes title/artist/album/genre/website/date via the easy interface,
    then adds the description (comment) and cover art, handling FLAC and
    ID3 (mp3) containers separately.

    Bug fixes vs the previous revision:
    * `audio.__class__ is mutagen.flac` compared a class against a *module*,
      so it was always False and neither description nor artwork was ever
      written; use isinstance against the real classes instead.
    * The raw-ID3 edits (`a['COMM']`, `a['APIC']`) were never saved, and the
      trailing `audio.save()` of the earlier-loaded easy handle would have
      clobbered them anyway; easy tags are now saved first, raw edits after.
    * `mutagen.flac.Picture(data)` expects a serialized FLAC picture block,
      not raw JPEG bytes; build an empty Picture and assign `.data`.
    """
    logger.info('Setting tags...')
    global arguments
    artwork_url = track['artwork_url']
    user = track['user']
    if not artwork_url:
        # Fall back to the uploader's avatar when the track has no cover.
        artwork_url = user['avatar_url']
    artwork_url = artwork_url.replace('large', 't500x500')
    response = requests.get(artwork_url, stream=True)
    with tempfile.NamedTemporaryFile() as out_file:
        shutil.copyfileobj(response.raw, out_file)
        out_file.seek(0)

        track_created = track['created_at']
        track_date = datetime.strptime(track_created, "%Y/%m/%d %H:%M:%S %z")
        debug_extract_dates = '{0} {1}'.format(track_created, track_date)
        logger.debug('Extracting date: {0}'.format(debug_extract_dates))
        # NOTE(review): the double colons are preserved from the original
        # format string — confirm whether "%H:%M:%S" was intended.
        track['date'] = track_date.strftime("%Y-%m-%d %H::%M::%S")

        track['artist'] = user['username']
        if arguments['--extract-artist']:
            # Try common dash separators (plain, minus, en/em/horizontal bar)
            # to split "Artist - Title" out of the track title.
            for dash in [' - ', ' − ', ' – ', ' — ', ' ― ']:
                if dash in track['title']:
                    artist_title = track['title'].split(dash)
                    track['artist'] = artist_title[0].strip()
                    track['title'] = artist_title[1].strip()
                    break

        audio = mutagen.File(filename, easy=True)
        audio['title'] = track['title']
        audio['artist'] = track['artist']
        if album: audio['album'] = album
        if track['genre']: audio['genre'] = track['genre']
        if track['permalink_url']: audio['website'] = track['permalink_url']
        if track['date']: audio['date'] = track['date']
        # Persist the easy tags before any raw-ID3 edits below, otherwise
        # saving this handle later would overwrite the COMM/APIC frames.
        audio.save()

        is_flac = isinstance(audio, mutagen.flac.FLAC)
        is_id3 = isinstance(audio.tags, mutagen.easyid3.EasyID3)

        if track['description']:
            if is_flac:
                audio['comment'] = track['description']
                audio.save()
            elif is_id3:
                a = mutagen.File(filename)
                a['COMM'] = mutagen.id3.COMM(
                    encoding=3, lang=u'ENG', text=track['description']
                )
                a.save()
        if artwork_url:
            if is_flac:
                # Picture() parses a FLAC picture block; raw JPEG bytes go
                # into .data instead.
                p = mutagen.flac.Picture()
                p.data = out_file.read()
                p.mime = 'image/jpeg'
                p.type = 3  # 3 = front cover
                audio.add_picture(p)
                audio.save()
            elif is_id3:
                a = mutagen.File(filename)
                a['APIC'] = mutagen.id3.APIC(
                    encoding=3, mime='image/jpeg', type=3, data=out_file.read()
                )
                a.save()
def signal_handler(signal, frame):
    """Gracefully terminate on keyboard interrupt (SIGINT)."""
    farewell = '\nGood bye!'
    logger.info(farewell)
    sys.exit(0)
if __name__ == '__main__':
    # Script entry point: delegate to main() only when executed directly,
    # not when this module is imported.
    main()