# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import, unicode_literals
import argparse
import sys
import os
import logging
import copy
import re
from svtplay_dl.error import UIException
from svtplay_dl.log import log
from svtplay_dl.utils import select_quality, list_quality, ensure_unicode
from svtplay_dl.service import service_handler, Generic
from svtplay_dl.fetcher import VideoRetriever
from svtplay_dl.subtitle import subtitle
from svtplay_dl.output import filename
from svtplay_dl.postprocess import postprocess
from svtplay_dl.service.aftonbladet import Aftonbladet, Aftonbladettv
from svtplay_dl.service.atg import Atg
from svtplay_dl.service.bambuser import Bambuser
from svtplay_dl.service.bigbrother import Bigbrother
from svtplay_dl.service.cmore import Cmore
from svtplay_dl.service.dbtv import Dbtv
from svtplay_dl.service.disney import Disney
from svtplay_dl.service.dplay import Dplay
from svtplay_dl.service.dr import Dr
from svtplay_dl.service.efn import Efn
from svtplay_dl.service.eurosport import Eurosport
from svtplay_dl.service.expressen import Expressen
from svtplay_dl.service.facebook import Facebook
from svtplay_dl.service.filmarkivet import Filmarkivet
from svtplay_dl.service.flowonline import Flowonline
from svtplay_dl.service.hbo import Hbo
from svtplay_dl.service.twitch import Twitch
from svtplay_dl.service.lemonwhale import Lemonwhale
from svtplay_dl.service.mtvnn import MtvMusic
from svtplay_dl.service.mtvnn import Mtvnn
from svtplay_dl.service.mtvservices import Mtvservices
from svtplay_dl.service.nhl import NHL
from svtplay_dl.service.npo import Npo
from svtplay_dl.service.nrk import Nrk
from svtplay_dl.service.oppetarkiv import OppetArkiv
from svtplay_dl.service.picsearch import Picsearch
from svtplay_dl.service.pokemon import Pokemon
from svtplay_dl.service.qbrick import Qbrick
from svtplay_dl.service.radioplay import Radioplay
from svtplay_dl.service.riksdagen import Riksdagen
from svtplay_dl.service.ruv import Ruv
from svtplay_dl.service.raw import Raw
from svtplay_dl.service.solidtango import Solidtango
from svtplay_dl.service.sportlib import Sportlib
from svtplay_dl.service.sr import Sr
from svtplay_dl.service.svt import Svt
from svtplay_dl.service.barnkanalen import Barnkanalen
from svtplay_dl.service.svtplay import Svtplay
from svtplay_dl.service.tv4play import Tv4play
from svtplay_dl.service.urplay import Urplay
from svtplay_dl.service.vg import Vg
from svtplay_dl.service.viaplay import Viaplay
from svtplay_dl.service.viasatsport import Viasatsport
from svtplay_dl.service.vimeo import Vimeo
from svtplay_dl.service.youplay import Youplay

__version__ = "1.9.11"

sites = [
    Aftonbladet,
    Aftonbladettv,
    Atg,
    Bambuser,
    Barnkanalen,
    Bigbrother,
    Cmore,
    Dbtv,
    Disney,
    Dplay,
    Dr,
    Efn,
    Eurosport,
    Expressen,
    Facebook,
    Filmarkivet,
    Flowonline,
    Hbo,
    Twitch,
    Lemonwhale,
    Mtvservices,
    MtvMusic,
    Mtvnn,
    NHL,
    Npo,
    Nrk,
    Qbrick,
    Picsearch,
    Pokemon,
    Ruv,
    Radioplay,
    Solidtango,
    Sportlib,
    Sr,
    Svt,
    Svtplay,
    OppetArkiv,
    Tv4play,
    Urplay,
    Viaplay,
    Viasatsport,
    Vimeo,
    Vg,
    Youplay,
    Riksdagen,
    Raw]


class Options(object):
    """
    Options used when invoking the script from another Python script.

    Simple container class used when calling get_media() from another Python
    script. The variables correspond to the command line parameters parsed
    in main() when the script is called directly.

    When called from a script there are a few more things to consider:

    * Logging is done to 'log'. main() calls setup_log(), which sets the
      logging to either stdout or stderr depending on the silent level.
      A user calling get_media() directly can either also use setup_log()
      or configure the log manually.
    * Progress information is printed to 'progress_stream', which defaults to
      sys.stderr but can be changed to any stream.
    * Many errors result in calls to sys.exit(), so catch 'SystemExit'
      exceptions to prevent the entire application from exiting if that
      happens.

    See the illustrative usage sketch below this class.
    """
    def __init__(self):
        self.output = None
        self.resume = False
        self.live = False
        self.capture_time = -1
        self.silent = False
        self.force = False
        self.quality = 0
        self.flexibleq = 0
        self.list_quality = False
        self.other = None
        self.subtitle = False
        self.username = None
        self.password = None
        self.thumbnail = False
        self.all_episodes = False
        self.all_last = -1
        self.merge_subtitle = False
        self.force_subtitle = False
        self.require_subtitle = False
        self.get_all_subtitles = False
        self.get_raw_subtitles = False
        self.convert_subtitle_colors = False
        self.preferred = None
        self.verbose = False
        self.output_auto = False
        self.service = None
        self.cookies = None
        self.exclude = None
        self.get_url = False
        self.ssl_verify = True
        self.http_headers = None
        self.stream_prio = None
        self.remux = False
        self.silent_semi = False
        self.proxy = None
        self.hls_time_stamp = False
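

# Illustrative sketch of driving this module from another Python script, as
# described in the Options docstring above. The URL and output path are
# placeholders, not values used anywhere in this module:
#
#     from svtplay_dl import Options, get_media, setup_log
#
#     opts = Options()
#     opts.output = "/tmp/videos/"   # hypothetical output directory
#     opts.subtitle = True           # also fetch subtitles, if available
#     setup_log(silent=False)        # or configure the 'log' logger manually
#     try:
#         get_media("http://www.example.com/some-video", opts)
#     except SystemExit:
#         pass                       # many errors end in sys.exit(); see above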


def get_multiple_media(urls, options):
    if options.output and os.path.isfile(options.output):
        log.error("Output must be a directory if used with multiple URLs")
        sys.exit(2)
    elif options.output and not os.path.exists(options.output):
        try:
            os.makedirs(options.output)
        except OSError as e:
            log.error("%s: %s", e.strerror, e.filename)
            return

    for url in urls:
        get_media(url, copy.copy(options))


def get_media(url, options):
    if "http" not in url[:4]:
        url = "http://%s" % url

    if options.silent_semi:
        options.silent = True
    if options.verbose:
        log.debug("version: {0}".format(__version__))

    stream = service_handler(sites, options, url)
    if not stream:
        generic = Generic(options, url)
        url, stream = generic.get(sites)
    if not stream:
        if re.search(".f4m", url) or re.search(".m3u8", url) or re.search(".mpd", url):
            stream = Raw(options, url)
        if not stream:
            log.error("That site is not supported. Make a ticket or send a message")
            sys.exit(2)

    if options.all_episodes:
        get_all_episodes(stream, copy.copy(options), url)
    else:
        get_one_media(stream, copy.copy(options))


def get_all_episodes(stream, options, url):
    if options.output and os.path.isfile(options.output):
        log.error("Output must be a directory if used with --all-episodes")
        sys.exit(2)
    elif options.output and not os.path.exists(options.output):
        try:
            os.makedirs(options.output)
        except OSError as e:
            log.error("%s: %s", e.strerror, e.filename)
            return

    episodes = stream.find_all_episodes(options)
    if episodes is None:
        return
    for idx, o in enumerate(episodes):
        if o == url:
            substream = stream
        else:
            substream = service_handler(sites, copy.copy(options), o)

        log.info("Episode %d of %d", idx + 1, len(episodes))
        log.info("Url: %s", o)

        # get_one_media overwrites options.output...
        get_one_media(substream, copy.copy(options))


def get_one_media(stream, options):
    # Make an automagic filename
    if not filename(stream):
        return
    if options.merge_subtitle:
        from svtplay_dl.utils import which
        if not which('ffmpeg'):
            log.error("--merge-subtitle needs ffmpeg. Please install ffmpeg.")
            log.info("https://ffmpeg.org/download.html")
            sys.exit(2)

    videos = []
    subs = []
    subfixes = []
    error = []
    streams = stream.get()
    try:
        for i in streams:
            if isinstance(i, VideoRetriever):
                if options.preferred:
                    if options.preferred.lower() == i.name():
                        videos.append(i)
                else:
                    videos.append(i)
            if isinstance(i, subtitle):
                subs.append(i)
            if isinstance(i, Exception):
                error.append(i)
    except Exception as e:
        if options.verbose:
            raise
        else:
            log.error("svtplay-dl crashed")
            log.error("Run again and add --verbose as an argument, to get more information")
            log.error("If the error persists, you can report it at https://github.com/spaam/svtplay-dl/issues")
            log.error("Include the URL used, the stack trace and the output of svtplay-dl --version in the issue")
            sys.exit(3)

    if options.require_subtitle and not subs:
        log.info("No subtitles available")
        return

    if options.subtitle and options.get_url:
        if subs:
            if options.get_all_subtitles:
                for sub in subs:
                    print(sub.url)
            else:
                print(subs[0].url)
        if options.force_subtitle:
            return

    def options_subs_dl(subfixes):
        if subs:
            if options.get_all_subtitles:
                for sub in subs:
                    sub.download()
                    if options.merge_subtitle:
                        if sub.subfix:
                            subfixes += [sub.subfix]
                        else:
                            options.get_all_subtitles = False
            else:
                subs[0].download()
        elif options.merge_subtitle:
            options.merge_subtitle = False

    if options.subtitle and not options.get_url:
        options_subs_dl(subfixes)
        if options.force_subtitle:
            return

    if options.merge_subtitle and not options.subtitle:
        options_subs_dl(subfixes)

    if not videos:
        log.error("No videos found.")
        for exc in error:
            log.error(str(exc))
    else:
        if options.list_quality:
            list_quality(videos)
            return
        try:
            stream = select_quality(options, videos)
            if options.get_url:
                print(stream.url)
                return
            log.info("Selected to download %s, bitrate: %s",
                     stream.name(), stream.bitrate)
            stream.download()
        except UIException as e:
            if options.verbose:
                raise e
            log.error(e)
            sys.exit(2)

        if options.thumbnail and hasattr(stream, "get_thumbnail"):
            stream.get_thumbnail(options)

        post = postprocess(stream, options, subfixes)
        if (stream.name() == "dash" or (stream.name() == "hls" and stream.options.segments)) and post.detect:
            post.merge()
        elif (stream.name() == "dash" or (stream.name() == "hls" and stream.options.segments)) and not post.detect and stream.finished:
            log.warning("Can't find ffmpeg/avconv. Audio and video are in separate files. If you don't want this, use -P hls or hds")
        elif stream.name() == "hls" or options.remux:
            if post.detect:
                post.remux()
            else:
                log.warning("Can't find ffmpeg/avconv. The file may be unplayable.")
        if options.silent_semi and stream.finished:
            log.log(25, "Download of %s was completed" % stream.options.output)


def setup_log(silent, verbose=False):
    logging.addLevelName(25, "INFO")
    fmt = logging.Formatter('%(levelname)s: %(message)s')

    if silent:
        stream = sys.stderr
        level = 25
    elif verbose:
        stream = sys.stderr
        level = logging.DEBUG
        fmt = logging.Formatter('%(levelname)s [%(created)s] %(pathname)s/%(funcName)s: %(message)s')
    else:
        stream = sys.stdout
        level = logging.INFO

    hdlr = logging.StreamHandler(stream)
    hdlr.setFormatter(fmt)
    log.addHandler(hdlr)
    log.setLevel(level)


def main():
    """ Main program """
    parser = argparse.ArgumentParser(prog="svtplay-dl")
    general = parser.add_argument_group()
    general.add_argument('--version', action='version', version='%(prog)s {0}'.format(__version__))
    general.add_argument("-o", "--output", metavar="output", help="outputs to the given filename or folder")
    general.add_argument("-f", "--force", action="store_true", dest="force", default=False,
                         help="overwrite if file exists already")
    general.add_argument("-r", "--resume", action="store_true", dest="resume", default=False,
                         help="resume a download (RTMP based ones)")
    general.add_argument("-l", "--live", action="store_true", dest="live", default=False,
                         help="enable for live streams (RTMP based ones)")
    general.add_argument("-c", "--capture_time", default=-1, type=int, metavar="capture_time",
                         help="define capture time in minutes of a live stream")
    general.add_argument("-s", "--silent", action="store_true", dest="silent", default=False, help="be less verbose")
    general.add_argument("--silent-semi", action="store_true", dest="silent_semi", default=False,
                         help="only show a message when the file is downloaded")
    general.add_argument("-u", "--username", default=None, help="username")
    general.add_argument("-p", "--password", default=None, help="password")
    general.add_argument("-t", "--thumbnail", action="store_true", dest="thumbnail", default=False,
                         help="download thumbnail from the site if available")
    general.add_argument("-g", "--get-url", action="store_true", dest="get_url", default=False,
                         help="do not download any video, but instead print the URL.")
    general.add_argument("--dont-verify-ssl-cert", action="store_false", dest="ssl_verify", default=True,
                         help="Don't attempt to verify SSL certificates.")
    general.add_argument("--http-header", dest="http_headers", default=None, metavar="header1=value;header2=value2",
                         help="A header to add to each HTTP request.")
    general.add_argument("--remux", dest="remux", default=False, action="store_true",
                         help="Remux from one container to mp4 using ffmpeg or avconv")
    general.add_argument("--exclude", dest="exclude", default=None, metavar="WORD1,WORD2,...",
                         help="exclude videos with the WORD(s) in the filename. comma separated.")
    general.add_argument("--proxy", dest="proxy", default=None,
                         metavar="proxy", help="Use the specified HTTP/HTTPS/SOCKS proxy. To enable experimental "
                                               "SOCKS proxy, specify a proper scheme. For example "
                                               "socks5://127.0.0.1:1080/.")
    general.add_argument("-v", "--verbose", action="store_true", dest="verbose", default=False,
                         help="explain what is going on")

    quality = parser.add_argument_group("Quality")
    quality.add_argument("-q", "--quality", default=0, metavar="quality",
                         help="choose what format to download based on bitrate / video resolution. "
                              "It will download the best format by default")
    quality.add_argument("-Q", "--flexible-quality", default=0, metavar="amount", dest="flexibleq",
                         help="allow given quality (as above) to differ by an amount")
    quality.add_argument("-P", "--preferred", default=None, metavar="preferred",
                         help="preferred download method (dash, hls, hds, http or rtmp)")
    quality.add_argument("--list-quality", dest="list_quality", action="store_true", default=False,
                         help="list the quality for a video")
    quality.add_argument("--stream-priority", dest="stream_prio", default=None, metavar="dash,hls,hds,http,rtmp",
                         help="If two streams have the same quality, choose the one you prefer")

    subtitle = parser.add_argument_group("Subtitle")
    subtitle.add_argument("-S", "--subtitle", action="store_true", dest="subtitle", default=False,
                          help="download subtitle from the site if available")
    subtitle.add_argument("-M", "--merge-subtitle", action="store_true", dest="merge_subtitle", default=False,
                          help="merge subtitle with video/audio file with corresponding ISO639-3 language code. "
                               "this invokes --remux automatically. use with -S for external also.")
    subtitle.add_argument("--force-subtitle", dest="force_subtitle", default=False, action="store_true",
                          help="download only the subtitle if it is used with -S")
    subtitle.add_argument("--require-subtitle", dest="require_subtitle", default=False, action="store_true",
                          help="download only if a subtitle is available")
    subtitle.add_argument("--all-subtitles", dest="get_all_subtitles", default=False, action="store_true",
                          help="Download all available subtitles for the video")
    subtitle.add_argument("--raw-subtitles", dest="get_raw_subtitles", default=False, action="store_true",
                          help="also download the subtitles in their native format")
    subtitle.add_argument("--convert-subtitle-colors", dest="convert_subtitle_colors", default=False, action="store_true",
                          help='converts the color information in subtitles to <font color=""> tags')

    alleps = parser.add_argument_group("All")
    alleps.add_argument("-A", "--all-episodes", action="store_true", dest="all_episodes", default=False,
                        help="try to download all episodes")
    alleps.add_argument("--all-last", dest="all_last", default=-1, type=int, metavar="NN",
                        help="get last NN episodes instead of all episodes")
    alleps.add_argument("--include-clips", dest="include_clips", default=False, action="store_true",
                        help="include clips from websites when using -A")

    cmorep = parser.add_argument_group("C More")
    cmorep.add_argument("--cmore-operatorlist", dest="cmoreoperatorlist", default=False, action="store_true",
                        help="show operatorlist for cmore")
    cmorep.add_argument("--cmore-operator", dest="cmoreoperator", default=None, metavar="operator")

    parser.add_argument('urls', nargs="*")
    options = parser.parse_args()
    if len(options.urls) == 0:
        parser.print_help()
        sys.exit(0)
    urls = options.urls
    if len(urls) < 1:
        parser.error("Incorrect number of arguments")
    if options.exclude:
        options.exclude = options.exclude.split(",")
    if options.require_subtitle:
        if options.merge_subtitle:
            options.merge_subtitle = True
        else:
            options.subtitle = True
    if options.merge_subtitle:
        options.remux = True
    options = mergeParserOption(Options(), options)
    if options.silent_semi:
        options.silent = True
    setup_log(options.silent, options.verbose)

    if options.cmoreoperatorlist:
        c = Cmore(options, urls)
        c.operatorlist()
        sys.exit(0)

    if options.proxy:
        # "socks5" is rewritten to "socks5h" so that DNS resolution also goes
        # through the proxy.
        options.proxy = options.proxy.replace("socks5", "socks5h", 1)
        options.proxy = dict(http=options.proxy,
                             https=options.proxy)

    if options.flexibleq and not options.quality:
        log.error("flexible-quality requires a quality")
        sys.exit(4)

    try:
        if len(urls) == 1:
            get_media(urls[0], options)
        else:
            get_multiple_media(urls, options)
    except KeyboardInterrupt:
        print("")


def mergeParserOption(options, parser):
    options.output = parser.output
    options.resume = parser.resume
    options.live = parser.live
    options.capture_time = parser.capture_time
    options.silent = parser.silent
    options.force = parser.force
    options.quality = parser.quality
    options.flexibleq = parser.flexibleq
    options.list_quality = parser.list_quality
    options.subtitle = parser.subtitle
    options.merge_subtitle = parser.merge_subtitle
    options.silent_semi = parser.silent_semi
    options.username = parser.username
    options.password = parser.password
    options.thumbnail = parser.thumbnail
    options.all_episodes = parser.all_episodes
    options.all_last = parser.all_last
    options.force_subtitle = parser.force_subtitle
    options.require_subtitle = parser.require_subtitle
    options.preferred = parser.preferred
    options.verbose = parser.verbose
    options.exclude = parser.exclude
    options.get_url = parser.get_url
    options.ssl_verify = parser.ssl_verify
    options.http_headers = parser.http_headers
    options.stream_prio = parser.stream_prio
    options.remux = parser.remux
    options.get_all_subtitles = parser.get_all_subtitles
    options.get_raw_subtitles = parser.get_raw_subtitles
    options.convert_subtitle_colors = parser.convert_subtitle_colors
    options.include_clips = parser.include_clips
    options.cmoreoperatorlist = parser.cmoreoperatorlist
    options.cmoreoperator = parser.cmoreoperator
    options.proxy = parser.proxy
    return options