import copy
import logging
import os
import sys
from datetime import datetime
from shutil import which

from svtplay_dl.error import UIException
from svtplay_dl.fetcher import VideoRetriever
from svtplay_dl.postprocess import postprocess
from svtplay_dl.service import Generic
from svtplay_dl.service import service_handler
from svtplay_dl.service.services import Raw
from svtplay_dl.service.services import sites
from svtplay_dl.subtitle import subtitle
from svtplay_dl.utils.nfo import write_nfo_episode
from svtplay_dl.utils.nfo import write_nfo_tvshow
from svtplay_dl.utils.output import filename
from svtplay_dl.utils.output import find_dupes
from svtplay_dl.utils.output import formatname
from svtplay_dl.utils.stream import list_quality
from svtplay_dl.utils.stream import select_quality
from svtplay_dl.utils.stream import subtitle_decider
from svtplay_dl.utils.text import exclude


def get_multiple_media(urls, config):
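    """Download each URL in sequence.

    If an output path is configured it must be a directory; it is created
    when missing. Each URL gets its own copy of the config so one download
    cannot mutate the options of the next.
    """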
    if config.get("output") and os.path.isfile(config.get("output")):
        logging.error("Output must be a directory if used with multiple URLs")
        sys.exit(2)
    elif config.get("output") and not os.path.exists(config.get("output")):
        try:
            os.makedirs(config.get("output"))
        except OSError as e:
            logging.error("%s: %s", e.strerror, e.filename)
            return

    for url in urls:
        get_media(url, copy.copy(config))


def get_media(url, options, version="Unknown"):
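    """Resolve a single URL to a service handler and start the download.

    Falls back to the Generic service, then to a Raw stream for direct
    manifest URLs (.f4m/.m3u8/.mpd), and exits if no service matches.
    """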
    if "http" not in url[:4]:
        url = f"http://{url}"

    if options.get("verbose"):
        logging.debug("version: %s", version)

    stream = service_handler(sites, options, url)
    if not stream:
        generic = Generic(options, url)
        url, stream = generic.get(sites)
    if not stream:
        if url.find(".f4m") > 0 or url.find(".m3u8") > 0 or url.find(".mpd") > 0:
            stream = Raw(options, url)
        if not stream:
            logging.error("That site is not supported. Make a ticket or send a message")
            sys.exit(2)

    if options.get("all_episodes") or stream.config.get("all_episodes"):
        get_all_episodes(stream, url, options)
    else:
        get_one_media(stream)


def get_all_episodes(stream, url, options):
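    """Download every episode the service can find for the given URL.

    The already-resolved service is reused for the episode matching the
    original URL; every other episode gets its own service handler with a
    copied config.
    """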
    name = os.path.dirname(formatname({"basedir": True}, stream.config))

    if name and os.path.isfile(name):
        logging.error("Output must be a directory if used with --all-episodes")
        sys.exit(2)
    elif name and not os.path.exists(name):
        try:
            os.makedirs(name)
        except OSError as e:
            logging.error("%s: %s", e.strerror, e.filename)
            return

    episodes = stream.find_all_episodes(stream.config)
    if episodes is None:
        return
    for idx, o in enumerate(episodes):
        if o == url:
            substream = stream
        else:
            substream = service_handler(sites, copy.copy(stream.config), o)

        logging.info("Episode %d of %d", idx + 1, len(episodes))
        logging.info("Url: %s", o)

        if not (options.get("get_url") and options.get("get_only_episode_url")):
            # get_one_media overwrites options.output...
            get_one_media(substream)


def get_one_media(stream):
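    """Fetch one video: pick a quality, download, and postprocess.

    Also handles subtitles, NFO files, thumbnails, date filtering and the
    early-exit options such as --get-url, --list-quality and --force-subtitle.
    """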
    # Make an automagic filename
    if not filename(stream):
        return

    if stream.config.get("merge_subtitle"):
        if not which("ffmpeg"):
            logging.error("--merge-subtitle needs ffmpeg. Please install ffmpeg.")
            logging.info("https://ffmpeg.org/download.html")
            sys.exit(2)

    videos = []
    subtitles = []
    error = []
    streams = stream.get()
    try:
        for i in streams:
            if isinstance(i, Exception):
                error.append(i)
            elif not exclude(stream.config, formatname(i.output, stream.config)):
                # Sort the retrieved items into videos and subtitles;
                # --preferred keeps only videos from the matching backend.
                if isinstance(i, VideoRetriever):
                    if stream.config.get("preferred"):
                        if stream.config.get("preferred").lower() == i.name:
                            videos.append(i)
                    else:
                        videos.append(i)
                if isinstance(i, subtitle):
                    subtitles.append(i)
    except Exception:
        if stream.config.get("verbose"):
            raise
        else:
            logging.error("svtplay-dl crashed")
            logging.error("Run again and add --verbose as an argument to get more information")
            logging.error("If the error persists, you can report it at https://github.com/spaam/svtplay-dl/issues")
            logging.error("Include the URL used, the stack trace and the output of svtplay-dl --version in the issue")
        return
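    # Honour --after-date: skip the video if the service reports a publish
    # date earlier than the requested cut-off. A parse failure on either
    # date disables the filter instead of aborting the download.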
    try:
        after_date = datetime.strptime(stream.config.get("after_date"), "%Y-%m-%d")
    except (ValueError, TypeError, KeyError, AttributeError):  # gotta catch em all..
        after_date = None
    try:
        pub_date = datetime.fromtimestamp(stream.output["publishing_datetime"])
    except (ValueError, TypeError, KeyError):
        pub_date = None
    if after_date is not None and pub_date is not None and pub_date.date() < after_date.date():
        logging.info(
            "Video %sS%dE%d skipped since published %s.",
            stream.output["title"],
            stream.output["season"],
            stream.output["episode"],
            pub_date.date(),
        )
        return
    if stream.config.get("require_subtitle") and not subtitles:
        logging.info("No subtitles available")
        return

    if not stream.config.get("list_quality"):
        subtitle_decider(stream, subtitles)
        if stream.config.get("force_subtitle"):
            return
    if not videos:
        errormsg = None
        for exc in error:
            if errormsg:
                errormsg = f"{errormsg}. {exc}"
            else:
                errormsg = str(exc)
        if errormsg:
            logging.error("No videos found. %s", errormsg)
        else:
            logging.error("No videos found.")
    else:
        if stream.config.get("list_quality"):
            list_quality(videos)
            return
        if stream.config.get("nfo"):
            # Create NFO files
            write_nfo_episode(stream.output, stream.config)
            write_nfo_tvshow(stream.output, stream.config)
            if stream.config.get("force_nfo"):
                return

        try:
            fstream = select_quality(stream.config, videos)
        except UIException as e:
            logging.error(e)
            return
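        # With --get-url we only print the resolved stream URL; otherwise
        # refuse to overwrite an existing file unless --force is given.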
        if fstream.config.get("get_url"):
            print(fstream.url)
            return

        dupe, fname = find_dupes(fstream.output, stream.config)
        if dupe and not stream.config.get("force"):
            logging.warning("File (%s) already exists. Use --force to overwrite", fname.name)
            return

        if fstream.config.get("output_format") and fstream.config.get("output_format").lower() not in ["mkv", "mp4"]:
            logging.error("Unknown output format. Please choose mp4 or mkv")
            sys.exit(2)
        try:
            logging.info("Selected to download %s, bitrate: %s, format: %s", fstream.name, fstream.bitrate, fstream.format)
            fstream.download()
        except UIException as e:
            if fstream.config.get("verbose"):
                raise
            logging.error(e)
            sys.exit(2)

        if fstream.config.get("thumbnail") and hasattr(stream, "get_thumbnail"):
            stream.get_thumbnail(stream.config)

        if fstream.config.get("silent_semi") and fstream.finished:
            logging.log(25, "Download of %s was completed", formatname(fstream.output, fstream.config))

        if fstream.config.get("no_postprocess") is True or all(fstream.config.get(x) for x in ["no_remux", "no_merge"]) is True:
            logging.info("All done. Not postprocessing files, leaving them completely untouched.")
            return
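        # Hand the finished download to the postprocessor: merge audio,
        # video and subtitles with ffmpeg when it is available and merging
        # has not been disabled.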
        post = postprocess(fstream, fstream.config, subtitles)
        if fstream.audio and not post.detect and fstream.finished:
            logging.warning("Can't find ffmpeg/avconv. Audio and video are in separate files. If you don't want this, use -P hls or hds")
        if post.detect and fstream.config.get("no_merge") is False:
            post.merge()
        else:
            logging.info("All done. Not postprocessing files, leaving them completely untouched.")