mirror of https://github.com/spaam/svtplay-dl.git
Compare commits: 20 commits, b47107a242 ... 54ac64bfac
SHA1
54ac64bfac
2383b26e1b
1f55f8cb3f
7e10813ae3
a69c8c8ccc
ace235c6c8
2083ee9978
ce5859346c
583a2edf4d
f8a450208c
e65e90c62e
da6654396b
6f6d970f0b
759feec608
634411666e
74c7d25293
c4afda5919
85d93464e2
515ab99c7a
77149d1f7d
.github/ISSUE_TEMPLATE.md (vendored): 20 lines changed
@@ -1,20 +0,0 @@
<!--
If you are posting a question/feature requests, all prefilled information can be removed.
-->

<!-- BUG TEMPLATE -->

### svtplay-dl versions:

Run `svtplay-dl --version`

### Operating system and Python version:

Name and version of the operating system and python version (run `python --version`)

### What is the issue:

Always include the URL you want to download and all switches you are using.
You should also add `--verbose` because it makes it much easier for use to find the issue :)

svtplay-dl --verbose https://www.example.com
@@ -1,5 +1,5 @@
# using edge to get ffmpeg-4.x
FROM alpine:edge
FROM alpine:latest
LABEL maintainer="j@i19.se"

COPY dist/*.whl .
@@ -97,7 +97,7 @@ def _hlsparse(config, text, url, output, **kwargs):
if "CHARACTERISTICS" in i:
role = f'{role}-{i["CHARACTERISTICS"].replace("se.svt.accessibility.", "")}'

media[i["GROUP-ID"]].append([uri, chans, language, role])
media[i["GROUP-ID"]].append([uri, chans, language, role, segments])

if i["TYPE"] == "SUBTITLES":
if "URI" in i:
@@ -161,7 +161,7 @@ def _hlsparse(config, text, url, output, **kwargs):
role=group[3],
video_role=video_role,
output=loutput,
segments=bool(segments),
segments=bool(group[4]),
channels=chans,
codec=codec,
resolution=resolution,
@@ -187,7 +187,7 @@ def _hlsparse(config, text, url, output, **kwargs):
audio=audio_url,
video_role=video_role,
output=loutput,
segments=bool(segments),
segments=bool(group[4]),
channels=chans,
codec=codec,
resolution=resolution,
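The two _hlsparse hunks above change the alternative-media bookkeeping so each GROUP-ID entry carries its own segment flag, read back later as group[4], instead of every entry sharing a single segments variable. A minimal sketch of that data shape (the group id, URI and other values below are made up for illustration):

    media = {}
    # hypothetical values for one EXT-X-MEDIA entry
    group_id, uri, chans, language, role, segments = "audio-aac", "audio.m3u8", "2", "sv", "main", True
    media.setdefault(group_id, []).append([uri, chans, language, role, segments])

    for group in media[group_id]:
        # group[4] is now the per-group flag consulted when the audio entry is built
        print(group[0], "segmented:", bool(group[4]))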
@@ -31,7 +31,7 @@ class postprocess:
if os.path.isfile(path):
self.detect = path

def merge(self):
def merge(self, merge_subtitle):
if self.detect is None:
logging.error("Cant detect ffmpeg or avconv. Cant mux files without it.")
return
@@ -60,7 +60,7 @@ class postprocess:
streams = _streams(stderr)
videotrack, audiotrack = _checktracks(streams)

if self.config.get("merge_subtitle"):
if merge_subtitle:
logging.info("Merge audio, video and subtitle into %s", new_name.name)
else:
logging.info(f"Merge audio and video into {str(new_name.name).replace('.audio', '')}")
@@ -89,7 +89,7 @@ class postprocess:
arguments += ["-map", f"{videotrack}"]
if audiotrack:
arguments += ["-map", f"{audiotrack}"]
if self.config.get("merge_subtitle"):
if merge_subtitle:
langs = _sublanguage(self.stream, self.config, self.subfixes)
tracks = [x for x in [videotrack, audiotrack] if x]
subs_nr = 0
@@ -135,7 +135,7 @@ class postprocess:
os.remove(audio_filename)

# This if statement is for use cases where both -S and -M are specified to not only merge the subtitle but also store it separately.
if self.config.get("merge_subtitle") and not self.config.get("subtitle"):
if merge_subtitle and not self.config.get("subtitle"):
if self.subfixes and len(self.subfixes) >= 2 and self.config.get("get_all_subtitles"):
for subfix in self.subfixes:
subfile = orig_filename.parent / (orig_filename.stem + "." + subfix + ".srt")
@@ -14,7 +14,6 @@ class Aftonbladettv(Service):

def get(self):
data = self.get_urldata()

match = re.search('data-player-config="([^"]+)"', data)
if not match:
match = re.search('data-svpPlayer-video="([^"]+)"', data)
@@ -24,7 +23,27 @@ class Aftonbladettv(Service):
yield ServiceError("Can't find video info")
return
data = json.loads(decode_html_entities(match.group(1)))
yield from hlsparse(self.config, self.http.request("get", data["streamUrls"]["hls"]), data["streamUrls"]["hls"], output=self.output)
hdnea = self._login()
url = data["streamUrls"]["hls"] + hdnea if hdnea else data["streamUrls"]["hls"]
yield from hlsparse(config=self.config, res=self.http.request("get", url), url=url, output=self.output)

def _login(self):
if (token := self.config.get("token")) is None:
return None
if (match := re.search(r"^.*tv.aftonbladet.+video/([a-zA-Z0-9]+)/.*$", self.url)) is None:
return None

service = match.group(1)
res = self.http.request("get", f"https://svp-token-api.aftonbladet.se/svp/token/{service}?access=plus", headers={"x-sp-id": token})
if res.status_code != 200:
return None
expires = res.json()["expiry"]
hmac = res.json()["value"]

res = self.http.request("get", f"https://svp.vg.no/svp/token/v1/?vendor=ab&assetId={service}&expires={expires}&hmac={hmac}")
if res.status_code != 200:
return None
return f"?hdnea={res.text.replace('/', '%2F').replace('=', '%3D').replace(',', '%2C')}"


class Aftonbladet(Service):
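For the new _login helper above: the x-sp-id header value comes from the service's token setting (self.config.get("token")), and the returned ?hdnea= value is the token text with '/', '=' and ',' percent-encoded by hand. A small sketch with a made-up token string, showing that the replace chain matches a plain urllib.parse.quote call restricted to those characters (Python 3.7+):

    from urllib.parse import quote

    token_text = "st=1700000000~exp=1700003600~acl=/*~hmac=abc,def/ghi="  # hypothetical value
    manual = token_text.replace("/", "%2F").replace("=", "%3D").replace(",", "%2C")
    # quote() never touches letters, digits or "_.-~"; keeping "*" safe as well
    # leaves exactly '/', '=' and ',' to be encoded for a string like this one.
    print(manual == quote(token_text, safe="*"))  # True
    print(f"?hdnea={manual}")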
@@ -3,7 +3,6 @@ import json
import logging
import re
import uuid
from urllib.parse import urlparse

from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
@@ -31,6 +30,11 @@ class Dr(Service, OpenGraphThumbMixin):
else:
yield from hlsparse(self.config, res, match.group(1), output=self.output)
return
apimatch = re.search("CLIENT_SERVICE_URL='([^']+)", data)
if not apimatch:
yield ServiceError("Can't find api server.")
return
apiserver = apimatch.group(1)
janson = json.loads(match.group(1))
page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
resolution = None
@@ -55,7 +59,7 @@ class Dr(Service, OpenGraphThumbMixin):

offerlist = []
for i in offers:
if i["deliveryType"] == "Stream":
if i["deliveryType"] == "StreamOrDownload":
offerlist.append([i["scopes"][0], i["resolution"]])

deviceid = uuid.uuid1()
@@ -73,10 +77,13 @@ class Dr(Service, OpenGraphThumbMixin):
for i in offerlist:
vid, resolution = i
url = (
f"https://isl.dr-massive.com/api/account/items/{vid}/videos?delivery=stream&device=web_browser&"
f"{apiserver}/account/items/{vid}/videos?delivery=stream&device=web_browser&"
f"ff=idp%2Cldp&lang=da&resolution={resolution}&sub=Anonymous"
)
res = self.http.request("get", url, headers={"authorization": f"Bearer {token}"})
if res.status_code > 400:
yield ServiceError("Can't find the video or its geoblocked")
return
for video in res.json():
if video["accessService"] == "StandardVideo" and video["format"] == "video/hls":
res = self.http.request("get", video["url"])
@@ -93,26 +100,25 @@ class Dr(Service, OpenGraphThumbMixin):
data = self.get_urldata()
match = re.search("__data = ([^<]+)</script>", data)
if not match:
if "bonanza" in self.url:
parse = urlparse(self.url)
match = re.search(r"(\/bonanza\/serie\/[0-9]+\/[\-\w]+)", parse.path)
if match:
match = re.findall(rf"a href=\"({match.group(1)}\/\d+[^\"]+)\"", data)
if not match:
logging.error("Can't find video info.")
for url in match:
episodes.append(f"https://www.dr.dk{url}")
else:
logging.error("Can't find video info.")
return episodes
else:
logging.error("Can't find video info.")
return episodes

janson = json.loads(match.group(1))

if "/saeson/" in self.url:
page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
for i in page["item"]["show"]["seasons"]["items"]:
seasons.append(f'https://www.dr.dk/drtv{i["path"]}')

page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]

if "show" in page["item"] and "seasons" in page["item"]["show"]:
for i in page["item"]["show"]["seasons"]["items"]:
if (
"item" in page["entries"][0]
and "season" in page["entries"][0]["item"]
and "show" in page["entries"][0]["item"]["season"]
and "seasons" in page["entries"][0]["item"]["season"]["show"]
):
for i in page["entries"][0]["item"]["season"]["show"]["seasons"]["items"]:
seasons.append(f'https://www.dr.dk/drtv{i["path"]}')

if seasons:
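The dr.dk hunks above stop hard-coding the isl.dr-massive.com host: the API base is scraped from the page via CLIENT_SERVICE_URL and the offer filter changes to deliveryType "StreamOrDownload". A short sketch of the scrape-and-build step, with hypothetical page content and offer values:

    import re

    # hypothetical page snippet; the real one is whatever get_urldata() returns
    page_html = "<script>var CLIENT_SERVICE_URL='https://isl.dr-massive.com/api';</script>"
    apimatch = re.search("CLIENT_SERVICE_URL='([^']+)", page_html)
    apiserver = apimatch.group(1) if apimatch else None

    vid, resolution = "12345", "HD-1080"  # made-up offer scope and resolution
    if apiserver:
        url = (
            f"{apiserver}/account/items/{vid}/videos?delivery=stream&device=web_browser&"
            f"ff=idp%2Cldp&lang=da&resolution={resolution}&sub=Anonymous"
        )
        print(url)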
@@ -114,13 +114,16 @@ class Svtplay(Service, MetadataThumbMixin):
subfix = lang
yield from subtitle_probe(copy.copy(self.config), i["url"], subfix=subfix, output=self.output)

if not janson["videoReferences"]:
yield ServiceError("Media doesn't have any associated videos.")
return
drm = janson["rights"]["drmCopyProtection"]
if not drm and "variants" in janson and "default" in janson["variants"]:
if len(janson["variants"]["default"]["videoReferences"]) == 0:
if len(janson["videoReferences"]) == 0:
yield ServiceError("Media doesn't have any associated videos.")
return

for videorfc in janson["variants"]["default"]["videoReferences"]:
for videorfc in janson["videoReferences"]:
params = {}
special = False
params["manifestUrl"] = quote_plus(videorfc["url"])
@@ -4,6 +4,7 @@ import json
import logging
import re
import time
from urllib.parse import parse_qs
from urllib.parse import urlparse

from svtplay_dl.error import ServiceError
@@ -54,7 +55,7 @@ class Tv4play(Service, OpenGraphThumbMixin):

item = jansson["metadata"]
if item["isDrmProtected"]:
yield ServiceError("We can't download DRM protected content from this site.")
yield ServiceError("We can't download DRM protected content from this site. This isn't a svtplay-dl issue.")
return

if item["isLive"]:
@@ -107,6 +108,7 @@ class Tv4play(Service, OpenGraphThumbMixin):
def find_all_episodes(self, config):
episodes = []
items = []
seasonq = None

parse = urlparse(self.url)
if parse.path.startswith("/klipp"):
@@ -130,6 +132,8 @@ class Tv4play(Service, OpenGraphThumbMixin):
episodes = self._graphlista(token, show)
return episodes

query = parse_qs(parse.query)
seasonq = query.get("season", None)
showid, jansson, kind = self._get_seriesid(self.get_urldata(), dict())
if showid is None:
logging.error("Cant find any videos")
@@ -139,9 +143,17 @@ class Tv4play(Service, OpenGraphThumbMixin):
return episodes
if kind == "Movie":
return [f"https://www.tv4play.se/video/{showid}"]

jansson = self._graphdetails(token, showid)
for season in jansson["data"]["media"]["allSeasonLinks"]:
graph_list = self._graphql(season["seasonId"])
graph_list = None
for season in reversed(jansson["data"]["media"]["allSeasonLinks"]):
if seasonq:
if seasonq[0] == season["seasonId"]:
graph_list = self._graphql(token, season["seasonId"])
else:
graph_list = self._graphql(token, season["seasonId"])

if graph_list:
for i in graph_list:
if i not in items:
items.append(i)
@@ -197,8 +209,8 @@ class Tv4play(Service, OpenGraphThumbMixin):
}
res = self.http.request(
"post",
"https://client-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
"https://nordic-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
json=data,
)
janson = res.json()
@@ -241,13 +253,13 @@ class Tv4play(Service, OpenGraphThumbMixin):
}
res = self.http.request(
"post",
"https://client-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
"https://nordic-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
json=data,
)
return res.json()

def _graphql(self, show):
def _graphql(self, token, show):
items = []
nr = 0
total = 100
@@ -260,8 +272,8 @@ class Tv4play(Service, OpenGraphThumbMixin):

res = self.http.request(
"post",
"https://client-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json"},
"https://nordic-gateway.tv4.a2d.tv/graphql",
headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
json=data,
)
janson = res.json()
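The find_all_episodes hunks above add an optional ?season= query parameter: when present, only the matching seasonId is fetched, and _graphdetails/_graphql now hit the nordic-gateway endpoint with a bearer token. A small sketch of just the season filtering, with a made-up URL and season list:

    from urllib.parse import parse_qs, urlparse

    url = "https://www.tv4play.se/program/some-show?season=abc123"  # hypothetical
    seasonq = parse_qs(urlparse(url).query).get("season", None)

    all_season_links = [{"seasonId": "abc123"}, {"seasonId": "def456"}]  # hypothetical API data
    for season in reversed(all_season_links):
        if seasonq and seasonq[0] != season["seasonId"]:
            continue  # skip seasons other than the one requested via ?season=
        print("fetch episodes for", season["seasonId"])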
@@ -236,25 +236,25 @@ class formatnameTest(unittest.TestCase):
[
"{episodename}a{title}-{service}",
{"title": "title", "season": 99, "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
"episodenameatitle-service",
"episodenameatitle-service.mp4",
],
[
"{episodename}a{title}-{service}",
{"title": "title", "season": 99, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
"episodenameatitle-service",
"episodenameatitle-service.mp4",
],
["{episodename}a{title}-{service}", {"title": "title", "season": 99, "id": "0xdeadface", "ext": "ext"}, "atitle-service"],
["{episodename}a{title}-{service}", {"title": "title", "season": 99, "id": "0xdeadface", "ext": "ext"}, "atitle-service.mp4"],
[
"{episodename}a{title}-{service}",
{"title": "title", "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
"episodenameatitle-service",
"episodenameatitle-service.mp4",
],
[
"{episodename}a{title}-{service}",
{"title": "title", "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
"episodenameatitle-service",
"episodenameatitle-service.mp4",
],
["{episodename}a{title}-{service}", {"title": "title", "episodename": "episodename", "id": "0xdeadface"}, "episodenameatitle-service"],
["{episodename}a{title}-{service}", {"title": "title", "episodename": "episodename", "id": "0xdeadface"}, "episodenameatitle-service.mp4"],
[
"{title}.{episode}.{episodename}-{id}-{service}.{ext}",
{"title": "title", "season": 99, "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
@@ -153,8 +153,9 @@ def get_one_media(stream):
logging.info("No subtitles available")
return

merge_subtitle = False
if not stream.config.get("list_quality"):
subtitle_decider(stream, subtitles)
merge_subtitle = subtitle_decider(stream, subtitles)
if stream.config.get("force_subtitle"):
return

@@ -219,6 +220,6 @@ def get_one_media(stream):
if fstream.audio and not post.detect and fstream.finished:
logging.warning("Can't find ffmpeg/avconv. audio and video is in seperate files. if you dont want this use -P hls or hds")
if post.detect and fstream.config.get("no_merge") is False:
post.merge()
post.merge(merge_subtitle)
else:
logging.info("All done. Not postprocessing files, leaving them completely untouched.")
@@ -194,7 +194,10 @@ def _formatname(output, config):
if key == "service" and output[key]:
name = name.replace("{service}", output[key])
if key == "ext" and output[key]:
if "{ext}" in name:
name = name.replace("{ext}", output[key])
else:
name = f"{name}.{output[key]}"

# Remove all {text} we cant replace with something
for item in re.findall(r"([\.\-]?(([^\.\-]+\w+)?\{[\w\-]+\}))", name):
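The _formatname hunk above makes the extension handling explicit: if the filename template contains {ext} it is substituted, otherwise the extension is appended, which is what the updated formatname tests earlier in this compare expect (the trailing .mp4). A tiny standalone sketch of that branch (the helper name is illustrative only):

    def apply_ext(name: str, ext: str) -> str:
        if "{ext}" in name:
            return name.replace("{ext}", ext)
        return f"{name}.{ext}"

    print(apply_ext("episodenameatitle-service", "mp4"))  # episodenameatitle-service.mp4
    print(apply_ext("title.{ext}", "mp4"))                # title.mp4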
@@ -119,8 +119,8 @@ def subtitle_decider(stream, subtitles):
print(subtitles[0].url)
else:
subtitles[0].download()
elif stream.config.get("merge_subtitle"):
stream.config.set("merge_subtitle", False)
return stream.config.get("merge_subtitle")
return False


def resolution(streams, resolutions: List) -> List:
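Read together with the postprocess and get_one_media hunks earlier in this compare, subtitle_decider now reports whether subtitles were set up for muxing, and that result is passed into merge() instead of merge() re-reading the config. A rough sketch of the resulting flow (stand-in names, bodies trimmed; not the actual implementations):

    def subtitle_decider_sketch(config) -> bool:
        # ... choose and download subtitles as before ...
        return bool(config.get("merge_subtitle"))

    def get_one_media_sketch(stream, post):
        merge_subtitle = False
        if not stream.config.get("list_quality"):
            merge_subtitle = subtitle_decider_sketch(stream.config)
        if post.detect and stream.config.get("no_merge") is False:
            post.merge(merge_subtitle)  # the flag is now an explicit argument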