mirror of https://github.com/spaam/svtplay-dl.git
synced 2024-11-27 05:34:15 +01:00

Compare commits: 20 commits, 73234b95b7 ... ff8ac8a813

Commits (SHA1):

ff8ac8a813
2383b26e1b
1f55f8cb3f
7e10813ae3
a69c8c8ccc
ace235c6c8
2083ee9978
ce5859346c
583a2edf4d
f8a450208c
e65e90c62e
da6654396b
6f6d970f0b
759feec608
634411666e
74c7d25293
c4afda5919
85d93464e2
515ab99c7a
590823c008
.github/ISSUE_TEMPLATE/feature_request.md (vendored, normal file, 24 lines added)

```diff
@@ -0,0 +1,24 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: enhancement
+assignees: ''
+
+---
+## Feature Request
+### Is your feature request related to a problem? Please describe
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+<!--- Put your text below this line -->
+
+### Describe the solution you'd like
+A clear and concise description of what you want to happen.
+<!--- Put your text below this line -->
+
+### Describe alternatives you've considered
+A clear and concise description of any alternative solutions or features you've considered.
+<!--- Put your text below this line -->
+
+### Additional context
+Add any other context or screenshots about the feature request here.
+<!--- Put your text below this line -->
```
```diff
@@ -1,5 +1,5 @@
 # using edge to get ffmpeg-4.x
-FROM alpine:edge
+FROM alpine:latest
 LABEL maintainer="j@i19.se"
 
 COPY dist/*.whl .
```
```diff
@@ -97,7 +97,7 @@ def _hlsparse(config, text, url, output, **kwargs):
                 if "CHARACTERISTICS" in i:
                     role = f'{role}-{i["CHARACTERISTICS"].replace("se.svt.accessibility.", "")}'
 
-            media[i["GROUP-ID"]].append([uri, chans, language, role])
+            media[i["GROUP-ID"]].append([uri, chans, language, role, segments])
 
         if i["TYPE"] == "SUBTITLES":
             if "URI" in i:
@@ -161,7 +161,7 @@ def _hlsparse(config, text, url, output, **kwargs):
                     role=group[3],
                     video_role=video_role,
                     output=loutput,
-                    segments=bool(segments),
+                    segments=bool(group[4]),
                     channels=chans,
                     codec=codec,
                     resolution=resolution,
@@ -187,7 +187,7 @@ def _hlsparse(config, text, url, output, **kwargs):
                     audio=audio_url,
                     video_role=video_role,
                     output=loutput,
-                    segments=bool(segments),
+                    segments=bool(group[4]),
                     channels=chans,
                     codec=codec,
                     resolution=resolution,
```
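In the `_hlsparse` hunks above, each alternate-media row gains its own `segments` flag, and the audio entries later read it back as `group[4]` instead of reusing whatever `segments` value the surrounding loop happened to hold. A minimal sketch of that bookkeeping, with illustrative group ids and URIs (not the project's code):

```python
# Hedged sketch: every alternate-media row now stores its own "segments" flag,
# and consumers read it back as group[4].
media = {}

def add_rendition(group_id, uri, chans, language, role, segments):
    # index 4 is the per-rendition segments flag added in the diff above
    media.setdefault(group_id, []).append([uri, chans, language, role, segments])

add_rendition("audio-aacl-128", "audio_sv.m3u8", "2", "sv", "main", True)

for group in media["audio-aacl-128"]:
    uri, chans, language, role, segments = group
    print(role, bool(group[4]))  # the value that ends up as segments=bool(group[4])
```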
```diff
@@ -31,7 +31,7 @@ class postprocess:
             if os.path.isfile(path):
                 self.detect = path
 
-    def merge(self):
+    def merge(self, merge_subtitle):
         if self.detect is None:
             logging.error("Cant detect ffmpeg or avconv. Cant mux files without it.")
             return
@@ -60,7 +60,7 @@ class postprocess:
         streams = _streams(stderr)
         videotrack, audiotrack = _checktracks(streams)
 
-        if self.config.get("merge_subtitle"):
+        if merge_subtitle:
             logging.info("Merge audio, video and subtitle into %s", new_name.name)
         else:
             logging.info(f"Merge audio and video into {str(new_name.name).replace('.audio', '')}")
@@ -89,7 +89,7 @@ class postprocess:
         arguments += ["-map", f"{videotrack}"]
         if audiotrack:
             arguments += ["-map", f"{audiotrack}"]
-        if self.config.get("merge_subtitle"):
+        if merge_subtitle:
             langs = _sublanguage(self.stream, self.config, self.subfixes)
             tracks = [x for x in [videotrack, audiotrack] if x]
             subs_nr = 0
@@ -135,7 +135,7 @@ class postprocess:
         os.remove(audio_filename)
 
         # This if statement is for use cases where both -S and -M are specified to not only merge the subtitle but also store it separately.
-        if self.config.get("merge_subtitle") and not self.config.get("subtitle"):
+        if merge_subtitle and not self.config.get("subtitle"):
             if self.subfixes and len(self.subfixes) >= 2 and self.config.get("get_all_subtitles"):
                 for subfix in self.subfixes:
                     subfile = orig_filename.parent / (orig_filename.stem + "." + subfix + ".srt")
```
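The `postprocess.merge()` hunks replace the `merge_subtitle` config lookups with an explicit parameter; as the `get_one_media` and `subtitle_decider` hunks further down show, the decision is now returned by `subtitle_decider()` and handed to `merge()` rather than written back into the shared config. A hedged sketch of the resulting call pattern, with stand-in names:

```python
# Sketch of the new flow: the subtitle-merging decision travels as a return
# value and a function argument instead of being read from (and mutated in)
# the shared config object. FakePostprocess and fake_subtitle_decider are
# illustrative stand-ins, not the project's classes.
class FakePostprocess:
    def merge(self, merge_subtitle: bool) -> None:
        if merge_subtitle:
            print("mux video + audio + subtitles")
        else:
            print("mux video + audio only")

def fake_subtitle_decider(want_merge: bool) -> bool:
    # stand-in for the real decision logic shown later in this diff
    return want_merge

merge_subtitle = fake_subtitle_decider(want_merge=True)
FakePostprocess().merge(merge_subtitle)
```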
```diff
@@ -14,7 +14,6 @@ class Aftonbladettv(Service):
 
     def get(self):
         data = self.get_urldata()
-
         match = re.search('data-player-config="([^"]+)"', data)
         if not match:
             match = re.search('data-svpPlayer-video="([^"]+)"', data)
@@ -24,7 +23,27 @@ class Aftonbladettv(Service):
             yield ServiceError("Can't find video info")
             return
         data = json.loads(decode_html_entities(match.group(1)))
-        yield from hlsparse(self.config, self.http.request("get", data["streamUrls"]["hls"]), data["streamUrls"]["hls"], output=self.output)
+        hdnea = self._login()
+        url = data["streamUrls"]["hls"] + hdnea if hdnea else data["streamUrls"]["hls"]
+        yield from hlsparse(config=self.config, res=self.http.request("get", url), url=url, output=self.output)
+
+    def _login(self):
+        if (token := self.config.get("token")) is None:
+            return None
+        if (match := re.search(r"^.*tv.aftonbladet.+video/([a-zA-Z0-9]+)/.*$", self.url)) is None:
+            return None
+
+        service = match.group(1)
+        res = self.http.request("get", f"https://svp-token-api.aftonbladet.se/svp/token/{service}?access=plus", headers={"x-sp-id": token})
+        if res.status_code != 200:
+            return None
+        expires = res.json()["expiry"]
+        hmac = res.json()["value"]
+
+        res = self.http.request("get", f"https://svp.vg.no/svp/token/v1/?vendor=ab&assetId={service}&expires={expires}&hmac={hmac}")
+        if res.status_code != 200:
+            return None
+        return f"?hdnea={res.text.replace('/', '%2F').replace('=', '%3D').replace(',', '%2C')}"
 
 
 class Aftonbladet(Service):
```
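The new `Aftonbladettv._login()` helper performs a two-step token exchange: the configured `token` is sent as `x-sp-id` to `svp-token-api.aftonbladet.se`, the returned `expiry`/`value` pair is traded at `svp.vg.no` for an access string, and that string is percent-encoded and appended to the HLS manifest URL as `?hdnea=...`. A small sketch of just the URL handling; `make_hdnea_query` and the sample values are illustrative, not part of the project:

```python
# Sketch of how the hdnea token ends up on the HLS URL, assuming the login
# helper returned the raw token text (or None when no token is configured).
def make_hdnea_query(token_text):
    if token_text is None:
        return None
    # same minimal percent-encoding the diff applies before building "?hdnea=..."
    encoded = token_text.replace("/", "%2F").replace("=", "%3D").replace(",", "%2C")
    return f"?hdnea={encoded}"

hls = "https://example.com/master.m3u8"               # stand-in manifest URL
hdnea = make_hdnea_query("st=1,exp=2,hmac=ab/cd=")     # stand-in token text
url = hls + hdnea if hdnea else hls
print(url)
```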
```diff
@@ -3,7 +3,6 @@ import json
 import logging
 import re
 import uuid
-from urllib.parse import urlparse
 
 from svtplay_dl.error import ServiceError
 from svtplay_dl.fetcher.hls import hlsparse
@@ -31,6 +30,11 @@ class Dr(Service, OpenGraphThumbMixin):
             else:
                 yield from hlsparse(self.config, res, match.group(1), output=self.output)
             return
+        apimatch = re.search("CLIENT_SERVICE_URL='([^']+)", data)
+        if not apimatch:
+            yield ServiceError("Can't find api server.")
+            return
+        apiserver = apimatch.group(1)
         janson = json.loads(match.group(1))
         page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
         resolution = None
@@ -55,7 +59,7 @@ class Dr(Service, OpenGraphThumbMixin):
 
         offerlist = []
         for i in offers:
-            if i["deliveryType"] == "Stream":
+            if i["deliveryType"] == "StreamOrDownload":
                 offerlist.append([i["scopes"][0], i["resolution"]])
 
         deviceid = uuid.uuid1()
@@ -73,10 +77,13 @@ class Dr(Service, OpenGraphThumbMixin):
         for i in offerlist:
             vid, resolution = i
             url = (
-                f"https://isl.dr-massive.com/api/account/items/{vid}/videos?delivery=stream&device=web_browser&"
+                f"{apiserver}/account/items/{vid}/videos?delivery=stream&device=web_browser&"
                 f"ff=idp%2Cldp&lang=da&resolution={resolution}&sub=Anonymous"
             )
             res = self.http.request("get", url, headers={"authorization": f"Bearer {token}"})
+            if res.status_code > 400:
+                yield ServiceError("Can't find the video or its geoblocked")
+                return
             for video in res.json():
                 if video["accessService"] == "StandardVideo" and video["format"] == "video/hls":
                     res = self.http.request("get", video["url"])
@@ -93,26 +100,25 @@ class Dr(Service, OpenGraphThumbMixin):
         data = self.get_urldata()
         match = re.search("__data = ([^<]+)</script>", data)
         if not match:
-            if "bonanza" in self.url:
-                parse = urlparse(self.url)
-                match = re.search(r"(\/bonanza\/serie\/[0-9]+\/[\-\w]+)", parse.path)
-                if match:
-                    match = re.findall(rf"a href=\"({match.group(1)}\/\d+[^\"]+)\"", data)
-                    if not match:
-                        logging.error("Can't find video info.")
-                    for url in match:
-                        episodes.append(f"https://www.dr.dk{url}")
-                else:
-                    logging.error("Can't find video info.")
-                return episodes
-            else:
-                logging.error("Can't find video info.")
-                return episodes
+            logging.error("Can't find video info.")
+            return episodes
         janson = json.loads(match.group(1))
 
+        if "/saeson/" in self.url:
+            page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
+            for i in page["item"]["show"]["seasons"]["items"]:
+                seasons.append(f'https://www.dr.dk/drtv{i["path"]}')
+
         page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
 
-        if "show" in page["item"] and "seasons" in page["item"]["show"]:
-            for i in page["item"]["show"]["seasons"]["items"]:
+        if (
+            "item" in page["entries"][0]
+            and "season" in page["entries"][0]["item"]
+            and "show" in page["entries"][0]["item"]["season"]
+            and "seasons" in page["entries"][0]["item"]["season"]["show"]
+        ):
+            for i in page["entries"][0]["item"]["season"]["show"]["seasons"]["items"]:
                 seasons.append(f'https://www.dr.dk/drtv{i["path"]}')
 
         if seasons:
```
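The Dr service stops hard-coding `isl.dr-massive.com` and instead reads the API base from the page's `CLIENT_SERVICE_URL` assignment, then reports a `ServiceError` when the video lookup fails. A hedged sketch of that discovery step; the sample page snippet and values are made up:

```python
import re

# Sketch of the api-server discovery added in the Dr service: pull the base URL
# out of the embedded CLIENT_SERVICE_URL assignment and build the items request from it.
page = "window.CLIENT_SERVICE_URL='https://isl.example-massive.com/api';"  # illustrative page snippet

apimatch = re.search("CLIENT_SERVICE_URL='([^']+)", page)
if not apimatch:
    raise SystemExit("Can't find api server.")
apiserver = apimatch.group(1)

vid, resolution = "12345", "HD-1080"  # stand-in offer values
url = (
    f"{apiserver}/account/items/{vid}/videos?delivery=stream&device=web_browser&"
    f"ff=idp%2Cldp&lang=da&resolution={resolution}&sub=Anonymous"
)
print(url)
```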
```diff
@@ -114,13 +114,16 @@ class Svtplay(Service, MetadataThumbMixin):
                     subfix = lang
                 yield from subtitle_probe(copy.copy(self.config), i["url"], subfix=subfix, output=self.output)
 
+        if not janson["videoReferences"]:
+            yield ServiceError("Media doesn't have any associated videos.")
+            return
         drm = janson["rights"]["drmCopyProtection"]
         if not drm and "variants" in janson and "default" in janson["variants"]:
-            if len(janson["variants"]["default"]["videoReferences"]) == 0:
+            if len(janson["videoReferences"]) == 0:
                 yield ServiceError("Media doesn't have any associated videos.")
                 return
 
-        for videorfc in janson["variants"]["default"]["videoReferences"]:
+        for videorfc in janson["videoReferences"]:
             params = {}
             special = False
             params["manifestUrl"] = quote_plus(videorfc["url"])
```
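On the Svtplay side the video references are now read directly from `janson["videoReferences"]`, with an early error when the list is empty. A tiny sketch of the guard, using an illustrative payload:

```python
# Sketch of the new guard: read video references from the top level of the
# player JSON and stop early when there is nothing to download.
janson = {"videoReferences": []}  # illustrative payload shape

if not janson["videoReferences"]:
    print("Media doesn't have any associated videos.")
else:
    for videorfc in janson["videoReferences"]:
        print(videorfc["url"])
```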
```diff
@@ -4,6 +4,7 @@ import json
 import logging
 import re
 import time
+from urllib.parse import parse_qs
 from urllib.parse import urlparse
 
 from svtplay_dl.error import ServiceError
@@ -54,7 +55,7 @@ class Tv4play(Service, OpenGraphThumbMixin):
 
         item = jansson["metadata"]
         if item["isDrmProtected"]:
-            yield ServiceError("We can't download DRM protected content from this site.")
+            yield ServiceError("We can't download DRM protected content from this site. This isn't a svtplay-dl issue.")
             return
 
         if item["isLive"]:
@@ -107,6 +108,7 @@ class Tv4play(Service, OpenGraphThumbMixin):
     def find_all_episodes(self, config):
         episodes = []
         items = []
+        seasonq = None
 
         parse = urlparse(self.url)
         if parse.path.startswith("/klipp"):
@@ -130,6 +132,8 @@ class Tv4play(Service, OpenGraphThumbMixin):
             episodes = self._graphlista(token, show)
             return episodes
 
+        query = parse_qs(parse.query)
+        seasonq = query.get("season", None)
         showid, jansson, kind = self._get_seriesid(self.get_urldata(), dict())
         if showid is None:
             logging.error("Cant find any videos")
@@ -139,12 +143,20 @@ class Tv4play(Service, OpenGraphThumbMixin):
             return episodes
         if kind == "Movie":
             return [f"https://www.tv4play.se/video/{showid}"]
 
         jansson = self._graphdetails(token, showid)
-        for season in jansson["data"]["media"]["allSeasonLinks"]:
-            graph_list = self._graphql(season["seasonId"])
-            for i in graph_list:
-                if i not in items:
-                    items.append(i)
+        graph_list = None
+        for season in reversed(jansson["data"]["media"]["allSeasonLinks"]):
+            if seasonq:
+                if seasonq[0] == season["seasonId"]:
+                    graph_list = self._graphql(token, season["seasonId"])
+            else:
+                graph_list = self._graphql(token, season["seasonId"])
+
+            if graph_list:
+                for i in graph_list:
+                    if i not in items:
+                        items.append(i)
 
         for item in items:
             episodes.append(f"https://www.tv4play.se/video/{item}")
@@ -197,8 +209,8 @@ class Tv4play(Service, OpenGraphThumbMixin):
         }
         res = self.http.request(
             "post",
-            "https://client-gateway.tv4.a2d.tv/graphql",
-            headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
+            "https://nordic-gateway.tv4.a2d.tv/graphql",
+            headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
             json=data,
         )
         janson = res.json()
@@ -241,13 +253,13 @@ class Tv4play(Service, OpenGraphThumbMixin):
         }
         res = self.http.request(
             "post",
-            "https://client-gateway.tv4.a2d.tv/graphql",
-            headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
+            "https://nordic-gateway.tv4.a2d.tv/graphql",
+            headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
             json=data,
         )
         return res.json()
 
-    def _graphql(self, show):
+    def _graphql(self, token, show):
         items = []
         nr = 0
         total = 100
@@ -260,8 +272,8 @@ class Tv4play(Service, OpenGraphThumbMixin):
 
         res = self.http.request(
             "post",
-            "https://client-gateway.tv4.a2d.tv/graphql",
-            headers={"Client-Name": "tv4-web", "Client-Version": "4.0.0", "Content-Type": "application/json"},
+            "https://nordic-gateway.tv4.a2d.tv/graphql",
+            headers={"Client-Name": "tv4-web", "Client-Version": "5.2.0", "Content-Type": "application/json", "Authorization": f"Bearer {token}"},
             json=data,
         )
         janson = res.json()
```
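For Tv4play, `find_all_episodes` now honours a `?season=<id>` query string: the query is parsed with `parse_qs`, the season links are walked in reverse, and only the matching season is fetched through the (now authenticated) GraphQL gateway. A hedged sketch of the filtering logic with made-up URLs and ids:

```python
from urllib.parse import parse_qs, urlparse

# Sketch of the season filtering added to find_all_episodes: a "?season=" query
# parameter limits which seasonId is queried; otherwise every season is used.
url = "https://www.tv4play.se/program/exempel?season=12345"  # illustrative URL
seasonq = parse_qs(urlparse(url).query).get("season", None)

all_season_links = [{"seasonId": "11111"}, {"seasonId": "12345"}]  # stand-in API data
wanted = []
for season in reversed(all_season_links):
    if seasonq:
        if seasonq[0] == season["seasonId"]:
            wanted.append(season["seasonId"])
    else:
        wanted.append(season["seasonId"])

print(wanted)  # ['12345'] because the query string selected that season
```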
```diff
@@ -236,25 +236,25 @@ class formatnameTest(unittest.TestCase):
         [
             "{episodename}a{title}-{service}",
             {"title": "title", "season": 99, "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
-            "episodenameatitle-service",
+            "episodenameatitle-service.mp4",
         ],
         [
             "{episodename}a{title}-{service}",
             {"title": "title", "season": 99, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
-            "episodenameatitle-service",
+            "episodenameatitle-service.mp4",
         ],
-        ["{episodename}a{title}-{service}", {"title": "title", "season": 99, "id": "0xdeadface", "ext": "ext"}, "atitle-service"],
+        ["{episodename}a{title}-{service}", {"title": "title", "season": 99, "id": "0xdeadface", "ext": "ext"}, "atitle-service.mp4"],
         [
             "{episodename}a{title}-{service}",
             {"title": "title", "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
-            "episodenameatitle-service",
+            "episodenameatitle-service.mp4",
         ],
         [
             "{episodename}a{title}-{service}",
             {"title": "title", "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
-            "episodenameatitle-service",
+            "episodenameatitle-service.mp4",
         ],
-        ["{episodename}a{title}-{service}", {"title": "title", "episodename": "episodename", "id": "0xdeadface"}, "episodenameatitle-service"],
+        ["{episodename}a{title}-{service}", {"title": "title", "episodename": "episodename", "id": "0xdeadface"}, "episodenameatitle-service.mp4"],
         [
             "{title}.{episode}.{episodename}-{id}-{service}.{ext}",
             {"title": "title", "season": 99, "episode": 21, "episodename": "episodename", "id": "0xdeadface", "ext": "ext"},
```
```diff
@@ -153,8 +153,9 @@ def get_one_media(stream):
         logging.info("No subtitles available")
         return
 
+    merge_subtitle = False
     if not stream.config.get("list_quality"):
-        subtitle_decider(stream, subtitles)
+        merge_subtitle = subtitle_decider(stream, subtitles)
         if stream.config.get("force_subtitle"):
             return
 
@@ -219,6 +220,6 @@ def get_one_media(stream):
         if fstream.audio and not post.detect and fstream.finished:
             logging.warning("Can't find ffmpeg/avconv. audio and video is in seperate files. if you dont want this use -P hls or hds")
         if post.detect and fstream.config.get("no_merge") is False:
-            post.merge()
+            post.merge(merge_subtitle)
         else:
             logging.info("All done. Not postprocessing files, leaving them completely untouched.")
```
```diff
@@ -194,7 +194,10 @@ def _formatname(output, config):
         if key == "service" and output[key]:
             name = name.replace("{service}", output[key])
         if key == "ext" and output[key]:
-            name = name.replace("{ext}", output[key])
+            if "{ext}" in name:
+                name = name.replace("{ext}", output[key])
+            else:
+                name = f"{name}.{output[key]}"
 
     # Remove all {text} we cant replace with something
     for item in re.findall(r"([\.\-]?(([^\.\-]+\w+)?\{[\w\-]+\}))", name):
```
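The `_formatname` hunk changes extension handling: when the filename template contains `{ext}` it is substituted as before, otherwise the extension is appended to the generated name. A small sketch of that fallback (the helper name is illustrative):

```python
# Sketch of the new extension handling: templates without an {ext} placeholder
# still end up with the container extension appended.
def apply_ext(name: str, ext: str) -> str:
    if "{ext}" in name:
        return name.replace("{ext}", ext)
    return f"{name}.{ext}"

print(apply_ext("episodenameatitle-service", "mp4"))  # episodenameatitle-service.mp4
print(apply_ext("title.{ext}", "mp4"))                # title.mp4
```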
```diff
@@ -119,8 +119,8 @@ def subtitle_decider(stream, subtitles):
             print(subtitles[0].url)
         else:
             subtitles[0].download()
-    elif stream.config.get("merge_subtitle"):
-        stream.config.set("merge_subtitle", False)
+        return stream.config.get("merge_subtitle")
+    return False
 
 
 def resolution(streams, resolutions: List) -> List:
```