Mirror of https://github.com/spaam/svtplay-dl.git (synced 2024-11-27 05:34:15 +01:00)

Compare commits: 29392cc761 ... 3b5da85615 (20 commits)
Commits:
3b5da85615
98af383d48
4de27cc907
94dd52b28e
9d7ec64cac
e4e9d28e15
118638f979
8d159e795a
6465b1d4da
f4932e62e3
9939298ac8
347403e9e7
5420cff4ed
d713d33e67
b01124a4ab
471e4d6063
924cc0dd50
15a64cf198
9052eb3507
77149d1f7d
.github/ISSUE_TEMPLATE.md (vendored): 20 lines removed (file deleted)

@@ -1,20 +0,0 @@
-<!--
-If you are posting a question/feature requests, all prefilled information can be removed.
--->
-
-<!-- BUG TEMPLATE -->
-
-### svtplay-dl versions:
-
-Run `svtplay-dl --version`
-
-### Operating system and Python version:
-
-Name and version of the operating system and python version (run `python --version`)
-
-### What is the issue:
-
-Always include the URL you want to download and all switches you are using.
-You should also add `--verbose` because it makes it much easier for use to find the issue :)
-
-svtplay-dl --verbose https://www.example.com
.github/workflows/tests.yaml (vendored): 2 lines changed

@@ -8,11 +8,11 @@ jobs:
 fail-fast: false
 matrix:
 include:
+- {name: '3.12', python: '3.12', os: ubuntu-latest, architecture: 'x64', cibuild: "no"}
 - {name: '3.11', python: '3.11', os: ubuntu-latest, architecture: 'x64', cibuild: "no"}
 - {name: '3.10', python: '3.10', os: ubuntu-latest, architecture: 'x64', cibuild: "no"}
 - {name: '3.9', python: '3.9', os: ubuntu-latest, architecture: 'x64', cibuild: "yes"}
 - {name: '3.8', python: '3.8', os: ubuntu-latest, architecture: 'x64', cibuild: "no"}
-- {name: '3.7', python: '3.7', os: ubuntu-latest, architecture: 'x64', cibuild: "no"}
 - {name: Windows, python: '3.8', os: windows-latest, architecture: 'x64', arch-cx: 'win-amd64', cx_name: 'amd64', cibuild: "yes"}
 - {name: WindowsX86, python: '3.8', os: windows-latest, architecture: 'x86', arch-cx: 'win32', cx_name: 'win32', cibuild: "yes"}
 steps:
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v4.3.0
+rev: v4.5.0
 hooks:
 - id: trailing-whitespace
 - id: end-of-file-fixer
@@ -10,24 +10,24 @@ repos:
 - id: check-added-large-files

 - repo: https://github.com/ambv/black
-rev: 22.8.0
+rev: 23.11.0
 hooks:
 - id: black
 language_version: python3
 - repo: https://github.com/pycqa/flake8
-rev: 5.0.4
+rev: 6.1.0
 hooks:
 - id: flake8
 - repo: https://github.com/asottile/pyupgrade
-rev: v2.38.0
+rev: v3.15.0
 hooks:
 - id: pyupgrade
-args: [--py36-plus]
+args: [--py38-plus]
 - repo: https://github.com/asottile/reorder_python_imports
-rev: v3.8.2
+rev: v3.12.0
 hooks:
 - id: reorder-python-imports
 - repo: https://github.com/asottile/add-trailing-comma
-rev: v2.2.3
+rev: v3.1.0
 hooks:
 - id: add-trailing-comma
@@ -86,7 +86,12 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
 dispcmd = str([command] + args)
 # remember shell=False, so use git.cmd on windows, not just git
 process = subprocess.Popen(
-[command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs
+[command] + args,
+cwd=cwd,
+env=env,
+stdout=subprocess.PIPE,
+stderr=(subprocess.PIPE if hide_stderr else None),
+**popen_kwargs,
 )
 break
 except OSError:
@@ -160,6 +160,8 @@ def adaptionset(attributes, elements, url, baseurl=None):
 codec = "h264"
 elif codecs and codecs[:3] == "hvc":
 codec = "hevc"
+elif codecs and codecs[:3] == "dvh":
+codec = "dvhevc"
 else:
 codec = codecs
 if not resolution and "maxWidth" in i.attrib and "maxHeight" in i.attrib:
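
For readers skimming the change above: the DASH parser keys the codec label off the first three characters of the RFC 6381 codec string, with Dolby Vision HEVC ("dvh") newly mapped to "dvhevc". A minimal standalone sketch of the same lookup; the sample codec strings are illustrative, not taken from the diff:

    # Minimal sketch of the prefix-based codec detection used in the hunk above.
    def codec_label(codecs):
        prefixes = {"avc": "h264", "hvc": "hevc", "dvh": "dvhevc"}
        return prefixes.get(codecs[:3], codecs) if codecs else None

    print(codec_label("avc1.64001f"))    # h264
    print(codec_label("hvc1.2.4.L123"))  # hevc
    print(codec_label("dvh1.05.06"))     # dvhevc (the case added here)
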
@@ -304,7 +306,7 @@ def parse_duration(duration):
 def parse_dates(date_str):
 match = re.search(r"(.*:.*)\.(\d{5,9})Z", date_str)
 if match:
-date_str = f"{match.group(1)}.{int(int(match.group(2))/1000)}Z"  # Need to translate nanoseconds to milliseconds
+date_str = f"{match.group(1)}.{int(int(match.group(2)) / 1000)}Z"  # Need to translate nanoseconds to milliseconds
 date_patterns = ["%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%SZ"]
 dt = None
 for pattern in date_patterns:
@@ -392,7 +394,6 @@ class DASH(VideoRetriever):
 file_d.write(data.content)
 eta = ETA(total_size)
 while bytes_so_far < total_size:
-
 if not self.config.get("silent"):
 eta.update(bytes_so_far)
 progressbar(total_size, bytes_so_far, "".join(["ETA: ", str(eta)]))
@@ -3,6 +3,7 @@
 import binascii
 import copy
 import os
+import struct
 import time
 from datetime import datetime
 from datetime import timedelta
@@ -114,6 +115,9 @@ def _hlsparse(config, text, url, output, **kwargs):
 vcodec = "hevc"
 if i["CODECS"][:3] == "avc":
 vcodec = "h264"
+if i["CODECS"][:3] == "dvh":
+vcodec = "dvhevc"
+
 if "AUDIO" in i:
 audio_group = i["AUDIO"]
 urls = get_full_url(i["URI"], url)
@@ -168,12 +172,17 @@ def _hlsparse(config, text, url, output, **kwargs):
 if subtitles:
 for sub in list(subtitles.keys()):
 for n in subtitles[sub]:
-subfix = n[2]
+subfix = n[1]
 if len(subtitles[sub]) > 1:
 if subfix:
 subfix = f"{n[1]}-caption"
 yield from subtitle_probe(
-copy.copy(config), get_full_url(n[0], url), output=copy.copy(output), subfix=subfix, cookies=cookies, **kwargs
+copy.copy(config),
+get_full_url(n[0], url),
+output=copy.copy(output),
+subfix=subfix,
+cookies=cookies,
+**kwargs,
 )

 elif m3u8.media_segment:
@@ -236,6 +245,8 @@ class HLS(VideoRetriever):
 total_duration = 0
 duration = 0
 max_duration = 0
+key = None
+key_iv = None
 for index, i in enumerate(m3u8.media_segment):
 if "EXTINF" in i and "duration" in i["EXTINF"]:
 duration = i["EXTINF"]["duration"]
@@ -253,11 +264,11 @@ class HLS(VideoRetriever):
 headers = {}
 if "EXT-X-BYTERANGE" in i:
 headers["Range"] = f'bytes={i["EXT-X-BYTERANGE"]["o"]}-{i["EXT-X-BYTERANGE"]["o"] + i["EXT-X-BYTERANGE"]["n"] - 1}'
-data = self.http.request("get", item, cookies=cookies, headers=headers)
-if data.status_code == 404:
+resb = self.http.request("get", item, cookies=cookies, headers=headers)
+if resb.status_code == 404:
 break
-data = data.content

+data = resb.content
 if m3u8.encrypted:
 headers = {}
 if self.keycookie:
@@ -273,7 +284,10 @@ class HLS(VideoRetriever):
 if keyurl and keyurl[:4] == "skd:":
 raise HLSException(keyurl, "Can't decrypt beacuse of DRM")
 key = self.http.request("get", keyurl, cookies=keycookies, headers=headers).content
-iv = binascii.unhexlify(i["EXT-X-KEY"]["IV"][2:].zfill(32)) if "IV" in i["EXT-X-KEY"] else random_iv()
+key_iv = binascii.unhexlify(i["EXT-X-KEY"]["IV"][2:].zfill(32)) if "IV" in i["EXT-X-KEY"] else None
+
+if key:
+iv = key_iv if key_iv else struct.pack(">8xq", index)
 backend = default_backend()
 cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
 decryptor = cipher.decryptor()
@@ -281,11 +295,12 @@ class HLS(VideoRetriever):
 # In some cases the playlist say its encrypted but the files is not.
 # This happen on svtplay 5.1ch stream where it started with ID3..
 # Adding the other ones is header for mpeg-ts files. third byte is 10 or 11..
-if data[:3] != b"ID3" and data[:3] != b"\x47\x40\x11" and data[:3] != b"\x47\x40\x10" and data[4:12] != b"ftypisom":
+if data[:3] != b"ID3" and data[:3] != b"\x47\x40\x11" and data[:3] != b"\x47\x40\x10" and data[4:12] != b"ftyp":
 if decryptor:
-data = decryptor.update(data)
+data = _unpad(decryptor.update(data))
 else:
-raise ValueError("No decryptor found for encrypted hls steam.")
+if key:
+raise ValueError("No decryptor found for encrypted hls steam.")
 file_d.write(data)

 if self.config.get("capture_time") > 0 and total_duration >= self.config.get("capture_time") * 60:
@@ -321,3 +336,7 @@ class HLS(VideoRetriever):
 if not self.config.get("silent"):
 progress_stream.write("\n")
 self.finished = True
+
+
+def _unpad(data):
+return data[: -data[-1]]
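
Two details of the HLS change above are easy to miss: when an EXT-X-KEY carries no IV attribute, the segment index is packed into a 16-byte big-endian value and used as the AES-CBC IV, and the decrypted payload is stripped of its trailing padding by the new _unpad helper. A small self-contained sketch; the helper name segment_iv is hypothetical, while _unpad matches the diff:

    import struct
    from typing import Optional

    def segment_iv(explicit_iv: Optional[bytes], index: int) -> bytes:
        # No IV attribute in EXT-X-KEY: fall back to the segment index as a
        # 128-bit big-endian integer (8 zero bytes + an 8-byte big-endian value).
        return explicit_iv if explicit_iv else struct.pack(">8xq", index)

    def _unpad(data: bytes) -> bytes:
        # The last byte of a PKCS#7-style padded block gives the pad length.
        return data[: -data[-1]]

    assert segment_iv(None, 3) == (3).to_bytes(16, "big")
    assert _unpad(b"hello\x03\x03\x03") == b"hello"
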
@@ -20,7 +20,6 @@ class M3U8:
 TAG_TYPES = {"MEDIA_SEGMENT": 0, "MEDIA_PLAYLIST": 1, "MASTER_PLAYLIST": 2}

 def __init__(self, data):
-
 self.version = None

 self.media_segment = []
@@ -55,7 +54,6 @@ class M3U8:
 if not l:
 continue
 elif l.startswith("#EXT"):
-
 info = {}
 tag, attr = _get_tag_attribute(l)
 if tag == "EXT-X-VERSION":
@@ -63,7 +61,6 @@ class M3U8:

 # 4.3.2. Media Segment Tags
 elif tag in M3U8.MEDIA_SEGMENT_TAGS:
-
 tag_type = M3U8.TAG_TYPES["MEDIA_SEGMENT"]
 # 4.3.2.1. EXTINF
 if tag == "EXTINF":
@@ -124,7 +121,6 @@ class M3U8:

 # 4.3.3. Media Playlist Tags
 elif tag in M3U8.MEDIA_PLAYLIST_TAGS:
-
 tag_type = M3U8.TAG_TYPES["MEDIA_PLAYLIST"]
 # 4.3.3.1. EXT-X-TARGETDURATION
 if tag == "EXT-X-TARGETDURATION":
@@ -154,7 +150,6 @@ class M3U8:

 # 4.3.4. Master Playlist Tags
 elif tag in M3U8.MASTER_PLAYLIST_TAGS:
-
 tag_type = M3U8.TAG_TYPES["MASTER_PLAYLIST"]
 # 4.3.4.1. EXT-X-MEDIA
 if tag == "EXT-X-MEDIA":
@@ -185,7 +180,6 @@ class M3U8:

 # 4.3.5. Media or Master Playlist Tags
 elif tag in M3U8.MEDIA_OR_MASTER_PLAYLIST_TAGS:
-
 tag_type = M3U8.TAG_TYPES["MEDIA_PLAYLIST"]
 # 4.3.5.1. EXT-X-INDEPENDENT-SEGMENTS
 if tag == "EXT-X-INDEPENDENT-SEGMENTS":
@@ -186,7 +186,8 @@ def _sublanguage(stream, config, subfixes):
 _ = parse(self)
 random_sentences = " ".join(sample(_, len(_) if len(_) < 8 else 8)).replace("\r\n", "")
 url = "https://svtplay-dl.se/langdetect/"
-headers = {"User-Agent": f"{FIREFOX_UA} {platform.machine()}"}
+bits = "64" if sys.maxsize > 2**32 else "32"
+headers = {"User-Agent": f"{FIREFOX_UA} {platform.machine()} {platform.platform()} {bits}"}
 try:
 r = post(url, json={"query": random_sentences}, headers=headers, timeout=30)
 if r.status_code == codes.ok:
@@ -35,11 +35,24 @@ class Dr(Service, OpenGraphThumbMixin):
 page = janson["cache"]["page"][list(janson["cache"]["page"].keys())[0]]
 resolution = None
 vid = None

+if page["key"] != "Watch":
+yield ServiceError("Wrong url, need to be video url")
+return
 if "item" in page["entries"][0]:
 offers = page["entries"][0]["item"]["offers"]
 elif "item" in page:
 offers = page["item"]["offers"]

+self.output["id"] = page["entries"][0]["item"]["id"]
+if "season" in page["entries"][0]["item"]:
+self.output["title"] = page["entries"][0]["item"]["season"]["title"]
+self.output["season"] = page["entries"][0]["item"]["season"]["seasonNumber"]
+self.output["episode"] = page["entries"][0]["item"]["episodeNumber"]
+self.output["episodename"] = page["entries"][0]["item"]["contextualTitle"]
+elif "title" in page["entries"][0]["item"]:
+self.output["title"] = page["entries"][0]["item"]["title"]
+
 offerlist = []
 for i in offers:
 if i["deliveryType"] == "Stream":
@@ -70,7 +83,6 @@ class Dr(Service, OpenGraphThumbMixin):
 if res.status_code > 400:
 yield ServiceError("Can't play this because the video is geoblocked or not available.")
 else:
-logging.info("suuubu")
 yield from hlsparse(self.config, res, video["url"], output=self.output)
 if len(video["subtitles"]) > 0:
 yield from subtitle_probe(copy.copy(self.config), video["subtitles"][0]["link"], output=self.output)
@@ -56,7 +56,6 @@ class Mtvnn(Service, OpenGraphThumbMixin):
 and xml.find("./video").find("item").find("rendition") is not None
 and xml.find("./video").find("item").find("rendition").find("src") is not None
 ):
-
 hls_url = xml.find("./video").find("item").find("rendition").find("src").text
 stream = hlsparse(self.config, self.http.request("get", hls_url), hls_url, output=self.output)
 for key in list(stream.keys()):
@@ -144,6 +143,5 @@ class MtvMusic(Service, OpenGraphThumbMixin):
 and xml.find("./video").find("item").find("rendition") is not None
 and xml.find("./video").find("item").find("rendition").find("src") is not None
 ):
-
 hls_url = xml.find("./video").find("item").find("rendition").find("src").text
 yield from hlsparse(self.config, self.http.request("get", hls_url), hls_url, output=self.output)
@@ -1,4 +1,5 @@
 import datetime
+import logging
 import re
 import uuid
 from urllib.parse import urlparse
@@ -12,6 +13,7 @@ from svtplay_dl.service import Service
 class Plutotv(Service, OpenGraphThumbMixin):
 supported_domains = ["pluto.tv"]
 urlreg = r"/on-demand/(movies|series)/([^/]+)(/season/\d+/episode/([^/]+))?"
+urlreg2 = r"/on-demand/(movies|series)/([^/]+)(/episode/([^/]+))?"

 def get(self):
 self.data = self.get_urldata()
@@ -24,30 +26,30 @@ class Plutotv(Service, OpenGraphThumbMixin):
 self.slug = urlmatch.group(2)
 episodename = urlmatch.group(4)
+if episodename is None:
+urlmatch = re.search(self.urlreg2, parse.path)
+if not urlmatch:
+yield ServiceError("Can't find what video it is or live is not supported")
+return
+self.slug = urlmatch.group(2)
+episodename = urlmatch.group(4)
 self._janson()
 HLSplaylist = None
-found = False

-servicevod = f"https://service-vod.clusters.pluto.tv/v4/vod/slugs/{self.slug}"
-res = self.http.request("get", servicevod, params=self.query, headers={"Authorization": f"Bearer {self.sessionToken}"})
-janson2 = res.json()
-if janson2["type"] == "series":
-self.output["title"] = janson2["name"]
-for season in janson2["seasons"]:
-for episode in season["episodes"]:
-if episode["slug"] == episodename and not found:
-self.output["season"] = episode["season"]
-self.output["episode"] = episode["number"]
-for stich in episode["stitched"]["paths"]:
-if stich["type"] == "hls":
-HLSplaylist = f"{self.mediaserver}{stich['path']}?{self.stitcherParams}"
-if self.http.request("get", HLSplaylist, headers={"Authorization": f"Bearer {self.sessionToken}"}).status_code < 400:
-found = True
-else:
-self.output["title"] == janson2["name"]
-for stich in janson2["stitched"]["paths"]:
-if stich["type"] == "hls":
-HLSplaylist = f"{self.mediaserver}{stich['path']}?{self.stitcherParams}"
+for vod in self.janson["VOD"]:
+self.output["title"] = vod["name"]
+if "seasons" in vod:
+for season in vod["seasons"]:
+if "episodes" in season:
+for episode in season["episodes"]:
+if episode["_id"] == episodename:
+self.output["season"] = season["number"]
+self.output["episodename"] = episode["name"]
+for stich in episode["stitched"]["paths"]:
+if stich["type"] == "hls":
+HLSplaylist = f"{self.mediaserver}{stich['path']}?{self.stitcherParams}"
+if self.http.request("get", HLSplaylist).status_code < 400:
+break

 if not HLSplaylist:
 yield ServiceError("Can't find video info")
@@ -66,6 +68,9 @@ class Plutotv(Service, OpenGraphThumbMixin):
 self.data = self.get_urldata()
 parse = urlparse(self.url)
 urlmatch = re.search(self.urlreg, parse.path)
+if urlmatch is None:
+logging.error("Can't find what video it is or live is not supported")
+return episodes
 if urlmatch.group(1) != "series":
 return episodes
 self.slug = urlmatch.group(2)
@@ -74,13 +79,13 @@ class Plutotv(Service, OpenGraphThumbMixin):
 match = re.search(r"^/([^\/]+)/", parse.path)
 language = match.group(1)

-servicevod = f"https://service-vod.clusters.pluto.tv/v4/vod/slugs/{self.slug}"
-res = self.http.request("get", servicevod, params=self.query, headers={"Authorization": f"Bearer {self.sessionToken}"})
-janson2 = res.json()
-for season in janson2["seasons"]:
-seasonnr = season["number"]
+for vod in self.janson["VOD"]:
+if "seasons" in vod:
+for season in vod["seasons"]:
+seasonnr = season["number"]
+if "episodes" in season:
 for episode in season["episodes"]:
-episodes.append(f"https://pluto.tv/{language}/on-demand/series/{self.slug}/season/{seasonnr}/episode/{episode['slug']}")
+episodes.append(f"https://pluto.tv/{language}/on-demand/series/{self.slug}/season/{seasonnr}/episode/{episode['_id']}")
 return episodes

 def _janson(self) -> None:
@@ -88,20 +93,20 @@ class Plutotv(Service, OpenGraphThumbMixin):
 self.query = {
 "appName": "web",
 "appVersion": self.appversion.group(1) if self.appversion else "na",
-"deviceVersion": "100.0.0",
+"deviceVersion": "119.0.0",
 "deviceModel": "web",
 "deviceMake": "firefox",
 "deviceType": "web",
 "clientID": uuid.uuid1(),
 "clientModelNumber": "1.0.0",
-"episodeSlugs": self.slug,
+"seriesIDs": self.slug,
 "serverSideAds": "false",
 "constraints": "",
 "drmCapabilities": "widevine%3AL3",
 "clientTime": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
 }
 res = self.http.request("get", "https://boot.pluto.tv/v4/start", params=self.query)
-janson = res.json()
-self.mediaserver = janson["servers"]["stitcher"]
-self.stitcherParams = janson["stitcherParams"]
-self.sessionToken = janson["sessionToken"]
+self.janson = res.json()
+self.mediaserver = self.janson["servers"]["stitcher"]
+self.stitcherParams = self.janson["stitcherParams"]
+self.sessionToken = self.janson["sessionToken"]
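
The reworked Pluto TV service now takes everything it needs (the stitcher server, the session token and the VOD catalogue) from the single boot response instead of a separate service-vod call. A rough sketch of that request with the requests library, assuming the parameter and field names exactly as they appear in the diff; the slug value is a placeholder:

    import datetime
    import uuid
    import requests

    query = {
        "appName": "web",
        "appVersion": "na",
        "deviceVersion": "119.0.0",
        "deviceModel": "web",
        "deviceMake": "firefox",
        "deviceType": "web",
        "clientID": str(uuid.uuid1()),
        "clientModelNumber": "1.0.0",
        "seriesIDs": "example-series-slug",  # placeholder slug, not a real title
        "serverSideAds": "false",
        "constraints": "",
        "drmCapabilities": "widevine%3AL3",
        "clientTime": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
    }
    boot = requests.get("https://boot.pluto.tv/v4/start", params=query, timeout=30).json()
    mediaserver = boot["servers"]["stitcher"]   # base URL for the stitched HLS playlists
    session_token = boot["sessionToken"]
    for vod in boot.get("VOD", []):             # the catalogue the service now iterates
        print(vod["name"])
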
@@ -14,7 +14,7 @@ class Svt(Svtplay):
 def get(self):
 vid = None
 data = self.get_urldata()
-match = re.search("n.urqlState=(.*);", data)
+match = re.search("urqlState = (.*);", data)
 if not match:
 match = re.search(r"stateData = JSON.parse\(\"(.*)\"\)\<\/script", data)
 if not match:
@@ -119,8 +119,6 @@ class Svtplay(Service, MetadataThumbMixin):
 return

 for i in janson["videoReferences"]:
-if i["format"] == "hls-cmaf-full":
-continue
 if i["url"].find(".m3u8") > 0:
 yield from hlsparse(self.config, self.http.request("get", i["url"]), i["url"], output=self.output)
 elif i["url"].find(".mpd") > 0:
@@ -32,7 +32,6 @@ class Vimeo(Service, OpenGraphThumbMixin):
 player_data = self.http.request("get", player_url).text

 if player_data:
-
 jsondata = json.loads(player_data)

 if ("hls" in jsondata["request"]["files"]) and ("fastly_skyfire" in jsondata["request"]["files"]["hls"]["cdns"]):
@@ -353,7 +353,7 @@ def _wrstsegments(entries: list, convert=False) -> str:
 time = 0
 subs = []
 for cont in entries:
-cont = re.sub(r"\n\n\d+\n", "\n", cont)  # remove sequence numbers
+cont = re.sub(r"\n\n[-0-9a-f\d]+\n", "\n", cont)  # remove sequence numbers
 text = cont.split("\n")
 for t in text:  # is in text[1] for tv4play, but this should be more future proof
 if "X-TIMESTAMP-MAP=MPEGTS" in t:
@@ -479,7 +479,9 @@ def tt_text(node, data):


 def strdate(datestring):
-match = re.search(r"^((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?) --> ((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?)$", datestring)
+match = re.search(r"^((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?) --> ((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?)[ ]*", datestring)
+if match and match.group(5) is None and match.group(6) is not None:
+return None
 return match

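
The loosened strdate() pattern replaces the trailing $ with [ ]*, so a cue timing line followed by WebVTT cue settings still matches, while the added guard returns None when the end timestamp matched only the short MM:SS form. A quick illustration with made-up cue lines:

    import re

    CUE = r"^((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?) --> ((\d+:\d+:\d+[\.,]*[0-9]*)?(\d+:\d+[\.,]*[0-9]*)?)[ ]*"

    # Trailing cue settings no longer break the match (the old pattern anchored on $).
    print(bool(re.search(CUE, "00:00:01.000 --> 00:00:04.000 align:start position:0%")))  # True
    print(bool(re.search(CUE, "00:00:01.000 --> 00:00:04.000")))                          # True
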
@@ -164,7 +164,7 @@ class streamSubtile(unittest.TestCase):
 subtitle(config, "wrst", "http://example.com", subfix="no"),
 ]
 subs = subtitle_filter(test_subs)
-assert len(subs) == 4
+assert len(subs) == 3

 def test_subtitleFilter3(self):
 config = setup_defaults()
@@ -200,4 +200,4 @@ class streamSubtile(unittest.TestCase):
 subtitle(config, "wrst", "http://example.com", subfix="no"),
 ]
 subs = subtitle_filter(test_subs)
-assert len(subs) == 3
+assert len(subs) == 2
@@ -41,6 +41,9 @@ class HTTP(Session):
 if headers:
 for i in headers.keys():
 self.headers[i] = headers[i]
+else:
+if "Range" in self.headers:  # for some reason headers is always there for each request
+del self.headers["Range"]  # need to remove it because we dont want it
 logging.debug("HTTP getting %r", url)
 res = Session.request(self, method, url, verify=self.verify, proxies=self.proxy, *args, **kwargs)
 return res
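
The Range cleanup above exists because headers assigned on the session object persist for every subsequent request on that session. A short standalone illustration of that requests.Session behaviour (not svtplay-dl code):

    import requests

    session = requests.Session()
    session.headers["Range"] = "bytes=0-99"   # set once for a byte-range fetch...
    print("Range" in session.headers)         # True: it would ride along on every later request
    session.headers.pop("Range", None)        # ...so it has to be dropped before a full request
    print("Range" in session.headers)         # False
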
@@ -1,4 +1,6 @@
 import logging
+import operator
+import re
 from operator import itemgetter
 from typing import List

@@ -10,6 +12,12 @@ from svtplay_dl.utils.http import HTTP
 DEFAULT_PROTOCOL_PRIO = ["dash", "hls", "http"]
 LIVE_PROTOCOL_PRIO = ["hls", "dash", "http"]
 DEFAULT_FORMAT_PRIO = ["h264", "h264-51"]
+OPERATORS = {
+"<": operator.lt,
+"<=": operator.le,
+">": operator.gt,
+">=": operator.ge,
+}


 def sort_quality(data) -> List:
@@ -84,6 +92,8 @@ def subtitle_filter(subtitles) -> List:
 for sub in subtitles:
 if sub.subfix not in languages:
 if all_subs:
+if sub.subfix is None:
+continue
 subs.append(sub)
 languages.append(sub.subfix)
 else:
@@ -101,13 +111,11 @@ def subtitle_decider(stream, subtitles):
 subtitles = subtitle_filter(subtitles)
 if stream.config.get("get_all_subtitles"):
 for sub in subtitles:
-if stream.config.get("get_url"):
-print(sub.url)
-else:
-sub.download()
-if stream.config.get("merge_subtitle"):
-if not sub.subfix:
-stream.config.set("get_all_subtitles", False)
+if sub.subfix:
+if stream.config.get("get_url"):
+print(sub.url)
+else:
+sub.download()
 else:
 if stream.config.get("get_url"):
 print(subtitles[0].url)
@@ -121,8 +129,15 @@ def resolution(streams, resolutions: List) -> List:
 videos = []
 for stream in streams:
 for resolution in resolutions:
-if stream.resolution.find("x") > 0 and stream.resolution.split("x")[1] == resolution:
-videos.append(stream)
+match = re.match(r"(?P<op><=|>=|<|>)?(?P<res>[\d+]+)", resolution)
+op, res = match.group("op", "res")
+if op:
+op = OPERATORS.get(op, operator.eq)
+if op(int(stream.resolution.split("x")[1]), int(res)):
+videos.append(stream)
+else:
+if stream.resolution.find("x") > 0 and stream.resolution.split("x")[1] == resolution:
+videos.append(stream)
 return videos

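
With the OPERATORS table and the rewritten resolution() above, a requested resolution may now carry a comparison prefix (for example ">=720") that is applied to the stream height, while a bare number still means an exact match. A condensed sketch of that matching logic; the helper name and sample values are illustrative:

    import operator
    import re

    OPERATORS = {"<": operator.lt, "<=": operator.le, ">": operator.gt, ">=": operator.ge}

    def height_matches(stream_resolution, wanted):
        # Split an optional comparison operator off the requested resolution,
        # then compare it against the height part of "WIDTHxHEIGHT".
        match = re.match(r"(?P<op><=|>=|<|>)?(?P<res>\d+)", wanted)
        op, res = match.group("op", "res")
        height = int(stream_resolution.split("x")[1])
        return OPERATORS.get(op, operator.eq)(height, int(res))

    print(height_matches("1280x720", ">=720"))   # True
    print(height_matches("1920x1080", "<720"))   # False
    print(height_matches("1280x720", "720"))     # True (no operator: exact match)
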
setup.py: 11 lines changed

@@ -23,8 +23,8 @@ srcdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib/")
 sys.path.insert(0, srcdir)

 vi = sys.version_info
-if vi < (3, 6):
-raise RuntimeError("svtplay-dl requires Python 3.6 or greater")
+if vi < (3, 8):
+raise RuntimeError("svtplay-dl requires Python 3.8 or greater")

 about = {}
 with open(os.path.join(srcdir, "svtplay_dl", "__version__.py")) as f:
@@ -54,17 +54,18 @@ setup(
 long_description_content_type="text/markdown",
 license="MIT",
 url="https://svtplay-dl.se",
-python_requires=">=3.6",
+python_requires=">=3.8",
 classifiers=[
 "Development Status :: 5 - Production/Stable",
 "Environment :: Console",
 "Operating System :: POSIX",
 "Operating System :: Microsoft :: Windows",
 "Programming Language :: Python :: 3",
-"Programming Language :: Python :: 3.6",
-"Programming Language :: Python :: 3.7",
 "Programming Language :: Python :: 3.8",
 "Programming Language :: Python :: 3.9",
+"Programming Language :: Python :: 3.10",
+"Programming Language :: Python :: 3.11",
+"Programming Language :: Python :: 3.12",
 "Topic :: Internet :: WWW/HTTP",
 "Topic :: Multimedia :: Sound/Audio",
 "Topic :: Multimedia :: Video",
|
|||||||
data = fd.read()
|
data = fd.read()
|
||||||
|
|
||||||
newstring = re.sub("(__version__ = get_version[^\n]+)", f'__version__ = "{version}"', data)
|
newstring = re.sub("(__version__ = get_version[^\n]+)", f'__version__ = "{version}"', data)
|
||||||
with open(initfile, "wt") as fd:
|
with open(initfile, "w") as fd:
|
||||||
fd.write(newstring)
|
fd.write(newstring)
|
||||||
|
@@ -446,7 +446,12 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
 dispcmd = str([command] + args)
 # remember shell=False, so use git.cmd on windows, not just git
 process = subprocess.Popen(
-[command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs
+[command] + args,
+cwd=cwd,
+env=env,
+stdout=subprocess.PIPE,
+stderr=(subprocess.PIPE if hide_stderr else None),
+**popen_kwargs,
 )
 break
 except OSError: