Mirror of https://github.com/spaam/svtplay-dl.git

pyupgrade fixes

commit 10e365f695 (parent cd7fa336b1)
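
For readers skimming the diff: `pyupgrade --py36-plus` (the flag pinned in the pre-commit hunk below) mechanically rewrites `str.format()` calls into equivalent f-strings wherever the arguments map one-to-one onto the placeholders. A minimal sketch of the transformation, using hypothetical variable names rather than code from this repository:

```python
status, size = 404, 0  # hypothetical values for illustration

# Before: positional str.format placeholders
msg = "Can't read DASH playlist. {}, size: {}".format(status, size)

# After pyupgrade --py36-plus: the same string as an f-string
msg_f = f"Can't read DASH playlist. {status}, size: {size}"

assert msg == msg_f
```

Note that pyupgrade is deliberately conservative: it leaves redundant `str()` calls inside the braces untouched (see the `{str(e)}` and `{str(x)}` rewrites below), since `{e}` and `{str(e)}` are not guaranteed to be equivalent for every object.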
@@ -15,16 +15,16 @@ repos:
   - id: black
     language_version: python3
 - repo: https://gitlab.com/pycqa/flake8
-  rev: 3.8.4
+  rev: 3.9.1
   hooks:
   - id: flake8
 - repo: https://github.com/asottile/pyupgrade
-  rev: v2.10.0
+  rev: v2.13.0
   hooks:
   - id: pyupgrade
     args: [--py36-plus]
 - repo: https://github.com/asottile/reorder_python_imports
-  rev: v2.4.0
+  rev: v2.5.0
   hooks:
   - id: reorder-python-imports
 - repo: https://github.com/asottile/add-trailing-comma
@@ -72,5 +72,5 @@ def main():
     except KeyboardInterrupt:
         print("")
     except (yaml.YAMLError, yaml.MarkedYAMLError) as e:
-        logging.error("Your settings file(s) contain invalid YAML syntax! Please fix and restart!, {}".format(str(e)))
+        logging.error(f"Your settings file(s) contain invalid YAML syntax! Please fix and restart!, {str(e)}")
         sys.exit(2)
@@ -116,7 +116,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
             root = os.path.dirname(root)  # up a level
 
     if verbose:
-        print("Tried directories {} but none started with prefix {}".format(str(rootdirs), parentdir_prefix))
+        print(f"Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}")
     raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
 
 
@@ -204,7 +204,7 @@ def dashparse(config, res, url, **kwargs):
         streams[0] = ServiceError(f"Can't read DASH playlist. {res.status_code}")
         return streams
     if len(res.text) < 1:
-        streams[0] = ServiceError("Can't read DASH playlist. {}, size: {}".format(res.status_code, len(res.text)))
+        streams[0] = ServiceError(f"Can't read DASH playlist. {res.status_code}, size: {len(res.text)}")
         return streams
 
     return _dashparse(config, res.text, url, res.cookies, **kwargs)
@@ -70,7 +70,7 @@ class postprocess:
             ]
             if self.subfixes and len(self.subfixes) >= 2:
                 for subfix in self.subfixes:
-                    subfile = "{}.srt".format(name + subfix)
+                    subfile = f"{name + subfix}.srt"
                     cmd += ["-i", subfile]
             else:
                 subfile = f"{name}.srt"
@@ -86,7 +86,7 @@ class postprocess:
         logging.info("Muxing done, removing the old files.")
         if self.subfixes and len(self.subfixes) >= 2:
             for subfix in self.subfixes:
-                subfile = "{}.srt".format(name + subfix)
+                subfile = f"{name + subfix}.srt"
                 os.remove(subfile)
         else:
             os.remove(subfile)
@@ -151,7 +151,7 @@ class postprocess:
             ]
             if self.subfixes and len(self.subfixes) >= 2:
                 for subfix in self.subfixes:
-                    subfile = "{}.srt".format(name + subfix)
+                    subfile = f"{name + subfix}.srt"
                     cmd += ["-i", subfile]
             else:
                 subfile = f"{name}.srt"
@@ -172,7 +172,7 @@ class postprocess:
         if self.config.get("merge_subtitle") and not self.config.get("subtitle"):
             if self.subfixes and len(self.subfixes) >= 2:
                 for subfix in self.subfixes:
-                    subfile = "{}.srt".format(name + subfix)
+                    subfile = f"{name + subfix}.srt"
                     os.remove(subfile)
             else:
                 os.remove(subfile)
@@ -258,10 +258,10 @@ def _sublanguage(stream, config, subfixes):
                 subfix = subfix.strip("-")
                 langs += [exceptions[subfix]]
                 continue
-            subfile = "{}.srt".format(os.path.splitext(formatname(stream.output, config, stream.output_extention))[0] + subfix)
+            subfile = f"{os.path.splitext(formatname(stream.output, config, stream.output_extention))[0] + subfix}.srt"
             langs += [query(subfile)]
     else:
-        subfile = "{}.srt".format(os.path.splitext(formatname(stream.output, config, stream.output_extention))[0])
+        subfile = f"{os.path.splitext(formatname(stream.output, config, stream.output_extention))[0]}.srt"
         langs += [query(subfile)]
     if len(langs) >= 2:
         logging.info("Language codes: " + ", ".join(langs))
@@ -43,7 +43,7 @@ class Aftonbladet(Service):
         try:
             janson = json.loads(match.group(1))
         except json.decoder.JSONDecodeError:
-            yield ServiceError("Can't decode api request: {}".format(match.group(1)))
+            yield ServiceError(f"Can't decode api request: {match.group(1)}")
             return
 
         videos = self._get_video(janson)
@@ -38,7 +38,7 @@ class Dplay(Service):
             if not match:
                 yield ServiceError("Can't detect 'kanaler'")
                 return
-            path = "/channels/{}".format(match.group(1))
+            path = f"/channels/{match.group(1)}"
             url = f"https://disco-api.{self.domain}/content{path}"
             channel = True
             self.config.set("live", True)
@@ -47,7 +47,7 @@ class Dplay(Service):
             if not match:
                 yield ServiceError("Can't find program url")
                 return
-            path = "/shows/{}".format(match.group(2))
+            path = f"/shows/{match.group(2)}"
             url = f"https://disco-api.{self.domain}/content{path}"
             res = self.http.get(url, headers={"x-disco-client": "WEB:UNKNOWN:dplay-client:0.0.1"})
             programid = res.json()["data"]["id"]
@@ -70,7 +70,7 @@ class Dplay(Service):
                 return
         else:
             match = re.search("(videos|videoer)/(.*)$", parse.path)
-            url = "https://disco-api.{}/content/videos/{}".format(self.domain, match.group(2))
+            url = f"https://disco-api.{self.domain}/content/videos/{match.group(2)}"
             res = self.http.get(url, headers={"x-disco-client": "WEB:UNKNOWN:dplay-client:0.0.1"})
             janson = res.json()
             if "errors" in janson:
@@ -142,7 +142,7 @@ class Dplay(Service):
         if self.domain in ["discoveryplus.no", "discoveryplus.dk"]:
             urllocal = "mer"
 
-        url = "http://disco-api.{}/cms/routes/program{}/{}?decorators=viewingHistory&include=default".format(self.domain, urllocal, match.group(2))
+        url = f"http://disco-api.{self.domain}/cms/routes/program{urllocal}/{match.group(2)}?decorators=viewingHistory&include=default"
         res = self.http.get(url)
         if res.status_code > 400:
             logging.error("Cant find any videos. wrong url?")
@@ -200,7 +200,7 @@ class Dplay(Service):
     def _token(self) -> bool:
         # random device id for cookietoken
         deviceid = hashlib.sha256(bytes(int(random.random() * 1000))).hexdigest()
-        url = "https://disco-api.{}/token?realm={}&deviceId={}&shortlived=true".format(self.domain, REALMS[self.domain], deviceid)
+        url = f"https://disco-api.{self.domain}/token?realm={REALMS[self.domain]}&deviceId={deviceid}&shortlived=true"
         res = self.http.get(url)
         if res.status_code >= 400:
             return False
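
One quirk worth flagging in the `_token` hunk above (an observation about the pre-existing code, not part of this diff): `bytes(n)` with an integer `n` produces `n` zero bytes, so the "random device id" hashes a zero-filled buffer whose only randomness is its length. A short illustration:

```python
import hashlib
import random

# bytes(int) returns that many zero bytes, not the digits of the number
assert bytes(3) == b"\x00\x00\x00"

# so the sha256 input varies only in how many zero bytes it contains
deviceid = hashlib.sha256(bytes(int(random.random() * 1000))).hexdigest()
print(deviceid)
```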
@@ -19,7 +19,7 @@ class Facebook(Service, OpenGraphThumbMixin):
         if not match:
             yield ServiceError("Cant find params info. video need to be public.")
             return
-        data2 = json.loads('["{}"]'.format(match.group(1)))
+        data2 = json.loads(f'["{match.group(1)}"]')
         data2 = json.loads(unquote_plus(data2[0]))
         if "sd_src_no_ratelimit" in data2["video_data"]["progressive"][0]:
             yield HTTP(copy.copy(self.config), data2["video_data"]["progressive"][0]["sd_src_no_ratelimit"], "240", output=self.output)
@@ -21,7 +21,7 @@ class Flowonline(Service, OpenGraphThumbMixin):
             return
         parse = urlparse(self.url)
 
-        url = "{}://{}{}".format(parse.scheme, parse.netloc, match.group(1))
+        url = f"{parse.scheme}://{parse.netloc}{match.group(1)}"
 
         data = self.http.get(url)
 
@@ -28,7 +28,7 @@ class Lemonwhale(Service):
             for n in list(streams.keys()):
                 yield streams[n]
 
-        url = "http://ljsp.lwcdn.com/web/public/video.json?id={}&delivery=hls".format(decode_html_entities(vid))
+        url = f"http://ljsp.lwcdn.com/web/public/video.json?id={decode_html_entities(vid)}&delivery=hls"
         data = self.http.request("get", url).text
         jdata = json.loads(data)
         if "videos" in jdata:
@@ -127,7 +127,7 @@ class MtvMusic(Service, OpenGraphThumbMixin):
         try:
             janson = json.loads(match.group(1))
         except Exception:
-            yield ServiceError("Can't decode api request: {}".format(match.group(1)))
+            yield ServiceError(f"Can't decode api request: {match.group(1)}")
             return
 
         parse = urlparse(self.url)
@@ -29,7 +29,7 @@ class Nrk(Service, OpenGraphThumbMixin):
         if not match:
             yield ServiceError("Cant find apiurl.")
             return
-        dataurl = "{}/mediaelement/{}".format(match.group(1), video_id)
+        dataurl = f"{match.group(1)}/mediaelement/{video_id}"
         data = self.http.request("get", dataurl).text
         data = json.loads(data)
         manifest_url = data["mediaUrl"]
@@ -126,7 +126,7 @@ class OppetArkiv(Service, OpenGraphThumbMixin):
             for match in regex.finditer(data):
                 if n == self.config.get("all_last"):
                     break
-                episodes.append("http://www.oppetarkiv.se{}".format(match.group(1)))
+                episodes.append(f"http://www.oppetarkiv.se{match.group(1)}")
                 n += 1
             page += 1
 
@@ -82,7 +82,7 @@ class Picsearch(Service, OpenGraphThumbMixin):
         if not match:
             match = re.search('iframe src="(//csp.screen9.com[^"]+)"', self.get_urldata())
             if match:
-                url = "http:{}".format(match.group(1))
+                url = f"http:{match.group(1)}"
                 data = self.http.request("get", url)
                 self.backupapi = url
         match = re.search(r"picsearch_ajax_auth = '([^']+)'", data.text)
@@ -111,7 +111,7 @@ class Picsearch(Service, OpenGraphThumbMixin):
         if not match:
             match = re.search('iframe src="(//csp.screen9.com[^"]+)"', self.get_urldata())
             if match:
-                url = "http:{}".format(match.group(1))
+                url = f"http:{match.group(1)}"
                 data = self.http.request("get", url)
         match = re.search(r"mediaid: '([^']+)'", data.text)
         if not match:
@@ -19,7 +19,7 @@ class Pokemon(Service, OpenGraphThumbMixin):
             yield ServiceError("Cant county code")
             return
 
-        res = self.http.get("http://www.pokemon.com/api/pokemontv/channels?region={}".format(match.group(1)))
+        res = self.http.get(f"http://www.pokemon.com/api/pokemontv/channels?region={match.group(1)}")
         janson = res.json()
         match = re.search('data-video-season="([0-9]+)"', data)
         season = match.group(1)
@@ -246,8 +246,8 @@ class Svtplay(Service, MetadataThumbMixin):
         if season_nr is None or episode_nr is None:
             return season, episode
 
-        season = "{:02d}".format(int(season_nr))
-        episode = "{:02d}".format(int(episode_nr))
+        season = f"{int(season_nr):02d}"
+        episode = f"{int(episode_nr):02d}"
 
         return season, episode
 
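
The Svtplay hunk shows that pyupgrade carries format specs such as `:02d` into the f-string unchanged. A quick equivalence check with hypothetical values:

```python
season_nr, episode_nr = "7", "12"

# old and new spellings produce identical zero-padded strings
assert "{:02d}".format(int(season_nr)) == f"{int(season_nr):02d}" == "07"
assert "{:02d}".format(int(episode_nr)) == f"{int(episode_nr):02d}" == "12"
```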
@@ -21,7 +21,7 @@ class Youplay(Service, OpenGraphThumbMixin):
             yield ServiceError(f"Cant find video info for {self.url}")
             return
 
-        data = self.http.request("get", "http:{}".format(match.group(1)).content)
+        data = self.http.request("get", f"http:{match.group(1)}".content)
         match = re.search(r'decodeURIComponent\("([^"]+)"\)\)', data)
         if not match:
             yield ServiceError("Can't decode video info")
@@ -149,7 +149,7 @@ class subtitle:
             line = ""
             for txt in text.itertext():
                 line += f"{txt}"
-            all += "{}\n".format(decode_html_entities(line.lstrip()))
+            all += f"{decode_html_entities(line.lstrip())}\n"
             subs += "{}\n{} --> {}\n{}\n".format(n, timecolon(sub.attrib["TimeIn"]), timecolon(sub.attrib["TimeOut"]), all)
         subs = re.sub("&amp;", r"&", subs)
         return subs
@@ -173,7 +173,7 @@ class subtitle:
             if sync:
                 if int(sync.group(1)) != int(timea):
                     if data and data != "&nbsp;":
-                        subs += "{}\n{} --> {}\n".format(number, timestr(timea), timestr(sync.group(1)))
+                        subs += f"{number}\n{timestr(timea)} --> {timestr(sync.group(1))}\n"
                         text = "%s\n" % TAG_RE.sub("", data.replace("<br>", "\n"))
                         text = decode_html_entities(text)
                         if text[len(text) - 2] != "\n":
@@ -310,7 +310,7 @@ class subtitle:
                 ha = strdate(subs[-1][0])
                 ha3 = strdate(item)
                 second = str2sec(ha3.group(2)) + time
-                subs[-1][0] = "{} --> {}".format(ha.group(1), sec2str(second))
+                subs[-1][0] = f"{ha.group(1)} --> {sec2str(second)}"
                 skip = True
                 pre_date_skip = False
                 continue
@@ -322,7 +322,7 @@ class subtitle:
                 skip = False
                 first = str2sec(has_date.group(1)) + time
                 second = str2sec(has_date.group(2)) + time
-                sub.append("{} --> {}".format(sec2str(first), sec2str(second)))
+                sub.append(f"{sec2str(first)} --> {sec2str(second)}")
                 several_items = True
                 pre_date_skip = False
             elif has_date is None and skip is False and pre_date_skip is False:
@@ -420,7 +420,7 @@ def timestr(msec):
 
 def timecolon(data):
     match = re.search(r"(\d+:\d+:\d+):(\d+)", data)
-    return "{},{}".format(match.group(1), match.group(2))
+    return f"{match.group(1)},{match.group(2)}"
 
 
 def norm(name):
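
The rewritten `timecolon` keeps the `match.group(...)` expressions inline in the f-string. Standalone, exercised with a hypothetical SRT-style timestamp:

```python
import re


def timecolon(data):
    # turn the final colon separator into the comma SRT expects
    match = re.search(r"(\d+:\d+:\d+):(\d+)", data)
    return f"{match.group(1)},{match.group(2)}"


assert timecolon("00:01:02:345") == "00:01:02,345"
```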
@@ -452,7 +452,7 @@ def strdate(datestring):
 def sec2str(seconds):
     m, s = divmod(seconds, 60)
     h, m = divmod(m, 60)
-    return "{:02d}:{:02d}:{:06.3f}".format(int(h), int(m), s)
+    return f"{int(h):02d}:{int(m):02d}:{s:06.3f}"
 
 
 def str2sec(string):
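
Likewise, the float spec `:06.3f` survives the rewrite in `sec2str`. The function from the hunk above, exercised with a sample duration:

```python
def sec2str(seconds):
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    # 02d zero-pads hours/minutes; 06.3f pads seconds to 6 chars incl. millis
    return f"{int(h):02d}:{int(m):02d}:{s:06.3f}"


assert sec2str(3661.5) == "01:01:01.500"
```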
@@ -192,7 +192,7 @@ def get_one_media(stream):
         errormsg = None
         for exc in error:
             if errormsg:
-                errormsg = "{}. {}".format(errormsg, str(exc))
+                errormsg = f"{errormsg}. {str(exc)}"
             else:
                 errormsg = str(exc)
         if errormsg:
@@ -72,7 +72,7 @@ def get_full_url(url, srcurl):
         return url
     if url[0] == "/":
         baseurl = re.search(r"^(http[s]{0,1}://[^/]+)/", srcurl)
-        return "{}{}".format(baseurl.group(1), url)
+        return f"{baseurl.group(1)}{url}"
 
     # remove everything after last / in the path of the URL
     baseurl = re.sub(r"^([^\?]+)/[^/]*(\?.*)?$", r"\1/", srcurl)
@@ -158,10 +158,10 @@ def _formatname(output, config, extension):
         if key == "title" and output[key]:
             name = name.replace("{title}", filenamify(output[key]))
         if key == "season" and output[key]:
-            number = "{:02d}".format(int(output[key]))
+            number = f"{int(output[key]):02d}"
             name = name.replace("{season}", number)
         if key == "episode" and output[key]:
-            number = "{:02d}".format(int(output[key]))
+            number = f"{int(output[key]):02d}"
             name = name.replace("{episode}", number)
         if key == "episodename" and output[key]:
             name = name.replace("{episodename}", filenamify(output[key]))
@@ -120,7 +120,7 @@ def select_quality(config, streams):
     # If none remains, the bitrate filtering was too tight.
     if len(wanted) == 0:
         data = sort_quality(streams)
-        quality = ", ".join("{} ({})".format(str(x), str(y)) for x, y in data)
+        quality = ", ".join(f"{str(x)} ({str(y)})" for x, y in data)
         raise error.UIException("Can't find that quality. Try one of: %s (or " "try --flexible-quality)" % quality)
 
     http = HTTP(config)
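
In the `select_quality` hunk, the rewrite happens inside a generator expression passed to `str.join`; pyupgrade converts the `format` call but, as elsewhere, leaves the `str()` wrappers alone. A self-contained example with invented stream data standing in for `sort_quality()` output:

```python
# hypothetical (name, bitrate) pairs for illustration
data = [("hls", 1280), ("dash", 2500)]

quality = ", ".join(f"{str(x)} ({str(y)})" for x, y in data)
assert quality == "hls (1280), dash (2500)"
```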
@@ -324,7 +324,7 @@ def get_root():
         me_dir = os.path.normcase(os.path.splitext(me)[0])
         vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
         if me_dir != vsr_dir:
-            print("Warning: build in {} is using versioneer.py from {}".format(os.path.dirname(me), versioneer_py))
+            print(f"Warning: build in {os.path.dirname(me)} is using versioneer.py from {versioneer_py}")
     except NameError:
         pass
     return root
@@ -1159,7 +1159,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
             root = os.path.dirname(root)  # up a level
 
     if verbose:
-        print("Tried directories {} but none started with prefix {}".format(str(rootdirs), parentdir_prefix))
+        print(f"Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}")
     raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
 
 