mirror of
https://github.com/spaam/svtplay-dl.git
synced 2024-11-24 12:15:40 +01:00
baa8d76551
self.get_urldata() is equivalent to get_http_data(self.url), but also caches the data, so no additional requests are made if it is called multiple times (e.g. when grabbing the title or downloading the thumbnail). Generic().get(url) still causes it to be fetched an extra time.
54 lines
1.6 KiB
Python
54 lines
1.6 KiB
Python
# pylint has issues with urlparse: "some types could not be inferred"
|
|
# pylint: disable=E1103
|
|
|
|
from __future__ import absolute_import
|
|
import sys
|
|
import re
|
|
import json
|
|
|
|
from svtplay_dl.utils.urllib import urlparse
|
|
from svtplay_dl.service import Service
|
|
|
|
from svtplay_dl.fetcher.rtmp import download_rtmp
|
|
from svtplay_dl.fetcher.hls import download_hls
|
|
from svtplay_dl.fetcher.http import download_http
|
|
|
|
from svtplay_dl.log import log
|
|
|
|
class Radioplay(Service):
    """Service plugin for grabbing live audio streams from radioplay.se."""

    supported_domains = ['radioplay.se']

    def get(self, options):
        """Find the station named by the URL path and download its stream.

        Prefers HLS when ``options.hls`` is set; otherwise tries RTMP and
        falls back to a plain HTTP mp3 stream. Logs an error and exits
        with status 2 whenever no usable stream can be located.
        """
        # The page embeds the station list as a JSON blob inside a script tag.
        match = re.search(r"liveStationsRedundancy = ({.*});</script>", self.get_urldata())
        parse = urlparse(self.url)
        # The URL path (minus the leading slash) identifies the station.
        station = parse.path[1:]
        streams = None
        if match:
            data = json.loads(match.group(1))
            for i in data["stations"]:
                # Match case-insensitively with spaces stripped, e.g.
                # "Mix Megapol" matches the path "mixmegapol".
                if station == i["name"].lower().replace(" ", ""):
                    streams = i["streams"]
                    break
            else:
                # for-else: no station in the list matched the URL path.
                log.error("Can't find any streams.")
                sys.exit(2)
        if streams:
            if options.hls:
                try:
                    m3u8_url = streams["hls"]
                    download_hls(options, m3u8_url)
                except KeyError:
                    log.error("Can't find any streams.")
                    sys.exit(2)
            else:
                try:
                    rtmp = streams["rtmp"]
                    download_rtmp(options, rtmp)
                except KeyError:
                    # No RTMP stream: fall back to plain HTTP mp3. A station
                    # may lack both, so the mp3 lookup is guarded too instead
                    # of letting a raw KeyError traceback escape (bug fix).
                    try:
                        mp3 = streams["mp3"]
                    except KeyError:
                        log.error("Can't find any streams.")
                        sys.exit(2)
                    download_http(options, mp3)
        else:
            log.error("Can't find any streams.")
            sys.exit(2)