2013-03-02 21:26:28 +01:00
|
|
|
# ex:ts=4:sw=4:sts=4:et
|
|
|
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
2014-06-07 20:48:44 +02:00
|
|
|
import copy
|
2019-08-25 00:40:39 +02:00
|
|
|
import json
|
|
|
|
import re
|
2013-02-12 19:43:37 +01:00
|
|
|
|
2019-08-25 00:40:39 +02:00
|
|
|
from svtplay_dl.error import ServiceError
|
2014-04-27 13:24:44 +02:00
|
|
|
from svtplay_dl.fetcher.hds import hdsparse
|
2015-10-04 14:37:16 +02:00
|
|
|
from svtplay_dl.fetcher.hls import hlsparse
|
2019-08-25 00:40:39 +02:00
|
|
|
from svtplay_dl.service import OpenGraphThumbMixin
|
|
|
|
from svtplay_dl.service import Service
|
2014-08-31 01:20:36 +02:00
|
|
|
from svtplay_dl.subtitle import subtitle
|
2013-02-12 19:43:37 +01:00
|
|
|
|
2015-09-15 20:10:32 +02:00
|
|
|
|
2014-01-26 01:51:35 +01:00
|
|
|
class Nrk(Service, OpenGraphThumbMixin):
    """Service for downloading video from NRK, the Norwegian public broadcaster."""

    supported_domains = ["nrk.no", "tv.nrk.no", "p3.no", "tv.nrksuper.no"]

    def get(self):
        """Yield subtitle and stream objects for the video at self.url.

        Yields a ServiceError (and returns) on failure: missing video id,
        missing API base url, an unplayable media element, or geoblocking.
        """
        # First, find the video ID from the html document
        match = re.search('program-id" content="([^"]+)"', self.get_urldata())
        if not match:
            yield ServiceError("Can't find video id.")
            return
        video_id = match.group(1)

        # The page also embeds the base url of NRK's "psapi" REST backend.
        match = re.search('psapi-base-url="([^"]+)"', self.get_urldata())
        if not match:
            yield ServiceError("Cant find apiurl.")
            return

        # Get media element details
        dataurl = "{}/mediaelement/{}".format(match.group(1), video_id)
        data = json.loads(self.http.request("get", dataurl).text)
        manifest_url = data["mediaUrl"]
        self.config.set("live", data["isLive"])
        if manifest_url is None:
            # No playable manifest; the API reports the reason in messageType.
            yield ServiceError(data["messageType"])
            return

        # Check if subtitles are available
        if data["subtitlesUrlPath"]:
            yield subtitle(copy.copy(self.config), "tt", data["subtitlesUrlPath"], output=self.output)

        # The Akamai HDS manifest url maps onto the corresponding HLS master playlist.
        hlsurl = manifest_url.replace("/z/", "/i/").replace("manifest.f4m", "master.m3u8")
        res = self.http.request("get", hlsurl)
        if res.status_code == 403:
            yield ServiceError("Can't fetch the video because of geoblocking")
            return

        streams = hlsparse(self.config, res, hlsurl, output=self.output)
        if streams:
            yield from streams.values()
        else:
            # Fall back to the original HDS manifest when HLS yielded nothing.
            streams = hdsparse(
                copy.copy(self.config),
                self.http.request("get", manifest_url, params={"hdcore": "3.7.0"}),
                manifest_url,
                output=self.output,
            )
            if streams:
                yield from streams.values()
|