# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import re
import json
import copy

from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.subtitle import subtitle
from svtplay_dl.error import ServiceError


class Nrk(Service, OpenGraphThumbMixin):
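    """Service implementation for NRK's web players."""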
    supported_domains = ['nrk.no', 'tv.nrk.no', 'p3.no', 'tv.nrksuper.no']

    def get(self):
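        # Generator: yields subtitle and stream objects, or ServiceError on failure.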
        # First, find the video ID in the HTML document
        match = re.search("programId: \"([^\"]+)\"", self.get_urldata())
        if match:
            video_id = match.group(1)
        else:
            yield ServiceError("Can't find video id.")
            return

        # Get media element details
        match = re.search("apiBaseUrl: '([^']+)'", self.get_urldata())
        if not match:
            yield ServiceError("Can't find apiurl.")
            return
        dataurl = "{0}/mediaelement/{1}".format(match.group(1), video_id)
        data = self.http.request("get", dataurl).text
        data = json.loads(data)
        manifest_url = data["mediaUrl"]
        self.config.set("live", data["isLive"])
        if manifest_url is None:
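            # No playable mediaUrl; messageType holds the API's reason (rights or geoblocking, for example).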
            yield ServiceError(data["messageType"])
            return
        # Check if subtitles are available
        if data["subtitlesUrlPath"]:
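            # "tt" selects the timed-text (TTML) subtitle handler.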
            yield subtitle(copy.copy(self.config), "tt", data["subtitlesUrlPath"])

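        # Akamai convention: the HLS master playlist sits next to the HDS manifest
        # (swap /z/ for /i/ and manifest.f4m for master.m3u8).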
        hlsurl = manifest_url.replace("/z/", "/i/").replace("manifest.f4m", "master.m3u8")
        data = self.http.request("get", hlsurl)
        if data.status_code == 403:
            yield ServiceError("Can't fetch the video because of geoblocking")
            return
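        # Parse the HLS master playlist into the individual quality streams.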
        streams = hlsparse(self.config, data, hlsurl, output=self.output)
        if streams:
            for n in list(streams.keys()):
                yield streams[n]

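        # Also try the HDS (f4m) manifest; the hdcore parameter is commonly required by Akamai HDS endpoints.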
        streams = hdsparse(copy.copy(self.config), self.http.request("get", manifest_url, params={"hdcore": "3.7.0"}),
                           manifest_url, output=self.output)
        if streams:
            for n in list(streams.keys()):
                yield streams[n]