from __future__ import absolute_import
import re
import json
import copy

from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.utils.urllib import urlparse
from svtplay_dl.fetcher.http import HTTP
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.error import ServiceError


class Dbtv(Service, OpenGraphThumbMixin):
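    """Service for videos on dbtv.no."""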
    supported_domains = ['dbtv.no']

    def get(self):
        data = self.get_urldata()

        if self.exclude():
            yield ServiceError("Excluding video")
            return

        # The video id (brightcoveId) is the last path segment of the URL.
        parse = urlparse(self.url)
        vidoid = parse.path[parse.path.rfind("/")+1:]

        # The page embeds its playlist as a javascript assignment: JSONdata = {...};
        match = re.search(r'JSONdata = ({.*});', data)
        if not match:
            yield ServiceError("Can't find json data")
            return
        janson = json.loads(match.group(1))
        playlist = janson["playlist"]

        for i in playlist:
            if i["brightcoveId"] == int(vidoid):
                # Parse HLS streams when the entry provides an HLS URL.
                if i["HLSURL"]:
                    streams = hlsparse(self.options, self.http.request("get", i["HLSURL"]), i["HLSURL"])
                    for n in list(streams.keys()):
                        yield streams[n]
                # Progressive MP4 renditions are yielded as plain HTTP downloads.
                for n in i["renditions"]:
                    if n["container"] == "MP4":
                        yield HTTP(copy.copy(self.options), n["URL"], int(n["rate"])/1000)
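

# ---------------------------------------------------------------------------
# Illustrative sketch only, not used by svtplay_dl itself: it shows how the
# JSONdata extraction and brightcoveId matching in Dbtv.get() behave. The page
# snippet, id, URL and rate below are made-up sample values, and treating
# "rate" as bit/s (hence the /1000) is an interpretation, not confirmed here.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    sample_page = (
        'var JSONdata = {"playlist": [{"brightcoveId": 12345, "HLSURL": "", '
        '"renditions": [{"container": "MP4", '
        '"URL": "http://example.com/video.mp4", "rate": 2000000}]}]};'
    )
    sample_vidoid = "12345"  # last path segment of a hypothetical dbtv.no URL

    demo_match = re.search(r'JSONdata = ({.*});', sample_page)
    for entry in json.loads(demo_match.group(1))["playlist"]:
        if entry["brightcoveId"] == int(sample_vidoid):
            for rendition in entry["renditions"]:
                if rendition["container"] == "MP4":
                    print("%s %s" % (rendition["URL"], int(rendition["rate"]) / 1000))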