# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import base64
import re
import json
import copy
from urllib.parse import urljoin, urlparse

from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.fetcher.rtmp import RTMP
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.subtitle import subtitle
from svtplay_dl.error import ServiceError


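# Service plugin for on-demand video pages on dr.dk (DR TV): get() yields
# stream fetchers (HLS/HDS/RTMP) and subtitles, find_all_episodes() collects
# episode page URLs from the programme page.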
class Dr(Service, OpenGraphThumbMixin):
    supported_domains = ['dr.dk']

    def get(self):
        data = self.get_urldata()

        match = re.search(r'resource:[ ]*"([^"]*)",', data)
        if match:
            resource_url = match.group(1)
            resource_data = self.http.request("get", resource_url).content
            resource = json.loads(resource_data)
            streams = self.find_stream(self.config, resource)
            for i in streams:
                yield i
        else:
            match = re.search(r'resource="([^"]*)"', data)
            if not match:
                yield ServiceError("Can't find resource info for this video")
                return
            # The resource attribute can be protocol relative ("//...").
            if match.group(1)[:4] != "http":
                resource_url = "http:{0}".format(match.group(1))
            else:
                resource_url = match.group(1)
            resource_data = self.http.request("get", resource_url).text
            resource = json.loads(resource_data)

            if "Links" not in resource:
                yield ServiceError("Can't access this video. It is geoblocked.")
                return
            if "SubtitlesList" in resource and len(resource["SubtitlesList"]) > 0:
                suburl = resource["SubtitlesList"][0]["Uri"]
                yield subtitle(copy.copy(self.config), "wrst", suburl)
            if "Data" in resource:
                streams = self.find_stream(self.config, resource)
                for i in streams:
                    yield i
            else:
                for stream in resource['Links']:
                    if stream["Target"] == "HDS":
                        streams = hdsparse(copy.copy(self.config),
                                           self.http.request("get", stream["Uri"], params={"hdcore": "3.7.0"}),
                                           stream["Uri"])
                        if streams:
                            for n in list(streams.keys()):
                                yield streams[n]
                    if stream["Target"] == "HLS":
                        streams = hlsparse(self.config, self.http.request("get", stream["Uri"]), stream["Uri"])
                        for n in list(streams.keys()):
                            yield streams[n]
                    if stream["Target"] == "Streaming":
                        self.config.set("other", "-v -y '{0}'".format(stream['Uri'].replace("rtmp://vod.dr.dk/cms/", "")))
                        rtmp = "rtmp://vod.dr.dk/cms/"
                        yield RTMP(copy.copy(self.config), rtmp, stream['Bitrate'])

    def find_all_episodes(self, config):
        episodes = []
        matches = re.findall(r'<button class="show-more" data-url="([^"]+)" data-partial="([^"]+)"',
                             self.get_urldata())
        for encpath, enccomp in matches:
            # The "show more" data-url is base64 encoded; new style values carry
            # the encoded query string as a separate, underscore separated segment.
            newstyle = '_' in encpath
            if newstyle:
                encbasepath = encpath.split('_')[0]
                path = base64.b64decode(encbasepath + '===').decode('latin1')
            else:
                path = base64.b64decode(encpath + '===').decode('latin1')

            if '/view/' in path:
                continue

            # Ask the partial endpoint for up to 1000 episodes in one request.
            params = 'offset=0&limit=1000'
            if newstyle:
                encparams = base64.b64encode(params.encode('latin1')).decode('latin1').rstrip('=')
                encpath = '{0}_{1}'.format(encbasepath, encparams)
            else:
                path = '{0}?{1}'.format(urlparse(path).path, params)
                encpath = base64.b64encode(path.encode('latin1')).decode('latin1').rstrip('=')

            url = urljoin('https://www.dr.dk/tv/partial/',
                          '{0}/{1}'.format(enccomp, encpath))
            data = self.http.request('get', url).content.decode('latin1')

            matches = re.findall(r'"program-link" href="([^"]+)">', data)
            episodes = [urljoin('https://www.dr.dk/', url) for url in matches]
            break

        if not episodes:
            prefix = '/'.join(urlparse(self.url).path.rstrip('/').split('/')[:-1])
            matches = re.findall(r'"program-link" href="([^"]+)">', self.get_urldata())
            episodes = [urljoin('https://www.dr.dk/', url)
                        for url in matches
                        if url.startswith(prefix)]

        if config.get("all_last") != -1:
            episodes = episodes[:config.get("all_last")]
        else:
            episodes.reverse()

        return episodes

    def find_stream(self, config, resource):
        tempresource = resource['Data'][0]['Assets']
        # To find the VideoResource, they have Images as well
        links = []
        for resources in tempresource:
            if resources['Kind'] == 'VideoResource':
                links = resources['Links']
                break
        for i in links:
            if i["Target"] == "Ios" or i["Target"] == "HLS":
                streams = hlsparse(config, self.http.request("get", i["Uri"]), i["Uri"], output=self.output)
                for n in list(streams.keys()):
                    yield streams[n]
            else:
                if i["Target"] == "Streaming":
                    config.set("other", "-y '{0}'".format(i["Uri"].replace("rtmp://vod.dr.dk/cms/", "")))
                    rtmp = "rtmp://vod.dr.dk/cms/"
                    yield RTMP(copy.copy(config), rtmp, i["Bitrate"], output=self.output)
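
# Rough usage sketch (not part of the module): the class is normally driven by
# svtplay-dl's own CLI. Assuming a config object with get()/set() like the one
# svtplay-dl builds from its defaults, and a dr.dk episode URL (both are
# placeholders here), something along these lines should work:
#
#     service = Dr(config, "https://www.dr.dk/tv/se/some-show/some-episode")
#     for item in service.get():
#         ...  # each item is a fetcher (HLS/HDS/RTMP), a subtitle, or a ServiceError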