# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import re
import xml.etree.ElementTree as ET
from urllib.parse import urlparse

from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.service import Service


class Solidtango(Service):
    supported_domains_re = [r"^([^.]+\.)*solidtango.com"]
    supported_domains = ["mm-resource-service.herokuapp.com", "solidtango.com"]

    def get(self):
        data = self.get_urldata()
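
        # The landing page may only embed the player: follow mm-resource-service
        # and solidtango embeds (or a player URL in <title>) to the player page.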
        match = re.search('src="(http://mm-resource-service.herokuapp.com[^"]*)"', data)
        if match:
            data = self.http.request("get", match.group(1)).text
            match = re.search('src="(https://[^"]+solidtango[^"]+)" ', data)
            if match:
                data = self.http.request("get", match.group(1)).text
        match = re.search(r"<title>(http[^<]+)</title>", data)
        if match:
            data = self.http.request("get", match.group(1)).text
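
        # The player page marks live streams with either spelling of the flag.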
        match = re.search("is_livestream: true", data)
        if match:
            self.config.set("live", True)
        match = re.search("isLivestream: true", data)
        if match:
            self.config.set("live", True)
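
        # The stream URL is exposed either as html5_source or as hlsURI.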
        match = re.search('html5_source: "([^"]+)"', data)
        match2 = re.search('hlsURI: "([^"]+)"', data)
        if match:
            streams = hlsparse(
                self.config,
                self.http.request("get", match.group(1)),
                match.group(1),
                output=self.output,
            )
            for n in list(streams.keys()):
                yield streams[n]
        elif match2:
            streams = hlsparse(
                self.config,
                self.http.request("get", match2.group(1)),
                match2.group(1),
                output=self.output,
            )
            for n in list(streams.keys()):
                yield streams[n]
        else:
            # No player URL in the page: fall back to the site's play API,
            # which returns an XML document containing the HLS manifest URL.
            parse = urlparse(self.url)
            url2 = "https://{}/api/v1/play/{}.xml".format(
                parse.netloc, parse.path[parse.path.rfind("/") + 1 :]
            )
            data = self.http.request("get", url2)
            if data.status_code != 200:
                yield ServiceError(
                    "Can't find video info. If there is a video on the page, it's a bug."
                )
                return
            xmldoc = data.text
            xml = ET.XML(xmldoc)
            # The first <manifest> element holds the playlist URL passed to hlsparse.
            elements = xml.findall(".//manifest")
            streams = hlsparse(
                self.config,
                self.http.request("get", elements[0].text),
                elements[0].text,
                output=self.output,
            )
            for n in list(streams.keys()):
                yield streams[n]