2015-11-29 16:32:53 +01:00
|
|
|
import json
|
2018-11-18 12:47:19 +01:00
|
|
|
import logging
|
2019-08-25 00:40:39 +02:00
|
|
|
import re
|
2014-08-11 20:41:09 +02:00
|
|
|
import xml.etree.ElementTree as ET
|
2018-01-30 22:07:21 +01:00
|
|
|
from urllib.parse import urlparse
|
2014-08-11 20:41:09 +02:00
|
|
|
|
2015-09-06 14:19:10 +02:00
|
|
|
from svtplay_dl.error import ServiceError
|
2015-11-29 16:32:53 +01:00
|
|
|
from svtplay_dl.fetcher.hls import hlsparse
|
2019-08-25 00:40:39 +02:00
|
|
|
from svtplay_dl.service import OpenGraphThumbMixin
|
|
|
|
from svtplay_dl.service import Service
|
2014-08-11 20:41:09 +02:00
|
|
|
|
2015-09-15 20:10:32 +02:00
|
|
|
|
2014-08-11 20:41:09 +02:00
|
|
|
# This is _very_ similar to mtvservices..
|
|
|
|
class Mtvnn(Service, OpenGraphThumbMixin):
    """Extract video streams from the Nickelodeon / Comedy Central sites."""

    supported_domains = ["nickelodeon.se", "nickelodeon.nl", "nickelodeon.no", "www.comedycentral.se", "nickelodeon.dk"]

    def get(self):
        """Yield stream objects (or ServiceError) for the page in ``self.url``.

        Swedish (``*.se``) domains embed the video id in a
        ``<div class="video-player">`` tag and are resolved through the
        mtvnservices feed; the other domains carry a ``data-mrss`` attribute
        pointing at the mtvnn.com MRSS feed.
        """
        data = self.get_urldata()
        parse = urlparse(self.url)

        if parse.netloc.endswith("se"):
            match = re.search(r'<div class="video-player" (.*)>', data)
            if not match:
                yield ServiceError("Can't find video info")
                return

            match_id = re.search(r'data-id="([0-9a-fA-F|\-]+)" ', match.group(1))
            if not match_id:
                yield ServiceError("Can't find video info")
                return

            wanted_id = match_id.group(1)
            url_service = (
                "http://feeds.mtvnservices.com/od/feed/intl-mrss-player-feed?mgid=mgid:arc:episode:nick.intl:{}"
                "&arcEp=nickelodeon.se&imageEp=nickelodeon.se&stage=staging&accountOverride=intl.mtvi.com&ep=a9cc543c".format(wanted_id)
            )
            service_asset = self.http.request("get", url_service)
            match_guid = re.search('<guid isPermaLink="false">(.*)</guid>', service_asset.text)
            if not match_guid:
                yield ServiceError("Can't find video info")
                return

            hls_url = (
                "https://mediautilssvcs-a.akamaihd.net/services/MediaGenerator/{}?arcStage=staging&accountOverride=intl.mtvi.com&"
                "billingSection=intl&ep=a9cc543c&acceptMethods=hls".format(match_guid.group(1))
            )
            hls_asset = self.http.request("get", hls_url)
            xml = ET.XML(hls_asset.text)

            # Walk ./video/item/rendition/src defensively; any missing level
            # means the media generator returned no HLS rendition.
            if (
                xml.find("./video") is not None
                and xml.find("./video").find("item") is not None
                and xml.find("./video").find("item").find("rendition") is not None
                and xml.find("./video").find("item").find("rendition").find("src") is not None
            ):
                hls_url = xml.find("./video").find("item").find("rendition").find("src").text
                # BUGFIX: hlsparse() is consumed as a generator everywhere else
                # in this file; the old dict-style ``stream.keys()`` iteration
                # here would raise AttributeError.
                yield from hlsparse(self.config, self.http.request("get", hls_url), hls_url, output=self.output)
            return

        match = re.search(r'data-mrss=[\'"](http://gakusei-cluster.mtvnn.com/v2/mrss.xml[^\'"]+)[\'"]', data)
        if not match:
            yield ServiceError("Can't find id for the video")
            return

        mrssxmlurl = match.group(1)
        data = self.http.request("get", mrssxmlurl).content
        xml = ET.XML(data)
        title = xml.find("channel").find("item").find("title").text
        self.output["title"] = title

        match = re.search("gon.viacom_config=([^;]+);", self.get_urldata())
        if match:
            countrycode = json.loads(match.group(1))["country_code"].replace("_", "/")

            # NOTE(review): the playlist lookup below is nested under the
            # viacom_config match so ``countrycode`` is always bound when the
            # API url is built (avoids a possible NameError) — confirm this
            # matches the intended best-effort behaviour.
            match = re.search("mtvnn.com:([^&]+)", mrssxmlurl)
            if match:
                urlpart = match.group(1).replace("-", "/").replace("playlist", "playlists")  # the api path uses "playlists", the page uses "playlist"
                hlsapi = f"http://api.mtvnn.com/v2/{countrycode}/{urlpart}.json?video_format=m3u8&callback=&"
                data = self.http.request("get", hlsapi).text

                dataj = json.loads(data)
                for i in dataj["local_playlist_videos"]:
                    yield from hlsparse(self.config, self.http.request("get", i["url"]), i["url"], output=self.output)

    def find_all_episodes(self, config):
        """Return episode-page URLs for the show on the current page.

        Honours ``config.get("all_last")`` as a cap on how many (sorted)
        episode ids are turned into URLs.  Returns None when the page
        cannot be parsed.
        """
        match = re.search(r"data-franchise='([^']+)'", self.get_urldata())
        if match is None:
            # BUGFIX: message previously read "Couldn't program id".
            logging.error("Couldn't find program id")
            return
        programid = match.group(1)

        match = re.findall(r"<li class='([a-z]+ )?playlist-item( [a-z]+)*?'( data-[-a-z]+='[^']+')* data-item-id='([^']+)'", self.get_urldata())
        if not match:
            logging.error("Couldn't retrieve episode list")
            return

        # Group 4 of every hit is the data-item-id value.
        episode_ids = [hit[3] for hit in match]

        episodes = []
        for n, episode_id in enumerate(sorted(episode_ids)):
            if n == config.get("all_last"):
                break
            episodes.append(f"http://www.nickelodeon.se/serier/{programid}-something/videos/{episode_id}-something")
        return episodes
|
2018-01-27 00:03:42 +01:00
|
|
|
|
|
|
|
|
|
|
|
class MtvMusic(Service, OpenGraphThumbMixin):
    """Extract HLS streams from mtv.se music-video pages."""

    supported_domains = ["mtv.se"]

    def get(self):
        """Yield HLS streams (or a ServiceError) for the video in ``self.url``."""
        page = self.get_urldata()

        playlist_match = re.search("window.pagePlaylist = (.*);", page)
        if not playlist_match:
            yield ServiceError("Can't find video info")
            return

        raw_playlist = playlist_match.group(1)
        try:
            playlist = json.loads(raw_playlist)
        except Exception:
            yield ServiceError(f"Can't decode api request: {raw_playlist}")
            return

        # The wanted id is the number leading the last path segment of the url.
        wanted_id = urlparse(self.url).path.split("/")[-1].split("-")[0]

        for entry in playlist:
            if str(entry["id"]) != wanted_id:
                continue

            mrssxmlurl = (
                "http://media-utils.mtvnservices.com/services/MediaGenerator/"
                "mgid:arc:video:mtv.se:{}?acceptMethods=hls".format(entry["video_token"])
            )
            asset = self.http.request("get", mrssxmlurl)
            tree = ET.XML(asset.text)

            # Descend ./video -> item -> rendition -> src one level at a time;
            # any missing element simply means no stream for this entry.
            video = tree.find("./video")
            item = video.find("item") if video is not None else None
            rendition = item.find("rendition") if item is not None else None
            src = rendition.find("src") if rendition is not None else None

            if src is not None:
                hls_url = src.text
                yield from hlsparse(self.config, self.http.request("get", hls_url), hls_url, output=self.output)
|