# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import re
import json
import copy
import os

from svtplay_dl.service import Service
from svtplay_dl.utils import filenamify
from svtplay_dl.log import log
from svtplay_dl.fetcher.rtmp import RTMP
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.subtitle import subtitle
from svtplay_dl.error import ServiceError


class Kanal5(Service):
    supported_domains = ['kanal5play.se', 'kanal9play.se', 'kanal11play.se']

    def __init__(self, url):
        Service.__init__(self, url)
        self.cookies = {}
        self.subtitle = None

    def get(self, options):
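        # Resolve the numeric video id from the URL, log in if credentials are
        # given, then yield subtitles plus RTMP (FLASH) and HLS (IPAD) fetchers.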
        match = re.search(r".*video/([0-9]+)", self.url)
        if not match:
            yield ServiceError("Can't find video file")
            return

        video_id = match.group(1)
        if options.username and options.password:
            # get session cookie
            data = self.http.request("get", "http://www.kanal5play.se/", cookies=self.cookies)
            authurl = "https://kanal5swe.appspot.com/api/user/login?callback=jQuery171029989&email=%s&password=%s&_=136250" % \
                      (options.username, options.password)
            data = self.http.request("get", authurl, cookies=data.cookies).text
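            # The login endpoint answers with JSONP; strip the callback wrapper
            # and parse the embedded JSON object.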
            match = re.search(r"({.*})\);", data)
            jsondata = json.loads(match.group(1))
            if jsondata["success"] is False:
                yield ServiceError(jsondata["message"])
                return
            authToken = jsondata["userData"]["auth"]
            self.cookies = {"authToken": authToken}
            options.cookies = self.cookies
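        # Fetch the video metadata in FLASH format; this response drives the
        # RTMP streams yielded below.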
        url = "http://www.kanal5play.se/api/getVideo?format=FLASH&videoId=%s" % video_id
        data = self.http.request("get", url, cookies=self.cookies).text
        data = json.loads(data)
        options.cookies = self.cookies
        if not options.live:
            options.live = data["isLive"]
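        # Build the output filename from programme name, season, episode text,
        # video id and service name.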
        if options.output_auto:
            directory = os.path.dirname(options.output)
            options.service = "kanal5"
            title = "%s-s%s-%s-%s-%s" % (data["program"]["name"], data["seasonNumber"], data["episodeText"], data["id"], options.service)
            title = filenamify(title)
            if len(directory):
                options.output = os.path.join(directory, title)
            else:
                options.output = title

        if self.exclude(options):
            yield ServiceError("Excluding video")
            return

        if data["hasSubtitle"]:
            yield subtitle(copy.copy(options), "json", "http://www.kanal5play.se/api/subtitles/%s" % video_id)

        if options.force_subtitle:
            return
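        # The FLASH metadata lists RTMP streams; DRM-protected entries cannot be
        # downloaded. The "show" flag gates whether reasonsForNoStreams is
        # reported as an error at the end.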
        show = True
        if "streams" in data.keys():
            for i in data["streams"]:
                if i["drmProtected"]:
                    yield ServiceError("We can't download DRM protected files for this site.")
                    return
                steambaseurl = data["streamBaseUrl"]
                bitrate = i["bitrate"]
                if bitrate > 1000:
                    bitrate = bitrate / 1000
                options2 = copy.copy(options)
                options2.other = "-W %s -y %s " % ("http://www.kanal5play.se/flash/K5StandardPlayer.swf", i["source"])
                options2.live = True
                yield RTMP(options2, steambaseurl, bitrate)
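        # IPAD metadata: the same video exposed as HLS; parse each master
        # playlist and yield every quality it contains.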
        url = "http://www.kanal5play.se/api/getVideo?format=IPAD&videoId=%s" % video_id
        data = self.http.request("get", url, cookies=self.cookies)
        data = json.loads(data.text)
        if "reasonsForNoStreams" in data:
            show = False
        if "streams" in data.keys():
            for i in data["streams"]:
                streams = hlsparse(options, self.http.request("get", i["source"]), i["source"])
                for n in list(streams.keys()):
                    yield streams[n]
        if "reasonsForNoStreams" in data and show:
            yield ServiceError(data["reasonsForNoStreams"][0])

    def find_all_episodes(self, options):
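        # Walk the programme's seasons in descending order and collect episode
        # URLs; options.all_last caps how many episodes are collected, and the
        # walk ends when a season page returns 404.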
        program = re.search(r".*/program/(\d+)", self.url)
        if not program:
            log.error("Can't find program id in url")
            return None
        baseurl = "http://www.kanal5play.se/content/program/%s" % program.group(1)
        data = self.http.request("get", baseurl).text
        # "sasong" is Swedish for "season"; the first season link found on the
        # programme page is the starting point for the walk downwards.
        sasong = re.search(r"/program/\d+/sasong/(\d+)", data)
        if not sasong:
            log.error("Can't find season id")
            return None
        seasong = int(sasong.group(1))
        episodes = []
        n = 0
        more = True
        while more:
            url = "%s/sasong/%s" % (baseurl, seasong)
            data = self.http.request("get", url)
            if data.status_code == 404:
                more = False
            else:
                regex = re.compile(r'href="(/play/program/\d+/video/\d+)"')
                for match in regex.finditer(data.text):
                    if n == options.all_last:
                        break
                    url2 = "http://www.kanal5play.se%s" % match.group(1)
                    if url2 not in episodes:
                        episodes.append(url2)
                    n += 1
                seasong -= 1
        return episodes