# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import re
import json
import copy

from svtplay_dl.service import Service
from svtplay_dl.utils import decode_html_entities
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import HLS, hlsparse


class Aftonbladet(Service):
    supported_domains = ['tv.aftonbladet.se']

    def get(self, options):
        data = self.get_urldata()

        if self.exclude(options):
            return
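
        # The page markup carries the video id either as a data-aptomaId
        # attribute or inside a data-player-config JSON blob; try both.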
        match = re.search('data-aptomaId="([-0-9a-z]+)"', data)
        if not match:
            match = re.search('data-player-config="([^"]+)"', data)
            if not match:
                yield ServiceError("Can't find video info")
                return
            janson = json.loads(decode_html_entities(match.group(1)))
            videoId = janson["videoId"]
        else:
            videoId = match.group(1)
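            # A data-isLive attribute tells us whether this is a live stream.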
            match = re.search(r'data-isLive="(\w+)"', data)
            if not match:
                yield ServiceError("Can't find live info")
                return
            if match.group(1) == "true":
                options.live = True
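
        # For VOD, resolve the real video id via the Aptoma metadata endpoint
        # before asking for the stream formats.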
        if not options.live:
            dataurl = "http://aftonbladet-play-metadata.cdn.drvideo.aptoma.no/video/%s.json" % videoId
            data = self.http.request("get", dataurl).text
            data = json.loads(data)
            videoId = data["videoId"]
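
        # Ask the CDN which stream formats are available for this video.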
        streamsurl = "http://aftonbladet-play-static-ext.cdn.drvideo.aptoma.no/actions/video/?id=%s&formats&callback=" % videoId
        data = self.http.request("get", streamsurl).text
        streams = json.loads(data)
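        # Pick an HLS stream set, preferring the Level3 CDN over Akamai,
        # and a csmil entry over a plain m3u8 one when both are offered.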
        hlsstreams = streams["formats"]["hls"]
        if "level3" in hlsstreams.keys():
            hls = hlsstreams["level3"]
        else:
            hls = hlsstreams["akamai"]
        if "csmil" in hls.keys():
            hls = hls["csmil"][0]
        else:
            hls = hls["m3u8"][0]
        address = hls["address"]
        path = hls["path"]
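
        # Build the master playlist URL for each file entry (falling back to
        # the top-level filename) and yield the HLS streams found in each playlist.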
        for i in hls["files"]:
            if "filename" in i.keys():
                plist = "http://%s/%s/%s/master.m3u8" % (address, path, i["filename"])
            else:
                plist = "http://%s/%s/%s" % (address, path, hls["filename"])

            streams = hlsparse(plist, self.http.request("get", plist).text)
            if streams:
                for n in list(streams.keys()):
                    yield HLS(copy.copy(options), streams[n], n)