2013-03-02 21:26:28 +01:00
|
|
|
# ex:ts=4:sw=4:sts=4:et
|
|
|
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
2013-03-01 23:39:42 +01:00
|
|
|
from __future__ import absolute_import
|
2013-01-17 00:21:47 +01:00
|
|
|
import re
|
2014-02-05 23:15:19 +01:00
|
|
|
import json
|
2014-06-07 20:43:40 +02:00
|
|
|
import copy
|
2013-01-17 00:21:47 +01:00
|
|
|
|
2013-04-21 12:44:31 +02:00
|
|
|
from svtplay_dl.service import Service
|
2014-12-08 23:07:02 +01:00
|
|
|
from svtplay_dl.utils import get_http_data
|
2013-03-17 19:55:19 +01:00
|
|
|
from svtplay_dl.log import log
|
2014-09-27 22:34:37 +02:00
|
|
|
from svtplay_dl.fetcher.hls import HLS, hlsparse
|
2013-02-12 19:43:37 +01:00
|
|
|
|
2013-04-21 12:44:31 +02:00
|
|
|
class Aftonbladet(Service):
    """Download service for tv.aftonbladet.se (Aptoma-hosted HLS video)."""

    supported_domains = ['tv.aftonbladet.se']

    def get(self, options):
        """Yield one HLS fetcher per available quality for the current page.

        :param options: parsed command-line options; ``options.live`` is set
            to True when the page declares the stream live.
        :yields: HLS fetcher objects (a copy of *options*, stream URL, bitrate).

        On any download or lookup failure an error is logged and the
        generator returns early without yielding.
        """
        error, data = self.get_urldata()
        if error:
            log.error("Cant download page")
            return

        if self.exclude(options):
            return

        # The Aptoma player embeds its video id in a data attribute.
        match = re.search('data-aptomaId="([-0-9a-z]+)"', data)
        if not match:
            log.error("Can't find video info")
            return
        videoId = match.group(1)

        match = re.search(r'data-isLive="(\w+)"', data)
        if not match:
            log.error("Can't find live info")
            return
        if match.group(1) == "true":
            options.live = True

        if not options.live:
            # VOD pages expose a metadata document whose "videoId" supersedes
            # the page-embedded id when querying for stream formats.
            dataurl = "http://aftonbladet-play-metadata.cdn.drvideo.aptoma.no/video/%s.json" % videoId
            error, data = get_http_data(dataurl)
            if error:
                # fixed typo in message: "vidoe" -> "video"
                log.error("Cant get video info")
                return
            data = json.loads(data)
            videoId = data["videoId"]

        streamsurl = "http://aftonbladet-play-static-ext.cdn.drvideo.aptoma.no/actions/video/?id=%s&formats&callback=" % videoId
        error, data = get_http_data(streamsurl)
        if error:
            log.error("Cant download video info")
            return
        streams = json.loads(data)

        # Prefer the Level3 CDN entry when present, otherwise Akamai.
        hlsstreams = streams["formats"]["hls"]
        if "level3" in hlsstreams:
            hls = hlsstreams["level3"]
        else:
            hls = hlsstreams["akamai"]

        # Stream entries come either as "csmil" lists or plain "m3u8" lists;
        # either way the first entry carries the address/path/files data.
        if "csmil" in hls:
            hls = hls["csmil"][0]
        else:
            hls = hls["m3u8"][0]

        address = hls["address"]
        path = hls["path"]

        # NOTE(review): only the playlist URL built on the LAST iteration is
        # used below — presumably every entry resolves to the same master
        # playlist; confirm against real API responses.
        plist = None
        for i in hls["files"]:
            if "filename" in i:
                plist = "http://%s/%s/%s/master.m3u8" % (address, path, i["filename"])
            else:
                plist = "http://%s/%s/%s" % (address, path, hls["filename"])
        if plist is None:
            # Previously an empty "files" list crashed with NameError.
            log.error("Cant find stream playlist")
            return

        streams = hlsparse(plist)
        if streams:
            for n in list(streams.keys()):
                yield HLS(copy.copy(options), streams[n], n)
|