# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import sys
import socket
import logging
import re
import time
import unicodedata

try:
    import HTMLParser
except ImportError:
    # pylint: disable-msg=import-error
    import html.parser as HTMLParser

is_py2 = (sys.version_info[0] == 2)
is_py3 = (sys.version_info[0] == 3)
is_py2_old = (sys.version_info < (2, 7))

# Used for UA spoofing in get_http_data()
FIREFOX_UA = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'

from svtplay_dl.utils.urllib import build_opener, Request, HTTPCookieProcessor, \
    HTTPRedirectHandler, HTTPError, URLError, \
    addinfourl, CookieJar

log = logging.getLogger('svtplay_dl')
progress_stream = sys.stderr


class NoRedirectHandler(HTTPRedirectHandler):
    def __init__(self):
        pass

    def http_error_302(self, req, fp, code, msg, headers):
        infourl = addinfourl(fp, headers, req.get_full_url())
        infourl.status = code
        infourl.code = code
        return infourl

    http_error_300 = http_error_302
    http_error_301 = http_error_302
    http_error_303 = http_error_302
    http_error_307 = http_error_302
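
# Usage sketch (see check_redirect() below, which does exactly this): an
# opener built with NoRedirectHandler() returns the 3xx response itself
# instead of following the redirect, so its code and Location header can
# be inspected:
#   opener = build_opener(NoRedirectHandler())
#   response = opener.open(url)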


def get_http_data(url, header=None, post=None, useragent=FIREFOX_UA,
                  referer=None, cookiejar=None):
    """ Get the page to parse it for streams """
    if not cookiejar:
        cookiejar = CookieJar()

    log.debug("HTTP getting %r", url)
    starttime = time.time()
    error = None
    if post:
        if is_py3:
            post = bytes(post, encoding="utf-8")
        request = Request(url, data=post)
    else:
        request = Request(url)
    standard_header = {'Referer': referer, 'User-Agent': useragent}
    for key, value in [head for head in standard_header.items() if head[1]]:
        request.add_header(key, value)
    if header:
        for key, value in [head for head in header.items() if head[1]]:
            request.add_header(key, value)

    opener = build_opener(HTTPCookieProcessor(cookiejar))

    try:
        response = opener.open(request)
    except HTTPError as e:
        error = True
        data = e.read()
        return error, data

    if is_py3:
        data = response.read()
        try:
            data = data.decode("utf-8")
        except UnicodeDecodeError:
            pass
    else:
        try:
            data = response.read()
        except socket.error:
            return True, "Lost the connection to the server"
    response.close()

    spent_time = time.time() - starttime
    bps = 8 * len(data) / max(spent_time, 0.001)

    log.debug("HTTP got %d bytes from %r in %.2fs (= %dbps)",
              len(data), url, spent_time, bps)

    return error, data
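
# Usage sketch (hypothetical URL and parse() helper): callers unpack the
# (error, data) pair and check the flag before using the body:
#   error, data = get_http_data("http://example.com/asset.xml")
#   if not error:
#       parse(data)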


def check_redirect(url):
    opener = build_opener(NoRedirectHandler())
    opener.addheaders += [('User-Agent', FIREFOX_UA)]
    response = opener.open(url)
    if response.code in (300, 301, 302, 303, 307):
        return response.headers["location"]
    else:
        return url
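
# Usage sketch (hypothetical URL): returns the redirect target for a 3xx
# answer, otherwise the URL unchanged:
#   final_url = check_redirect("http://example.com/short")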


def select_quality(options, streams):
    available = sorted(int(x.bitrate) for x in streams)

    try:
        optq = int(options.quality)
    except ValueError:
        log.error("Requested quality needs to be a number")
        sys.exit(4)
    if optq:
        try:
            optf = int(options.flexibleq)
        except ValueError:
            log.error("Flexible-quality needs to be a number")
            sys.exit(4)
        if not optf:
            wanted = [optq]
        else:
            wanted = range(optq - optf, optq + optf + 1)
    else:
        wanted = [available[-1]]

    selected = None
    for q in available:
        if q in wanted:
            selected = q
            break
    if not selected and selected != 0:
        data = sorted(streams, key=lambda x: (x.bitrate, x.name()), reverse=True)
        datas = []
        for i in data:
            datas.append([i.bitrate, i.name()])
        quality = ", ".join("%s (%s)" % (str(x), str(y)) for x, y in datas)
        log.error("Can't find that quality. Try one of: %s (or try --flexible-quality)", quality)
        sys.exit(4)
    for i in streams:
        if int(i.bitrate) == selected:
            stream = i

    return stream
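
# Worked example (hypothetical numbers): with --quality 2000 and
# --flexible-quality 500, wanted becomes range(1500, 2501); the ascending
# scan over available bitrates then selects the lowest one in that window.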


def ensure_unicode(s):
    """
    Ensure string is a unicode string. If it isn't, assume it is
    utf-8 and decode it to a unicode string.
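
    >>> print(ensure_unicode(b'svtplay'))
    svtplay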
    """
    if (is_py2 and isinstance(s, str)) or (is_py3 and isinstance(s, bytes)):
        s = s.decode('utf-8', 'replace')
    return s


def decode_html_entities(s):
    """
    Replaces html entities with the character they represent.

    >>> print(decode_html_entities("&lt;3 &amp;"))
    <3 &
    """
    parser = HTMLParser.HTMLParser()

    def unesc(m):
        return parser.unescape(m.group())
    return re.sub(r'(&[^;]+;)', unesc, ensure_unicode(s))


def filenamify(title):
    """
    Convert a string to something suitable as a file name.

    >>> print(filenamify(u'Matlagning del 1 av 10 - R\xe4ksm\xf6rg\xe5s | SVT Play'))
    matlagning-del-1-av-10-raksmorgas-svt-play
    """
    # ensure it is unicode
    title = ensure_unicode(title)

    # NFD decomposes chars into base char and diacritical mark, which
    # means that we will get base char when we strip out non-ascii.
    title = unicodedata.normalize('NFD', title)

    # Drop any non ascii letters/digits. The dash goes last in the character
    # class so it is matched literally instead of forming a range.
    title = re.sub(r'[^a-zA-Z0-9 .-]', '', title)
    # Drop any leading/trailing whitespace that may have appeared
    title = title.strip()
    # Lowercase
    title = title.lower()
    # Replace whitespace with dash
    title = re.sub(r'[-\s]+', '-', title)

    return title


def download_thumbnail(options, url):
    error, data = get_http_data(url)
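    # Note (assumption about the usual case): for binary image data the
    # utf-8 decode inside get_http_data() fails with UnicodeDecodeError and
    # is skipped, so `data` remains bytes and can be written in "wb" mode.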

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    tbn = "%s.tbn" % filename.group(1)
    log.info("Thumbnail: %s", tbn)

    fd = open(tbn, "wb")
    fd.write(data)
    fd.close()