# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import sys
import logging
# re, json, socket and ElementTree are used by get_http_data() and the
# subtitle helpers below.
import re
import json
import socket
import xml.etree.ElementTree as ET

if sys.version_info > (3, 0):
    from urllib.request import Request, urlopen, build_opener, HTTPCookieProcessor
    from urllib.error import HTTPError, URLError
    from urllib.parse import urlparse, parse_qs, unquote_plus, quote_plus
    from io import BytesIO as StringIO
    from http.cookiejar import CookieJar, Cookie
else:
    from urllib2 import Request, urlopen, HTTPError, URLError, build_opener, HTTPCookieProcessor
    from urlparse import urlparse, parse_qs
    from urllib import unquote_plus, quote_plus
    from StringIO import StringIO
    from cookielib import CookieJar, Cookie

log = logging.getLogger('svtplay_dl')
progress_stream = sys.stderr


def get_http_data(url, method="GET", header="", data="", referer=None, cookiejar=None):
    """ Get the page to parse it for streams """
    if not cookiejar:
        cookiejar = CookieJar()

    request = build_opener(HTTPCookieProcessor(cookiejar))
    request.addheaders += [('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')]

    if len(header) > 0:
        request.addheaders += [('Content-Type', header)]
    if referer:
        request.addheaders += [('Referer', referer)]

    try:
        if len(data) > 0:
            # The opener returned by build_opener() has no add_data() method;
            # pass the body to open() so the request is sent as a POST.
            response = request.open(url, data)
        else:
            response = request.open(url)
    except HTTPError as e:
        log.error("Something wrong with that url")
        log.error("Error code: %s" % e.code)
        sys.exit(5)
    except URLError as e:
        log.error("Something wrong with that url")
        log.error("Error code: %s" % e.reason)
        sys.exit(5)
    except ValueError:
        log.error("Try adding http:// before the url")
        sys.exit(5)

    if sys.version_info > (3, 0):
        data = response.read()
        try:
            data = data.decode("utf-8")
        except UnicodeDecodeError:
            pass
    else:
        try:
            data = response.read()
        except socket.error:
            log.error("Lost the connection to the server")
            sys.exit(5)

    response.close()
    return data
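# Illustrative use only (hypothetical URL): get_http_data() returns the response
# body, which the service modules then parse for stream addresses:
#
#     page = get_http_data("http://www.example.com/video/1234", referer="http://www.example.com/")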


def timestr(seconds):
    """ Convert a duration given in milliseconds to an SRT time string (HH:MM:SS,ss) """
    total = float(seconds) / 1000
    hours = int(total / 3600)
    # Take minutes modulo 60 so durations over an hour do not overflow the
    # minute field, and zero-pad the seconds so the timestamp stays fixed-width.
    minutes = int(total / 60) % 60
    sec = total % 60
    output = "%02d:%02d:%05.2f" % (hours, minutes, sec)
    return output.replace(".", ",")
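# For example, 61500 milliseconds is rendered as "00:01:01,50" (an illustration
# of the expected output, not a doctest).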


def norm(name):
    """ Strip any {namespace} prefix from an ElementTree tag name """
    if name[0] == "{":
        uri, tag = name[1:].split("}")
        return tag
    else:
        return name
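# For instance, norm("{http://www.w3.org/ns/ttml}p") returns "p", while a tag
# without a namespace prefix is returned unchanged.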


def subtitle_tt(options, data):
    """ Convert Timed Text (TT) XML subtitles to SRT """
    i = 1
    subs = ""
    skip = False
    # 'data' holds the XML document itself, so parse it with ET.XML rather than
    # ET.parse (which expects a filename or file object). The converted cues are
    # collected in 'subs' so the input is not overwritten.
    tree = ET.XML(data)
    for node in tree.iter():
        tag = norm(node.tag)
        if tag == "p":
            if skip:
                subs += "\n"
            subs += '%s\n%s,%s --> %s,%s\n' % (i, node.attrib["begin"][:8], node.attrib["begin"][9:], node.attrib["end"][:8], node.attrib["end"][9:])
            subs += '%s\n' % node.text.strip(' \t\n\r')
            skip = True
            i += 1
        if tag == "br":
            if node.tail:
                subs += '%s\n\n' % node.tail.strip(' \t\n\r')
                skip = False

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    if filename:
        options.output = "%s.srt" % filename.group(1)
    log.info("Subtitle: %s", options.output)
    fd = open(options.output, "w")
    fd.write(subs)
    fd.close()
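# Assumed input shape (illustration): a node such as
# <p begin="00:00:01.520" end="00:00:03.900">Hello</p> becomes the SRT cue
# "1\n00:00:01,520 --> 00:00:03,900\nHello".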


def subtitle_json(options, data):
    """ Convert JSON subtitles to SRT """
    data = json.loads(data)
    number = 1
    subs = ""
    for i in data:
        subs += "%s\n%s --> %s\n" % (number, timestr(int(i["startMillis"])), timestr(int(i["endMillis"])))
        subs += "%s\n\n" % i["text"]
        number += 1

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    if filename:
        options.output = "%s.srt" % filename.group(1)
    log.info("Subtitle: %s", options.output)
    fd = open(options.output, "w")
    fd.write(subs)
    fd.close()
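# The expected JSON payload (an assumption based on the keys used above) is a
# list of objects such as:
#
#     [{"startMillis": 1000, "endMillis": 2500, "text": "Hello"}, ...]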


def subtitle_sami(options, data):
    """ Convert SAMI-style XML subtitles to SRT """
    tree = ET.XML(data)
    subt = tree.find("Font")
    subs = ""
    for i in subt.iter():
        if i.tag == "Subtitle":
            # Attribute values are strings, so compare SpotNumber with "1".
            if i.attrib["SpotNumber"] == "1":
                subs += "%s\n%s --> %s\n" % (i.attrib["SpotNumber"], i.attrib["TimeIn"], i.attrib["TimeOut"])
            else:
                subs += "\n%s\n%s --> %s\n" % (i.attrib["SpotNumber"], i.attrib["TimeIn"], i.attrib["TimeOut"])
        else:
            subs += "%s\n" % i.text

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    if filename:
        options.output = "%s.srt" % filename.group(1)
    log.info("Subtitle: %s", options.output)
    fd = open(options.output, "w")
    fd.write(subs)
    fd.close()


def subtitle_smi(options, data):
    """ Convert SMI subtitles to SRT """
    recomp = re.compile(r'<SYNC Start=(\d+)>\s+<P Class=\w+>(.*)<br>\s+<SYNC Start=(\d+)>\s+<P Class=\w+>', re.M | re.I | re.U)
    number = 1
    subs = ""
    for i in recomp.finditer(data):
        subs += "%s\n%s --> %s\n" % (number, timestr(i.group(1)), timestr(i.group(3)))
        text = "%s\n\n" % i.group(2)
        subs += text.replace("<br>", "\n")
        number += 1

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    if filename:
        options.output = "%s.srt" % filename.group(1)
    log.info("Subtitle: %s", options.output)
    fd = open(options.output, "w")
    fd.write(subs)
    fd.close()
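# The regular expression above expects SMI-style blocks of roughly this form
# (an illustration, not taken from a real capture; the class name is made up):
#
#     <SYNC Start=1000>
#     <P Class=SVCC>First line<br>
#     <SYNC Start=2500>
#     <P Class=SVCC>&nbsp;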


def subtitle_wsrt(options, data):
    """ Convert wsrt subtitles to SRT """
    recomp = re.compile(r"(\d+)\r\n([\d:\.]+ --> [\d:\.]+)?([^\r\n]+)?\r\n([^\r\n]+)\r\n(([^\r\n]*)\r\n)?")
    srt = ""
    for i in recomp.finditer(data):
        sub = "%s\n%s\n%s\n" % (i.group(1), i.group(2).replace(".", ","), i.group(4))
        # The second text line is optional, so group(6) can be None.
        if i.group(6):
            sub += "%s\n" % i.group(6)
        sub += "\n"
        sub = re.sub('<[^>]*>', '', sub)
        srt += sub

    filename = re.search(r"(.*)\.[a-z0-9]{2,3}$", options.output)
    if filename:
        options.output = "%s.srt" % filename.group(1)
    log.info("Subtitle: %s", options.output)
    fd = open(options.output, "w")
    fd.write(srt)
    fd.close()


def select_quality(options, streams):
    """ Return the stream matching the requested quality, or the best available one """
    sort = sorted(streams.keys(), key=int)

    if options.quality:
        quality = options.quality
    else:
        # Default to the highest available quality.
        quality = sort.pop()

    try:
        selected = streams[int(quality)]
    except (KeyError, ValueError):
        log.error("Can't find that quality. (Try one of: %s)",
                  ", ".join(map(str, sort)))
        sys.exit(4)

    return selected
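# Illustrative call (hypothetical bitrates): with streams = {1400: "low", 2400: "high"}
# and options.quality unset, select_quality() returns the entry for 2400.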