#!/usr/bin/env python
# -*- coding: utf-8 -*-

import db
import ub
from flask import current_app as app
import logging
import smtplib
from tempfile import gettempdir
import socket
import sys
import os
import traceback
import re
import unicodedata

try:
    from StringIO import StringIO
    from email.MIMEBase import MIMEBase
    from email.MIMEMultipart import MIMEMultipart
    from email.MIMEText import MIMEText
except ImportError:
    from io import StringIO
    from email.mime.base import MIMEBase
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText

from email import encoders
from email.generator import Generator
from email.utils import formatdate
from email.utils import make_msgid
from flask_babel import gettext as _
import subprocess
import threading
import shutil
import requests
import zipfile
from tornado.ioloop import IOLoop
try:
    import gdriveutils as gd
except ImportError:
    pass
import web

try:
    import unidecode
    use_unidecode = True
except Exception:
    use_unidecode = False

# Global variables
global_task = None
updater_thread = None

def update_download(book_id, user_id):
    check = ub.session.query(ub.Downloads).filter(ub.Downloads.user_id == user_id).filter(ub.Downloads.book_id ==
                                                                                          book_id).first()
    if not check:
        new_download = ub.Downloads(user_id=user_id, book_id=book_id)
        ub.session.add(new_download)
        ub.session.commit()

def make_mobi(book_id, calibrepath):
    vendorpath = os.path.join(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) +
                                               os.sep + "../vendor" + os.sep))
    if sys.platform == "win32":
        kindlegen = os.path.join(vendorpath, u"kindlegen.exe")
    else:
        kindlegen = os.path.join(vendorpath, u"kindlegen")
    if not os.path.exists(kindlegen):
        app.logger.error("make_mobi: kindlegen binary not found in: %s" % kindlegen)
        return None
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == 'EPUB').first()
    if not data:
        app.logger.error("make_mobi: epub format not found for book id: %d" % book_id)
        return None

    file_path = os.path.join(calibrepath, book.path, data.name)
    if os.path.exists(file_path + u".epub"):
        p = subprocess.Popen((kindlegen + " \"" + file_path + u".epub\" ").encode(sys.getfilesystemencoding()),
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Poll process for new output until finished
        while True:
            nextline = p.stdout.readline()
            if nextline == '' and p.poll() is not None:
                break
            if nextline != "\r\n":
                app.logger.debug(nextline.strip('\r\n'))

        check = p.returncode
        if not check or check < 2:
            book.data.append(db.Data(
                name=book.data[0].name,
                book_format="MOBI",
                book=book.id,
                uncompressed_size=os.path.getsize(file_path + ".mobi")
            ))
            db.session.commit()
            return file_path + ".mobi"
        else:
            app.logger.error("make_mobi: kindlegen failed with error while converting book")
            return None
    else:
        app.logger.error("make_mobi: epub not found: %s.epub" % file_path)
        return None

class StderrLogger(object):

    buffer = ''

    def __init__(self):
        self.logger = logging.getLogger('cps.web')

    def write(self, message):
        if message == '\n':
            self.logger.debug(self.buffer)
            self.buffer = ''
        else:
            self.buffer += message

def send_raw_email(kindle_mail, msg):
    settings = ub.get_mail_settings()

    msg['From'] = settings["mail_from"]
    msg['To'] = kindle_mail

    use_ssl = int(settings.get('mail_use_ssl', 0))

    # convert MIME message to string
    fp = StringIO()
    gen = Generator(fp, mangle_from_=False)
    gen.flatten(msg)
    msg = fp.getvalue()

    # send email
    try:
        timeout = 600  # set timeout to 10 minutes

        org_stderr = smtplib.stderr
        smtplib.stderr = StderrLogger()

        if use_ssl == 2:
            mailserver = smtplib.SMTP_SSL(settings["mail_server"], settings["mail_port"], timeout=timeout)
        else:
            mailserver = smtplib.SMTP(settings["mail_server"], settings["mail_port"], timeout=timeout)
        mailserver.set_debuglevel(1)

        if use_ssl == 1:
            mailserver.starttls()

        if settings["mail_password"]:
            mailserver.login(settings["mail_login"], settings["mail_password"])
        mailserver.sendmail(settings["mail_login"], kindle_mail, msg)
        mailserver.quit()

        smtplib.stderr = org_stderr

    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
        app.logger.error(traceback.format_exc())
        return _("Failed to send mail: %s" % str(e))

    return None

def send_test_mail(kindle_mail):
    msg = MIMEMultipart()
    msg['Subject'] = _(u'Calibre-web test email')
    text = _(u'This email has been sent via calibre web.')
    msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))
    return send_raw_email(kindle_mail, msg)

def send_mail(book_id, kindle_mail, calibrepath):
    """Send email with attachments"""
    # create MIME message
    msg = MIMEMultipart()
    msg['Subject'] = _(u'Send to Kindle')
    msg['Message-Id'] = make_msgid('calibre-web')
    msg['Date'] = formatdate(localtime=True)
    text = _(u'This email has been sent via calibre web.')
    msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))

    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id)

    formats = {}

    for entry in data:
        if entry.format == "MOBI":
            formats["mobi"] = os.path.join(calibrepath, book.path, entry.name + ".mobi")
        if entry.format == "EPUB":
            formats["epub"] = os.path.join(calibrepath, book.path, entry.name + ".epub")
        if entry.format == "PDF":
            formats["pdf"] = os.path.join(calibrepath, book.path, entry.name + ".pdf")

    if len(formats) == 0:
        return _("Could not find any formats suitable for sending by email")

    if 'mobi' in formats:
        msg.attach(get_attachment(formats['mobi']))
    elif 'epub' in formats:
        filepath = make_mobi(book.id, calibrepath)
        if filepath is not None:
            msg.attach(get_attachment(filepath))
        else:
            return _("Could not convert epub to mobi")
    elif 'pdf' in formats:
        msg.attach(get_attachment(formats['pdf']))
    else:
        return _("Could not find any formats suitable for sending by email")

    return send_raw_email(kindle_mail, msg)

def get_attachment(file_path):
    """Get file as MIMEBase message"""
    try:
        file_ = open(file_path, 'rb')
        attachment = MIMEBase('application', 'octet-stream')
        attachment.set_payload(file_.read())
        file_.close()
        encoders.encode_base64(attachment)

        attachment.add_header('Content-Disposition', 'attachment',
                              filename=os.path.basename(file_path))
        return attachment
    except IOError:
        traceback.print_exc()
        app.logger.error(u'The requested file could not be read. Maybe wrong permissions?')
        return None

def get_valid_filename(value, replace_whitespace=True):
    """
    Returns the given string converted to a string that can be used for a clean
    filename. Limits num characters to 128 max.
    """
    if value[-1:] == u'.':
        value = value[:-1] + u'_'
    if use_unidecode:
        value = (unidecode.unidecode(value)).strip()
    else:
        value = value.replace(u'§', u'SS')
        value = value.replace(u'ß', u'ss')
        value = unicodedata.normalize('NFKD', value)
        re_slugify = re.compile('[\W\s-]', re.UNICODE)
        if isinstance(value, str):  # Python3 str, Python2 unicode
            value = re_slugify.sub('', value).strip()
        else:
            value = unicode(re_slugify.sub('', value).strip())
    if replace_whitespace:
        # *+:\"/<>? are replaced by _
        value = re.sub('[\*\+:\\\"/<>\?]+', u'_', value, flags=re.U)

    value = value[:128]
    return value

def get_sorted_author(value):
    regexes = ["^(JR|SR)\.?$", "^I{1,3}\.?$", "^IV\.?$"]
    combined = "(" + ")|(".join(regexes) + ")"
    value = value.split(" ")
    if re.match(combined, value[-1].upper()):
        value2 = value[-2] + ", " + " ".join(value[:-2]) + " " + value[-1]
    else:
        value2 = value[-1] + ", " + " ".join(value[:-1])
    return value2

def delete_book(book, calibrepath):
    path = os.path.join(calibrepath, book.path)
    shutil.rmtree(path, ignore_errors=True)

def update_dir_stucture(book_id, calibrepath):
    db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    path = os.path.join(calibrepath, book.path)

    authordir = book.path.split('/')[0]
    new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        new_title_path = os.path.join(os.path.dirname(path), new_titledir)
        os.rename(path, new_title_path)
        path = new_title_path
        book.path = book.path.split('/')[0] + '/' + new_titledir

    if authordir != new_authordir:
        new_author_path = os.path.join(os.path.join(calibrepath, new_authordir), os.path.basename(path))
        os.renames(path, new_author_path)
        book.path = new_authordir + '/' + book.path.split('/')[1]
    db.session.commit()

def update_dir_structure_gdrive(book_id):
    db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()

    authordir = book.path.split('/')[0]
    new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        app.logger.debug(titledir)
        gFile = gd.getFileFromEbooksFolder(web.Gdrive.Instance().drive, os.path.dirname(book.path), titledir)
        gFile['title'] = new_titledir
        gFile.Upload()
        book.path = book.path.split('/')[0] + '/' + new_titledir

    if authordir != new_authordir:
        gFile = gd.getFileFromEbooksFolder(web.Gdrive.Instance().drive, None, authordir)
        gFile['title'] = new_authordir
        gFile.Upload()
        book.path = new_authordir + '/' + book.path.split('/')[1]

    db.session.commit()

class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = 0

    def run(self):
        global global_task
        self.status = 1
        r = requests.get('https://api.github.com/repos/janeczku/calibre-web/zipball/master', stream=True)
        fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
        self.status = 2
        z = zipfile.ZipFile(StringIO(r.content))
        self.status = 3
        tmp_dir = gettempdir()
        z.extractall(tmp_dir)
        self.status = 4
        self.update_source(os.path.join(tmp_dir, os.path.splitext(fname)[0]), ub.config.get_main_dir)
        self.status = 5
        global_task = 0
        db.session.close()
        db.engine.dispose()
        ub.session.close()
        ub.engine.dispose()
        self.status = 6

        if web.gevent_server:
            web.gevent_server.stop()
        else:
            # stop tornado server
            server = IOLoop.instance()
            server.add_callback(server.stop)
        self.status = 7

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(self, file):
        return [x.strip() for x in open(file, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(self, one, two):
        return [x for x in one if x not in set(two)]

    @classmethod
    def reduce_dirs(self, delete_files, new_list):
        new_delete = []
        for file in delete_files:
            parts = file.split(os.sep)
            sub = ''
            for i in range(len(parts)):
                sub = os.path.join(sub, parts[i])
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(self, remove_items, exclude_items):
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(self, root_src_dir, root_dst_dir):
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System : ' + sys.platform)
            new_permissions = os.stat(root_dst_dir)
            # print new_permissions
        for src_dir, dirs, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: ' + dst_dir)
                if change_permissions:
                    # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_gid))
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: ' + dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File ' + src_file + ' to ' + dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                        # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_gid))
                    except Exception:
                        e = sys.exc_info()
                        logging.getLogger('cps.web').debug('Fail ' + str(dst_file) + ' error: ' + str(e))
        return

    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    # log_from_thread("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove:" + item_path)
        shutil.rmtree(source, ignore_errors=True)