#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
#                          andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
#                          falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
#                          ruben-herold, marblepebble, JackED42, SiphonSquirrel,
#                          apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cps import mimetypes, global_WorkerThread, searched_ids
from flask import render_template, request, redirect, url_for, send_from_directory, make_response, g, flash, abort
from werkzeug.exceptions import default_exceptions
import helper
import os
from sqlalchemy.exc import IntegrityError
from flask_login import login_user, logout_user, login_required, current_user
from flask_babel import gettext as _
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.datastructures import Headers
from babel import Locale as LC
from babel.dates import format_date
from babel.core import UnknownLocaleError
import base64
from sqlalchemy.sql import *
import json
import datetime
from iso639 import languages as isoLanguages
import re
import db
import gdriveutils
from redirect import redirect_back
from cps import lm, babel, ub, config, get_locale, language_table, app
from pagination import Pagination
from sqlalchemy.sql.expression import text

feature_support = dict()

try:
    from oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
    feature_support['oauth'] = True
except ImportError:
    feature_support['oauth'] = False
    oauth_check = {}

try:
    import ldap
    feature_support['ldap'] = True
except ImportError:
    feature_support['ldap'] = False

try:
    from googleapiclient.errors import HttpError
except ImportError:
    pass

try:
    from goodreads.client import GoodreadsClient
    feature_support['goodreads'] = True
except ImportError:
    feature_support['goodreads'] = False

try:
    import Levenshtein
    feature_support['levenshtein'] = True
except ImportError:
    feature_support['levenshtein'] = False

try:
    from functools import reduce, wraps
except ImportError:
    pass  # We're not using Python 3

try:
    import rarfile
    feature_support['rar'] = True
except ImportError:
    feature_support['rar'] = False

try:
    from natsort import natsorted as sort
except ImportError:
    sort = sorted  # Just use regular sort then, may cause issues with badly named pages in cbz/cbr files

try:
    from urllib.parse import quote
except ImportError:
    from urllib import quote
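
# Note (illustrative, not from the original source): each optional dependency above is
# probed once at import time; the rest of the module only consults the feature_support
# flags, e.g.
#   if feature_support['goodreads'] and config.config_use_goodreads:
#       gc = GoodreadsClient(config.config_goodreads_api_key, config.config_goodreads_api_secret)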


from flask import Blueprint


# Global variables

EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}

'''EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
                           (['rar','cbr'] if feature_support['rar'] else []))'''

# custom error page
def error_http(error):
    return render_template('http_error.html',
                           error_code=error.code,
                           error_name=error.name,
                           instance=config.config_calibre_web_title
                           ), error.code


# http error handling
for ex in default_exceptions:
    # register the custom error page for all client errors; server errors keep Flask's default handler
    if ex < 500:
        app.register_error_handler(ex, error_http)
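
# Example (illustrative, not part of the original code): with the loop above in place,
# a 403 or 404 raised anywhere in the app is rendered through http_error.html, while
# server errors (>= 500) keep Flask's built-in error page.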


web = Blueprint('web', __name__)


@lm.user_loader
def load_user(user_id):
    try:
        return ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
    except Exception as e:
        print(e)


@lm.header_loader
def load_user_from_header(header_val):
    if header_val.startswith('Basic '):
        header_val = header_val.replace('Basic ', '', 1)
    basic_username = basic_password = ''
    try:
        header_val = base64.b64decode(header_val)
        basic_username = header_val.split(':')[0]
        basic_password = header_val.split(':')[1]
    except TypeError:
        pass
    user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == basic_username.lower()).first()
    if user and check_password_hash(user.password, basic_password):
        return user
    return
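
# Example (illustrative, not part of the original code): an HTTP Basic header such as
#   Authorization: Basic dXNlcjpwYXNz
# carries base64 for "user:pass"; the loader above splits the decoded value on ':' and
# verifies the password against the stored hash.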


def login_required_if_no_ano(func):
    @wraps(func)
    def decorated_view(*args, **kwargs):
        if config.config_anonbrowse == 1:
            return func(*args, **kwargs)
        return login_required(func)(*args, **kwargs)
    return decorated_view
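
# Example usage (as in the routes below): the decorator sits between the route and the
# view so anonymous browsing is honoured whenever it is enabled in the configuration:
#   @web.route("/hot", defaults={'page': 1})
#   @login_required_if_no_ano
#   def hot_books(page): ...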


def remote_login_required(f):
    @wraps(f)
    def inner(*args, **kwargs):
        if config.config_remote_login:
            return f(*args, **kwargs)
        if request.is_xhr:
            data = {'status': 'error', 'message': 'Forbidden'}
            response = make_response(json.dumps(data, ensure_ascii=False))
            response.headers["Content-Type"] = "application/json; charset=utf-8"
            return response, 403
        abort(403)
    return inner


def admin_required(f):
    """
    Checks if current_user.role == 1
    """
    @wraps(f)
    def inner(*args, **kwargs):
        if current_user.role_admin():
            return f(*args, **kwargs)
        abort(403)

    return inner


def unconfigured(f):
    """
    Checks if the application database is not configured yet
    """
    @wraps(f)
    def inner(*args, **kwargs):
        if not config.db_configured:
            return f(*args, **kwargs)
        abort(403)

    return inner


def download_required(f):
    @wraps(f)
    def inner(*args, **kwargs):
        if current_user.role_download() or current_user.role_admin():
            return f(*args, **kwargs)
        abort(403)

    return inner


def upload_required(f):
    @wraps(f)
    def inner(*args, **kwargs):
        if current_user.role_upload() or current_user.role_admin():
            return f(*args, **kwargs)
        abort(403)

    return inner


def edit_required(f):
    @wraps(f)
    def inner(*args, **kwargs):
        if current_user.role_edit() or current_user.role_admin():
            return f(*args, **kwargs)
        abort(403)

    return inner


# Language and content filters for displaying in the UI
def common_filters():
    if current_user.filter_language() != "all":
        lang_filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        lang_filter = true()
    content_rating_filter = false() if current_user.mature_content else \
        db.Books.tags.any(db.Tags.name.in_(config.mature_content_tags()))
    return and_(lang_filter, ~content_rating_filter)
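
# Example usage (as seen throughout this module): the returned SQLAlchemy expression is
# chained into any Books query so language and mature-content filtering always applies:
#   db.session.query(db.Books).filter(common_filters()).first()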


# Create a translated display name for every stored language, for use in the UI
def speaking_language(languages=None):
    if not languages:
        languages = db.session.query(db.Languages).all()
    for lang in languages:
        try:
            cur_l = LC.parse(lang.lang_code)
            lang.name = cur_l.get_language_name(get_locale())
        except UnknownLocaleError:
            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    return languages


# checks if domain is in database (including wildcards)
# example: SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
def check_valid_domain(domain_text):
    domain_text = domain_text.split('@', 1)[-1].lower()
    sql = "SELECT * FROM registration WHERE :domain LIKE domain;"
    result = ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all()
    return len(result)
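
# Example (illustrative, not from the original code): because the submitted domain is
# placed on the left-hand side of LIKE, a stored pattern such as '%.edu' allows
# 'student@mail.university.edu', while a plain entry 'example.com' matches only that domain.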


# Orders all Authors in the list according to authors sort
def order_authors(entry):
    sort_authors = entry.author_sort.split('&')
    authors_ordered = list()
    error = False
    for auth in sort_authors:
        # ToDo: How to handle not found authorname
        result = db.session.query(db.Authors).filter(db.Authors.sort == auth.lstrip().strip()).first()
        if not result:
            error = True
            break
        authors_ordered.append(result)
    if not error:
        entry.authors = authors_ordered
    return entry
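
# Example (illustrative, not part of the original code): Calibre keeps author_sort as an
# ampersand-separated list, e.g. "Pratchett, Terry & Gaiman, Neil"; each piece is looked
# up by its sort name so entry.authors ends up in exactly that order.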


# Fill indexpage with all requested data from database
def fill_indexpage(page, database, db_filter, order, *join):
    if current_user.show_detail_random():
        randm = db.session.query(db.Books).filter(common_filters())\
            .order_by(func.random()).limit(config.config_random_books)
    else:
        randm = false()
    off = int(int(config.config_books_per_page) * (page - 1))
    pagination = Pagination(page, config.config_books_per_page,
                            len(db.session.query(database).filter(db_filter).filter(common_filters()).all()))
    entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters()).\
        order_by(*order).offset(off).limit(config.config_books_per_page).all()
    for book in entries:
        book = order_authors(book)
    return entries, randm, pagination
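
# Example call (taken from index() below): page through all books, newest first, and get
# the random sidebar entries plus the pagination object in one call:
#   entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.timestamp.desc()])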


# Read search results from the calibre database and return them (used for the feed and the simple search)
def get_search_results(term):
    q = list()
    authorterms = re.split("[, ]+", term)
    for authorterm in authorterms:
        q.append(db.Books.authors.any(db.Authors.name.ilike("%" + authorterm + "%")))
    db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    db.Books.authors.any(db.Authors.name.ilike("%" + term + "%"))

    return db.session.query(db.Books).filter(common_filters()).filter(
        db.or_(db.Books.tags.any(db.Tags.name.ilike("%" + term + "%")),
               db.Books.series.any(db.Series.name.ilike("%" + term + "%")),
               db.Books.authors.any(and_(*q)),
               db.Books.publishers.any(db.Publishers.name.ilike("%" + term + "%")),
               db.Books.title.ilike("%" + term + "%"))).all()
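
# Example (illustrative, not part of the original code): a query such as "pratchett guards"
# is split into author terms that must all match a single author record, while the full
# term is also matched case-insensitively against tags, series, publishers and titles (OR-ed).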


# Returns the template for rendering and includes the instance name
def render_title_template(*args, **kwargs):
    return render_template(instance=config.config_calibre_web_title, *args, **kwargs)
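
# Example usage (as in the views below): behaves like flask.render_template but always
# passes the configured site title as "instance":
#   render_title_template('search.html', searchterm=term, entries=entries, page="search")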


@web.before_app_request
def before_request():
    g.user = current_user
    g.allow_registration = config.config_public_reg
    g.allow_upload = config.config_uploading
    g.current_theme = config.config_theme
    g.public_shelfes = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1).order_by(ub.Shelf.name).all()
    if not config.db_configured and request.endpoint not in ('admin.basic_configuration', 'login') \
            and '/static/' not in request.path:
        return redirect(url_for('admin.basic_configuration'))


@web.route("/ajax/emailstat")
@login_required
def get_email_status_json():
    tasks = global_WorkerThread.get_taskstatus()
    answer = helper.render_task_status(tasks)
    js = json.dumps(answer, default=helper.json_serial)
    response = make_response(js)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response


'''
@web.route("/ajax/getcomic/<int:book_id>/<book_format>/<int:page>")
@login_required
def get_comic_book(book_id, book_format, page):
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    if not book:
        return "", 204
    else:
        for bookformat in book.data:
            if bookformat.format.lower() == book_format.lower():
                cbr_file = os.path.join(config.config_calibre_dir, book.path, bookformat.name) + "." + book_format
                if book_format in ("cbr", "rar"):
                    if feature_support['rar'] == True:
                        rarfile.UNRAR_TOOL = config.config_rarfile_location
                        try:
                            rf = rarfile.RarFile(cbr_file)
                            names = sort(rf.namelist())
                            extract = lambda page: rf.read(names[page])
                        except:
                            # rarfile not valid
                            app.logger.error('Unrar binary not found, or unable to decompress file ' + cbr_file)
                            return "", 204
                    else:
                        app.logger.info('Unrar is not supported please install python rarfile extension')
                        # no support means return nothing
                        return "", 204
                elif book_format in ("cbz", "zip"):
                    zf = zipfile.ZipFile(cbr_file)
                    names = sort(zf.namelist())
                    extract = lambda page: zf.read(names[page])
                elif book_format in ("cbt", "tar"):
                    tf = tarfile.TarFile(cbr_file)
                    names = sort(tf.getnames())
                    extract = lambda page: tf.extractfile(names[page]).read()
                else:
                    app.logger.error('unsupported comic format')
                    return "", 204

                if sys.version_info.major >= 3:
                    b64 = codecs.encode(extract(page), 'base64').decode()
                else:
                    b64 = extract(page).encode('base64')
                ext = names[page].rpartition('.')[-1]
                if ext not in ('png', 'gif', 'jpg', 'jpeg'):
                    ext = 'png'
                extractedfile = "data:image/" + ext + ";base64," + b64
                fileData = {"name": names[page], "page": page, "last": len(names) - 1, "content": extractedfile}
                return make_response(json.dumps(fileData))
        return "", 204
'''


@web.route("/get_authors_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_authors_json():
    if request.method == "GET":
        query = request.args.get('q')
        entries = db.session.query(db.Authors).filter(db.Authors.name.ilike("%" + query + "%")).all()
        json_dumps = json.dumps([dict(name=r.name.replace('|', ',')) for r in entries])
        return json_dumps


@web.route("/get_publishers_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_publishers_json():
    if request.method == "GET":
        query = request.args.get('q')
        entries = db.session.query(db.Publishers).filter(db.Publishers.name.ilike("%" + query + "%")).all()
        json_dumps = json.dumps([dict(name=r.name.replace('|', ',')) for r in entries])
        return json_dumps


@web.route("/get_tags_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_tags_json():
    if request.method == "GET":
        query = request.args.get('q')
        entries = db.session.query(db.Tags).filter(db.Tags.name.ilike("%" + query + "%")).all()
        json_dumps = json.dumps([dict(name=r.name) for r in entries])
        return json_dumps


@web.route("/get_languages_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_languages_json():
    if request.method == "GET":
        query = request.args.get('q').lower()
        # languages = speaking_language()
        languages = language_table[get_locale()]
        entries_start = [s for key, s in languages.items() if s.lower().startswith(query.lower())]
        if len(entries_start) < 5:
            entries = [s for key, s in languages.items() if query in s.lower()]
            entries_start.extend(entries[0:(5 - len(entries_start))])
            entries_start = list(set(entries_start))
        json_dumps = json.dumps([dict(name=r) for r in entries_start[0:5]])
        return json_dumps


@web.route("/get_series_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_series_json():
    if request.method == "GET":
        query = request.args.get('q')
        entries = db.session.query(db.Series).filter(db.Series.name.ilike("%" + query + "%")).all()
        # entries = db.session.execute("select name from series where name like '%" + query + "%'")
        json_dumps = json.dumps([dict(name=r.name) for r in entries])
        return json_dumps


@web.route("/get_matching_tags", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_matching_tags():
    tag_dict = {'tags': []}
    if request.method == "GET":
        q = db.session.query(db.Books)
        author_input = request.args.get('author_name')
        title_input = request.args.get('book_title')
        include_tag_inputs = request.args.getlist('include_tag')
        exclude_tag_inputs = request.args.getlist('exclude_tag')
        q = q.filter(db.Books.authors.any(db.Authors.name.ilike("%" + author_input + "%")),
                     db.Books.title.ilike("%" + title_input + "%"))
        if len(include_tag_inputs) > 0:
            for tag in include_tag_inputs:
                q = q.filter(db.Books.tags.any(db.Tags.id == tag))
        if len(exclude_tag_inputs) > 0:
            for tag in exclude_tag_inputs:
                q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
        for book in q:
            for tag in book.tags:
                if tag.id not in tag_dict['tags']:
                    tag_dict['tags'].append(tag.id)
    json_dumps = json.dumps(tag_dict)
    return json_dumps


@web.route("/", defaults={'page': 1})
@web.route('/page/<int:page>')
@login_required_if_no_ano
def index(page):
    entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.timestamp.desc()])
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Recently Added Books"), page="root")


@web.route('/books/newest', defaults={'page': 1})
@web.route('/books/newest/page/<int:page>')
@login_required_if_no_ano
def newest_books(page):
    if current_user.show_sorted():
        entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.pubdate.desc()])
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Newest Books"), page="newest")
    else:
        abort(404)


@web.route('/books/oldest', defaults={'page': 1})
@web.route('/books/oldest/page/<int:page>')
@login_required_if_no_ano
def oldest_books(page):
    if current_user.show_sorted():
        entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.pubdate])
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Oldest Books"), page="oldest")
    else:
        abort(404)


@web.route('/books/a-z', defaults={'page': 1})
@web.route('/books/a-z/page/<int:page>')
@login_required_if_no_ano
def titles_ascending(page):
    if current_user.show_sorted():
        entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.sort])
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Books (A-Z)"), page="a-z")
    else:
        abort(404)


@web.route('/books/z-a', defaults={'page': 1})
@web.route('/books/z-a/page/<int:page>')
@login_required_if_no_ano
def titles_descending(page):
    entries, random, pagination = fill_indexpage(page, db.Books, True, [db.Books.sort.desc()])
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Books (Z-A)"), page="z-a")


@web.route("/hot", defaults={'page': 1})
@web.route('/hot/page/<int:page>')
@login_required_if_no_ano
def hot_books(page):
    if current_user.show_hot_books():
        if current_user.show_detail_random():
            random = db.session.query(db.Books).filter(common_filters())\
                .order_by(func.random()).limit(config.config_random_books)
        else:
            random = false()
        off = int(int(config.config_books_per_page) * (page - 1))
        all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by(
            ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
        hot_books = all_books.offset(off).limit(config.config_books_per_page)
        entries = list()
        for book in hot_books:
            downloadBook = db.session.query(db.Books).filter(common_filters())\
                .filter(db.Books.id == book.Downloads.book_id).first()
            if downloadBook:
                entries.append(downloadBook)
            else:
                ub.delete_download(book.Downloads.book_id)
                # ub.session.query(ub.Downloads).filter(book.Downloads.book_id == ub.Downloads.book_id).delete()
                # ub.session.commit()
        numBooks = entries.__len__()
        pagination = Pagination(page, config.config_books_per_page, numBooks)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Hot Books (most downloaded)"), page="hot")
    else:
        abort(404)


@web.route("/rated", defaults={'page': 1})
@web.route('/rated/page/<int:page>')
@login_required_if_no_ano
def best_rated_books(page):
    if current_user.show_best_rated_books():
        entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
                                                     [db.Books.timestamp.desc()])
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Best rated books"), page="rated")
    else:
        abort(404)


@web.route("/discover", defaults={'page': 1})
@web.route('/discover/page/<int:page>')
@login_required_if_no_ano
def discover(page):
    if current_user.show_random_books():
        entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)])
        pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
        return render_title_template('discover.html', entries=entries, pagination=pagination,
                                     title=_(u"Random Books"), page="discover")
    else:
        abort(404)


@web.route("/author")
@login_required_if_no_ano
def author_list():
    if current_user.show_author():
        entries = db.session.query(db.Authors, func.count('books_authors_link.book').label('count'))\
            .join(db.books_authors_link).join(db.Books).filter(common_filters())\
            .group_by('books_authors_link.author').order_by(db.Authors.sort).all()
        for entry in entries:
            entry.Authors.name = entry.Authors.name.replace('|', ',')
        return render_title_template('list.html', entries=entries, folder='web.author',
                                     title=u"Author list", page="authorlist")
    else:
        abort(404)


@web.route("/author/<int:book_id>", defaults={'page': 1})
@web.route("/author/<int:book_id>/<int:page>")
@login_required_if_no_ano
def author(book_id, page):
    entries, __, pagination = fill_indexpage(page, db.Books, db.Books.authors.any(db.Authors.id == book_id),
                                             [db.Series.name, db.Books.series_index], db.books_series_link, db.Series)
    if entries is None:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("web.index"))

    name = db.session.query(db.Authors).filter(db.Authors.id == book_id).first().name.replace('|', ',')

    author_info = None
    other_books = []
    if feature_support['goodreads'] and config.config_use_goodreads:
        try:
            gc = GoodreadsClient(config.config_goodreads_api_key, config.config_goodreads_api_secret)
            author_info = gc.find_author(author_name=name)
            other_books = get_unique_other_books(entries.all(), author_info.books)
        except Exception:
            # Skip goodreads, if site is down/inaccessible
            app.logger.error('Goodreads website is down/inaccessible')

    return render_title_template('author.html', entries=entries, pagination=pagination,
                                 title=name, author=author_info, other_books=other_books, page="author")


@web.route("/publisher")
@login_required_if_no_ano
def publisher_list():
    if current_user.show_publisher():
        entries = db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count'))\
            .join(db.books_publishers_link).join(db.Books).filter(common_filters())\
            .group_by('books_publishers_link.publisher').order_by(db.Publishers.sort).all()
        return render_title_template('list.html', entries=entries, folder='web.publisher',
                                     title=_(u"Publisher list"), page="publisherlist")
    else:
        abort(404)


@web.route("/publisher/<int:book_id>", defaults={'page': 1})
@web.route('/publisher/<int:book_id>/<int:page>')
@login_required_if_no_ano
def publisher(book_id, page):
    publisher = db.session.query(db.Publishers).filter(db.Publishers.id == book_id).first()
    if publisher:
        entries, random, pagination = fill_indexpage(page, db.Books,
                                                     db.Books.publishers.any(db.Publishers.id == book_id),
                                                     (db.Series.name, db.Books.series_index), db.books_series_link,
                                                     db.Series)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Publisher: %(name)s", name=publisher.name), page="publisher")
    else:
        abort(404)


def get_unique_other_books(library_books, author_books):
    # Get all identifiers (ISBN, Goodreads, etc) and filter author's books by that list so we show fewer duplicates
    # Note: Not all images will be shown, even though they're available on Goodreads.com.
    #       See https://www.goodreads.com/topic/show/18213769-goodreads-book-images
    identifiers = reduce(lambda acc, book: acc + map(lambda identifier: identifier.val, book.identifiers),
                         library_books, [])
    other_books = filter(lambda book: book.isbn not in identifiers and book.gid["#text"] not in identifiers,
                         author_books)

    # Fuzzy match book titles
    if feature_support['levenshtein']:
        library_titles = reduce(lambda acc, book: acc + [book.title], library_books, [])
        other_books = filter(lambda author_book: not filter(
            lambda library_book:
            # Remove items in parentheses before comparing
            Levenshtein.ratio(re.sub(r"\(.*\)", "", author_book.title), library_book) > 0.7,
            library_titles
        ), other_books)

    return other_books


@web.route("/series")
@login_required_if_no_ano
def series_list():
    if current_user.show_series():
        entries = db.session.query(db.Series, func.count('books_series_link.book').label('count'))\
            .join(db.books_series_link).join(db.Books).filter(common_filters())\
            .group_by('books_series_link.series').order_by(db.Series.sort).all()
        return render_title_template('list.html', entries=entries, folder='web.series',
                                     title=_(u"Series list"), page="serieslist")
    else:
        abort(404)


@web.route("/series/<int:book_id>/", defaults={'page': 1})
@web.route("/series/<int:book_id>/<int:page>")
@login_required_if_no_ano
def series(book_id, page):
    name = db.session.query(db.Series).filter(db.Series.id == book_id).first()
    if name:
        entries, random, pagination = fill_indexpage(page, db.Books, db.Books.series.any(db.Series.id == book_id),
                                                     [db.Books.series_index])
        return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                     title=_(u"Series: %(serie)s", serie=name.name), page="series")
    else:
        abort(404)


@web.route("/language")
@login_required_if_no_ano
def language_overview():
    if current_user.show_language():
        if current_user.filter_language() == u"all":
            languages = speaking_language()
        else:
            try:
                cur_l = LC.parse(current_user.filter_language())
            except UnknownLocaleError:
                cur_l = None
            languages = db.session.query(db.Languages).filter(
                db.Languages.lang_code == current_user.filter_language()).all()
            if cur_l:
                languages[0].name = cur_l.get_language_name(get_locale())
            else:
                languages[0].name = _(isoLanguages.get(part3=languages[0].lang_code).name)
        lang_counter = db.session.query(db.books_languages_link,
                                        func.count('books_languages_link.book').label('bookcount')).group_by(
            'books_languages_link.lang_code').all()
        return render_title_template('languages.html', languages=languages, lang_counter=lang_counter,
                                     title=_(u"Available languages"), page="langlist")
    else:
        abort(404)


@web.route("/language/<name>", defaults={'page': 1})
@web.route('/language/<name>/page/<int:page>')
@login_required_if_no_ano
def language(name, page):
    try:
        cur_l = LC.parse(name)
        lang_name = cur_l.get_language_name(get_locale())
    except UnknownLocaleError:
        try:
            lang_name = _(isoLanguages.get(part3=name).name)
        except KeyError:
            abort(404)
    entries, random, pagination = fill_indexpage(page, db.Books, db.Books.languages.any(db.Languages.lang_code == name),
                                                 [db.Books.timestamp.desc()])
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Language: %(name)s", name=lang_name), page="language")


@web.route("/category")
@login_required_if_no_ano
def category_list():
    if current_user.show_category():
        entries = db.session.query(db.Tags, func.count('books_tags_link.book').label('count'))\
            .join(db.books_tags_link).join(db.Books).order_by(db.Tags.name).filter(common_filters())\
            .group_by('books_tags_link.tag').all()
        return render_title_template('list.html', entries=entries, folder='web.category',
                                     title=_(u"Category list"), page="catlist")
    else:
        abort(404)


@web.route("/category/<int:book_id>", defaults={'page': 1})
@web.route('/category/<int:book_id>/<int:page>')
@login_required_if_no_ano
def category(book_id, page):
    name = db.session.query(db.Tags).filter(db.Tags.id == book_id).first()
    if name:
        entries, random, pagination = fill_indexpage(page, db.Books, db.Books.tags.any(db.Tags.id == book_id),
                                                     (db.Series.name, db.Books.series_index), db.books_series_link,
                                                     db.Series)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Category: %(name)s", name=name.name), page="category")
    else:
        abort(404)


@web.route("/ajax/toggleread/<int:book_id>", methods=['POST'])
@login_required
def toggle_read(book_id):
    if not config.config_read_column:
        book = ub.session.query(ub.ReadBook).filter(ub.and_(ub.ReadBook.user_id == int(current_user.id),
                                                            ub.ReadBook.book_id == book_id)).first()
        if book:
            book.is_read = not book.is_read
        else:
            readBook = ub.ReadBook()
            readBook.user_id = int(current_user.id)
            readBook.book_id = book_id
            readBook.is_read = True
            book = readBook
        ub.session.merge(book)
        ub.session.commit()
    else:
        try:
            db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
            book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
            read_status = getattr(book, 'custom_column_' + str(config.config_read_column))
            if len(read_status):
                read_status[0].value = not read_status[0].value
                db.session.commit()
            else:
                cc_class = db.cc_classes[config.config_read_column]
                new_cc = cc_class(value=1, book=book_id)
                db.session.add(new_cc)
                db.session.commit()
        except KeyError:
            app.logger.error(
                u"Custom Column No.%d does not exist in calibre database" % config.config_read_column)
    return ""
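
# Note (illustrative, not from the original code): the read flag lives in one of two
# places -- Calibre-Web's own ub.ReadBook table when no Calibre column is configured, or
# a custom column inside the Calibre database when config_read_column is set.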


@web.route("/book/<int:book_id>")
@login_required_if_no_ano
def show_book(book_id):
    entries = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
    if entries:
        for index in range(0, len(entries.languages)):
            try:
                entries.languages[index].language_name = LC.parse(entries.languages[index].lang_code).get_language_name(
                    get_locale())
            except UnknownLocaleError:
                entries.languages[index].language_name = _(
                    isoLanguages.get(part3=entries.languages[index].lang_code).name)
        tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()

        if config.config_columns_to_ignore:
            cc = []
            for col in tmpcc:
                r = re.compile(config.config_columns_to_ignore)
                if r.match(col.label):
                    cc.append(col)
        else:
            cc = tmpcc
        book_in_shelfs = []
        shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
        for entry in shelfs:
            book_in_shelfs.append(entry.shelf)

        if not current_user.is_anonymous:
            if not config.config_read_column:
                matching_have_read_book = ub.session.query(ub.ReadBook).\
                    filter(ub.and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
                have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].is_read
            else:
                try:
                    matching_have_read_book = getattr(entries, 'custom_column_' + str(config.config_read_column))
                    have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value
                except KeyError:
                    app.logger.error(
                        u"Custom Column No.%d does not exist in calibre database" % config.config_read_column)
                    have_read = None

        else:
            have_read = None

        entries.tags = sort(entries.tags, key=lambda tag: tag.name)

        entries = order_authors(entries)

        kindle_list = helper.check_send_to_kindle(entries)
        reader_list = helper.check_read_formats(entries)

        audioentries = []
        for media_format in entries.data:
            if media_format.format.lower() in EXTENSIONS_AUDIO:
                audioentries.append(media_format.format.lower())

        return render_title_template('detail.html', entry=entries, audioentries=audioentries, cc=cc,
                                     is_xhr=request.is_xhr, title=entries.title, books_shelfs=book_in_shelfs,
                                     have_read=have_read, kindle_list=kindle_list, reader_list=reader_list, page="book")
    else:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("web.index"))


@web.route("/ajax/bookmark/<int:book_id>/<book_format>", methods=['POST'])
@login_required
def bookmark(book_id, book_format):
    bookmark_key = request.form["bookmark"]
    ub.session.query(ub.Bookmark).filter(ub.and_(ub.Bookmark.user_id == int(current_user.id),
                                                 ub.Bookmark.book_id == book_id,
                                                 ub.Bookmark.format == book_format)).delete()
    if not bookmark_key:
        ub.session.commit()
        return "", 204

    lbookmark = ub.Bookmark(user_id=current_user.id,
                            book_id=book_id,
                            format=book_format,
                            bookmark_key=bookmark_key)
    ub.session.merge(lbookmark)
    ub.session.commit()
    return "", 201


@web.route("/tasks")
@login_required
def get_tasks_status():
    # if the current user is an admin, show all emails, otherwise only their own
    tasks = global_WorkerThread.get_taskstatus()
    # UIanswer = copy.deepcopy(answer)
    answer = helper.render_task_status(tasks)
    # format each row for display
    return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"), page="tasks")


@web.route("/search", methods=["GET"])
@login_required_if_no_ano
def search():
    term = request.args.get("query").strip().lower()
    if term:
        entries = get_search_results(term)
        ids = list()
        for element in entries:
            ids.append(element.id)
        searched_ids[current_user.id] = ids
        return render_title_template('search.html', searchterm=term, entries=entries, page="search")
    else:
        return render_title_template('search.html', searchterm="", page="search")
2019-02-06 21:52:24 +01:00
|
|
|
@web.route("/advanced_search", methods=['GET'])
|
2016-05-02 22:43:50 +02:00
|
|
|
@login_required_if_no_ano
|
|
|
|
def advanced_search():
|
2018-08-12 18:21:57 +02:00
|
|
|
# Build custom columns names
|
|
|
|
tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
|
|
|
if config.config_columns_to_ignore:
|
|
|
|
cc = []
|
|
|
|
for col in tmpcc:
|
|
|
|
r = re.compile(config.config_columns_to_ignore)
|
|
|
|
if r.match(col.label):
|
|
|
|
cc.append(col)
|
|
|
|
else:
|
|
|
|
cc = tmpcc
|
|
|
|
|
|
|
|
db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
|
|
|
|
q = db.session.query(db.Books)
|
|
|
|
|
|
|
|
include_tag_inputs = request.args.getlist('include_tag')
|
|
|
|
exclude_tag_inputs = request.args.getlist('exclude_tag')
|
|
|
|
include_series_inputs = request.args.getlist('include_serie')
|
|
|
|
exclude_series_inputs = request.args.getlist('exclude_serie')
|
|
|
|
include_languages_inputs = request.args.getlist('include_language')
|
|
|
|
exclude_languages_inputs = request.args.getlist('exclude_language')
|
|
|
|
|
|
|
|
author_name = request.args.get("author_name")
|
|
|
|
book_title = request.args.get("book_title")
|
|
|
|
publisher = request.args.get("publisher")
|
|
|
|
pub_start = request.args.get("Publishstart")
|
|
|
|
pub_end = request.args.get("Publishend")
|
|
|
|
rating_low = request.args.get("ratinghigh")
|
|
|
|
rating_high = request.args.get("ratinglow")
|
|
|
|
description = request.args.get("comment")
|
2019-02-08 20:11:44 +01:00
|
|
|
if author_name:
|
|
|
|
author_name = author_name.strip().lower().replace(',','|')
|
|
|
|
if book_title:
|
|
|
|
book_title = book_title.strip().lower()
|
|
|
|
if publisher:
|
|
|
|
publisher = publisher.strip().lower()
|
2018-08-12 18:21:57 +02:00
|
|
|
|
|
|
|
searchterm = []
|
|
|
|
cc_present = False
|
|
|
|
for c in cc:
|
|
|
|
if request.args.get('custom_column_' + str(c.id)):
|
2018-09-08 12:28:48 +02:00
|
|
|
searchterm.extend([(u"%s: %s" % (c.name, request.args.get('custom_column_' + str(c.id))))])
|
2018-08-12 18:21:57 +02:00
|
|
|
cc_present = True
|
|
|
|
|
|
|
|
if include_tag_inputs or exclude_tag_inputs or include_series_inputs or exclude_series_inputs or \
|
|
|
|
include_languages_inputs or exclude_languages_inputs or author_name or book_title or \
|
|
|
|
publisher or pub_start or pub_end or rating_low or rating_high or description or cc_present:
|
|
|
|
searchterm = []
|
2019-02-08 20:11:44 +01:00
|
|
|
searchterm.extend((author_name.replace('|', ','), book_title, publisher))
|
2018-08-12 18:21:57 +02:00
|
|
|
if pub_start:
|
|
|
|
try:
|
2018-09-08 12:28:48 +02:00
|
|
|
searchterm.extend([_(u"Published after ") +
|
2019-02-08 20:11:44 +01:00
|
|
|
format_date(datetime.datetime.strptime(pub_start,"%Y-%m-%d"),
|
|
|
|
format='medium', locale=get_locale())])
|
2018-08-12 18:21:57 +02:00
|
|
|
except ValueError:
|
|
|
|
pub_start = u""
|
|
|
|
if pub_end:
|
|
|
|
try:
|
|
|
|
searchterm.extend([_(u"Published before ") +
|
2019-02-08 20:11:44 +01:00
|
|
|
format_date(datetime.datetime.strptime(pub_end,"%Y-%m-%d"),
|
|
|
|
format='medium', locale=get_locale())])
|
2018-08-12 18:21:57 +02:00
|
|
|
except ValueError:
|
|
|
|
pub_start = u""
|
|
|
|
        tag_names = db.session.query(db.Tags).filter(db.Tags.id.in_(include_tag_inputs)).all()
        searchterm.extend(tag.name for tag in tag_names)
        serie_names = db.session.query(db.Series).filter(db.Series.id.in_(include_series_inputs)).all()
        searchterm.extend(serie.name for serie in serie_names)
        language_names = db.session.query(db.Languages).filter(db.Languages.id.in_(include_languages_inputs)).all()
        if language_names:
            language_names = speaking_language(language_names)
        searchterm.extend(language.name for language in language_names)
        if rating_high:
            searchterm.extend([_(u"Rating <= %(rating)s", rating=rating_high)])
        if rating_low:
            searchterm.extend([_(u"Rating >= %(rating)s", rating=rating_low)])
        # handle custom columns
        for c in cc:
            if request.args.get('custom_column_' + str(c.id)):
                searchterm.extend([(u"%s: %s" % (c.name, request.args.get('custom_column_' + str(c.id))))])
        searchterm = " + ".join(filter(None, searchterm))
        q = q.filter()
        if author_name:
            q = q.filter(db.Books.authors.any(db.Authors.name.ilike("%" + author_name + "%")))
        if book_title:
            q = q.filter(db.Books.title.ilike("%" + book_title + "%"))
        if pub_start:
            q = q.filter(db.Books.pubdate >= pub_start)
        if pub_end:
            q = q.filter(db.Books.pubdate <= pub_end)
        if publisher:
            q = q.filter(db.Books.publishers.any(db.Publishers.name.ilike("%" + publisher + "%")))
        for tag in include_tag_inputs:
            q = q.filter(db.Books.tags.any(db.Tags.id == tag))
        for tag in exclude_tag_inputs:
            q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
        for serie in include_series_inputs:
            q = q.filter(db.Books.series.any(db.Series.id == serie))
        for serie in exclude_series_inputs:
            q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
        if current_user.filter_language() != "all":
            q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
        else:
            for language in include_languages_inputs:
                q = q.filter(db.Books.languages.any(db.Languages.id == language))
            for language in exclude_languages_inputs:
                q = q.filter(not_(db.Books.languages.any(db.Languages.id == language)))
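        # Ratings are stored on Calibre's 0-10 scale (two points per star), so the star values
        # submitted by the form are doubled before they are compared against the database.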
        if rating_high:
            rating_high = int(rating_high) * 2
            q = q.filter(db.Books.ratings.any(db.Ratings.rating <= rating_high))
        if rating_low:
            rating_low = int(rating_low) * 2
            q = q.filter(db.Books.ratings.any(db.Ratings.rating >= rating_low))
        if description:
            q = q.filter(db.Books.comments.any(db.Comments.text.ilike("%" + description + "%")))

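        # Custom columns are filtered according to their Calibre datatype: booleans compare
        # against the literal string "True", integers are matched exactly, and every other
        # type falls back to a case-insensitive substring match.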
        # search custom columns
        for c in cc:
            custom_query = request.args.get('custom_column_' + str(c.id))
            if custom_query:
                if c.datatype == 'bool':
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == (custom_query == "True")))
                elif c.datatype == 'int':
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == custom_query))
                else:
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value.ilike("%" + custom_query + "%")))
        q = q.all()
        ids = list()
        for element in q:
            ids.append(element.id)
        searched_ids[current_user.id] = ids
        return render_title_template('search.html', searchterm=searchterm,
                                     entries=q, title=_(u"search"), page="search")

    # prepare data for search-form
    tags = db.session.query(db.Tags).order_by(db.Tags.name).all()
    series = db.session.query(db.Series).order_by(db.Series.name).all()
    if current_user.filter_language() == u"all":
        languages = speaking_language()
    else:
        languages = None
    return render_title_template('search_form.html', tags=tags, languages=languages,
                                 series=series, title=_(u"search"), cc=cc, page="advsearch")


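# Return the cover image for a single book; file retrieval is delegated to helper.get_book_cover().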
@web.route("/cover/<int:book_id>")
@login_required_if_no_ano
def get_cover(book_id):
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    return helper.get_book_cover(book.path)


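# Stream the raw book file in the requested format, either from Google Drive or from the
# local Calibre library folder.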
@web.route("/show/<book_id>/<book_format>")
@login_required_if_no_ano
def serve_book(book_id, book_format):
    book_format = book_format.split(".")[0]
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == book_format.upper())\
        .first()
    app.logger.info('Serving book: %s', data.name)
    if config.config_use_google_drive:
        headers = Headers()
        try:
            headers["Content-Type"] = mimetypes.types_map['.' + book_format]
        except KeyError:
            headers["Content-Type"] = "application/octet-stream"
        df = gdriveutils.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
        return gdriveutils.do_gdrive_download(df, headers)
    else:
        return send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + book_format)


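# The "read" and "unread" listings share one implementation, render_read_books() below.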
@web.route("/unreadbooks/", defaults={'page': 1})
@web.route("/unreadbooks/<int:page>")
@login_required_if_no_ano
def unread_books(page):
    return render_read_books(page, False)


@web.route("/readbooks/", defaults={'page': 1})
@web.route("/readbooks/<int:page>")
@login_required_if_no_ano
def read_books(page):
    return render_read_books(page, True)


def render_read_books(page, are_read, as_xml=False):
    if not config.config_read_column:
        readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id))\
            .filter(ub.ReadBook.is_read == True).all()
        readBookIds = [x.book_id for x in readBooks]
    else:
        try:
            readBooks = db.session.query(db.cc_classes[config.config_read_column])\
                .filter(db.cc_classes[config.config_read_column].value == True).all()
            readBookIds = [x.book for x in readBooks]
        except KeyError:
            app.logger.error(u"Custom Column No.%d does not exist in calibre database" % config.config_read_column)
            readBookIds = []

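    # Filter by membership in the collected id list: a positive match for the "read" view,
    # the negation for the "unread" view.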
    if are_read:
        db_filter = db.Books.id.in_(readBookIds)
    else:
        db_filter = ~db.Books.id.in_(readBookIds)

    entries, random, pagination = fill_indexpage(page, db.Books, db_filter, [db.Books.timestamp.desc()])

    if as_xml:
        xml = render_title_template('feed.xml', entries=entries, pagination=pagination)
        response = make_response(xml)
        response.headers["Content-Type"] = "application/xml; charset=utf-8"
        return response
    else:
        if are_read:
            name = _(u'Read Books') + ' (' + str(len(readBookIds)) + ')'
        else:
            total_books = db.session.query(func.count(db.Books.id)).scalar()
            name = _(u'Unread Books') + ' (' + str(total_books - len(readBookIds)) + ')'
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(name, name=name), page="read")


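# Open the in-browser viewer matching the requested format: the epub/pdf/txt readers, the audio
# player for mp3/m4a/m4b files, or the comic reader for cbr/cbt/cbz archives.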
@web.route("/read/<int:book_id>/<book_format>")
@login_required_if_no_ano
def read_book(book_id, book_format):
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    if not book:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("web.index"))

    # check if there is a stored reading position (bookmark) for this user and format
    bookmark = None
    if current_user.is_authenticated:
        bookmark = ub.session.query(ub.Bookmark).filter(ub.and_(ub.Bookmark.user_id == int(current_user.id),
                                                                ub.Bookmark.book_id == book_id,
                                                                ub.Bookmark.format == book_format.upper())).first()
    if book_format.lower() == "epub":
        return render_title_template('read.html', bookid=book_id, title=_(u"Read a Book"), bookmark=bookmark)
    elif book_format.lower() == "pdf":
        return render_title_template('readpdf.html', pdffile=book_id, title=_(u"Read a Book"))
    elif book_format.lower() == "txt":
        return render_title_template('readtxt.html', txtfile=book_id, title=_(u"Read a Book"))
    elif book_format.lower() in ("mp3", "m4b", "m4a"):
        entries = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
        return render_title_template('listenmp3.html', mp3file=book_id, audioformat=book_format.lower(),
                                     title=_(u"Read a Book"), entry=entries, bookmark=bookmark)
    else:
        book_dir = os.path.join(config.get_main_dir, "cps", "static", str(book_id))
        if not os.path.exists(book_dir):
            os.mkdir(book_dir)
        for fileext in ["cbr", "cbt", "cbz"]:
            if book_format.lower() == fileext:
                all_name = str(book_id)  # + "/" + book.data[0].name + "." + fileext
                # tmp_file = os.path.join(book_dir, book.data[0].name) + "." + fileext
                # if not os.path.exists(all_name):
                #     cbr_file = os.path.join(config.config_calibre_dir, book.path, book.data[0].name) + "." + fileext
                #     copyfile(cbr_file, tmp_file)
                return render_title_template('readcbr.html', comicfile=all_name, title=_(u"Read a Book"),
                                             extension=fileext)
        '''if feature_support['rar']:
            extensionList = ["cbr","cbt","cbz"]
        else:
            extensionList = ["cbt","cbz"]
        for fileext in extensionList:
            if book_format.lower() == fileext:
                return render_title_template('readcbr.html', comicfile=book_id,
                                              extension=fileext, title=_(u"Read a Book"), book=book)
        flash(_(u"Error opening eBook. File does not exist or file is not accessible."), category="error")
        return redirect(url_for("web.index"))'''


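# Send a book file as a download attachment; downloads of registered users are recorded so
# they can be listed on the profile page.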
@web.route("/download/<int:book_id>/<book_format>")
@login_required_if_no_ano
@download_required
def get_download_link(book_id, book_format):
    book_format = book_format.split(".")[0]
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id)\
        .filter(db.Data.format == book_format.upper()).first()
    if data:
        # collect downloaded books only for registered user and not for anonymous user
        if current_user.is_authenticated:
            ub.update_download(book_id, int(current_user.id))
        file_name = book.title
        if len(book.authors) > 0:
            file_name = book.authors[0].name + '_' + file_name
        file_name = helper.get_valid_filename(file_name)
        headers = Headers()
        try:
            headers["Content-Type"] = mimetypes.types_map['.' + book_format]
        except KeyError:
            headers["Content-Type"] = "application/octet-stream"
        headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (quote(file_name.encode('utf-8')),
                                                                                 book_format)
        return helper.do_download_file(book, book_format, data, headers)
    else:
        abort(404)


@web.route("/download/<int:book_id>/<book_format>/<anyname>")
@login_required_if_no_ano
@download_required
def get_download_link_ext(book_id, book_format, anyname):
    return get_download_link(book_id, book_format)


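# Self-registration: only available when public registration is enabled. A random password is
# generated for the new account and sent to the given e-mail address.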
@web.route('/register', methods=['GET', 'POST'])
def register():
    if not config.config_public_reg:
        abort(404)
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))

    if request.method == "POST":
        to_save = request.form.to_dict()
        if not to_save["nickname"] or not to_save["email"]:
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', title=_(u"register"), page="register")

        existing_user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == to_save["nickname"]
                                                         .lower()).first()
        existing_email = ub.session.query(ub.User).filter(ub.User.email == to_save["email"].lower()).first()
        if not existing_user and not existing_email:
            content = ub.User()
            # content.password = generate_password_hash(to_save["password"])
            if check_valid_domain(to_save["email"]):
                content.nickname = to_save["nickname"]
                content.email = to_save["email"]
                password = helper.generate_random_password()
                content.password = generate_password_hash(password)
                content.role = config.config_default_role
                content.sidebar_view = config.config_default_show
                content.mature_content = bool(config.config_default_show & ub.MATURE_CONTENT)
                try:
                    ub.session.add(content)
                    ub.session.commit()
                    if feature_support['oauth']:
                        register_user_with_oauth(content)
                    helper.send_registration_mail(to_save["email"], to_save["nickname"], password)
                except Exception:
                    ub.session.rollback()
                    flash(_(u"An unknown error occurred. Please try again later."), category="error")
                    return render_title_template('register.html', title=_(u"register"), page="register")
            else:
                flash(_(u"Your e-mail is not allowed to register"), category="error")
                app.logger.info('Registering failed for user "' + to_save['nickname'] + '" e-mail address: ' +
                                to_save["email"])
                return render_title_template('register.html', title=_(u"register"), page="register")
            flash(_(u"Confirmation e-mail was sent to your e-mail account."), category="success")
            return redirect(url_for('web.login'))
        else:
            flash(_(u"This username or e-mail address is already in use."), category="error")
            return render_title_template('register.html', title=_(u"register"), page="register")

    if feature_support['oauth']:
        register_user_with_oauth()
    return render_title_template('register.html', config=config, title=_(u"register"), page="register")


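# Login: when the instance is configured for LDAP (config_login_type == 1) the credentials are
# checked against the LDAP server, otherwise against the locally stored password hash.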
@web.route('/login', methods=['GET', 'POST'])
def login():
    if not config.db_configured:
        return redirect(url_for('admin.basic_configuration'))
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))
    if request.method == "POST":
        form = request.form.to_dict()
        user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == form['username'].strip().lower())\
            .first()
        if config.config_login_type == 1 and user:
            try:
                ub.User.try_login(form['username'], form['password'], config.config_ldap_dn,
                                  config.config_ldap_provider_url)
                login_user(user, remember=True)
                flash(_(u"You are now logged in as: '%(nickname)s'", nickname=user.nickname), category="success")
                return redirect_back(url_for("web.index"))
            except ldap.INVALID_CREDENTIALS:
                ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
                app.logger.info('LDAP Login failed for user "' + form['username'] + '" IP-address: ' + ip_address)
                flash(_(u"Wrong Username or Password"), category="error")
            except ldap.SERVER_DOWN:
                app.logger.info('LDAP Login failed, LDAP Server down')
                flash(_(u"Could not login. LDAP server down, please contact your administrator"), category="error")
        else:
            if user and check_password_hash(user.password, form['password']) and user.nickname != "Guest":
                login_user(user, remember=True)
                flash(_(u"You are now logged in as: '%(nickname)s'", nickname=user.nickname), category="success")
                return redirect_back(url_for("web.index"))
            else:
                ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
                app.logger.info('Login failed for user "' + form['username'] + '" IP-address: ' + ip_address)
                flash(_(u"Wrong Username or Password"), category="error")

    # next_url = request.args.get('next')
    # if next_url is None or not is_safe_url(next_url):
    next_url = url_for('web.index')

    return render_title_template('login.html', title=_(u"login"), next_url=next_url, config=config, page="login")


@web.route('/logout')
@login_required
def logout():
    if current_user is not None and current_user.is_authenticated:
        logout_user()
        if feature_support['oauth']:
            logout_oauth_user()
    return redirect(url_for('web.login'))


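# Remote login flow: create a one-time token that is shown to the requesting device, then wait
# for the user to confirm it from an already authenticated browser via /verify/<token>; the
# waiting login page can check the token state through /ajax/verify_token.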
@web.route('/remote/login')
@remote_login_required
def remote_login():
    auth_token = ub.RemoteAuthToken()
    ub.session.add(auth_token)
    ub.session.commit()

    verify_url = url_for('web.verify_token', token=auth_token.auth_token, _external=True)

    return render_title_template('remote_login.html', title=_(u"login"), token=auth_token.auth_token,
                                 verify_url=verify_url, page="remotelogin")


@web.route('/verify/<token>')
@remote_login_required
@login_required
def verify_token(token):
    auth_token = ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.auth_token == token).first()

    # Token not found
    if auth_token is None:
        flash(_(u"Token not found"), category="error")
        return redirect(url_for('web.index'))

    # Token expired
    if datetime.datetime.now() > auth_token.expiration:
        ub.session.delete(auth_token)
        ub.session.commit()

        flash(_(u"Token has expired"), category="error")
        return redirect(url_for('web.index'))

    # Update token with user information
    auth_token.user_id = current_user.id
    auth_token.verified = True
    ub.session.commit()

    flash(_(u"Success! Please return to your device"), category="success")
    return redirect(url_for('web.index'))


@web.route('/ajax/verify_token', methods=['POST'])
@remote_login_required
def token_verified():
    token = request.form['token']
    auth_token = ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.auth_token == token).first()

    data = {}

    # Token not found
    if auth_token is None:
        data['status'] = 'error'
        data['message'] = _(u"Token not found")

    # Token expired
    elif datetime.datetime.now() > auth_token.expiration:
        ub.session.delete(auth_token)
        ub.session.commit()

        data['status'] = 'error'
        data['message'] = _(u"Token has expired")

    elif not auth_token.verified:
        data['status'] = 'not_verified'

    else:
        user = ub.session.query(ub.User).filter(ub.User.id == auth_token.user_id).first()
        login_user(user)

        ub.session.delete(auth_token)
        ub.session.commit()

        data['status'] = 'success'
        flash(_(u"You are now logged in as: '%(nickname)s'", nickname=user.nickname), category="success")

    response = make_response(json.dumps(data, ensure_ascii=False))
    response.headers["Content-Type"] = "application/json; charset=utf-8"

    return response


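# Queue a book for delivery to the user's Kindle e-mail address; requires configured SMTP
# settings and a Kindle address on the user profile. The actual sending (and optional format
# conversion) is queued via helper.send_mail().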
@web.route('/send/<int:book_id>/<book_format>/<int:convert>')
@login_required
@download_required
def send_to_kindle(book_id, book_format, convert):
    settings = ub.get_mail_settings()
    if settings.get("mail_server", "mail.example.com") == "mail.example.com":
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif current_user.kindle_mail:
        result = helper.send_mail(book_id, book_format, convert, current_user.kindle_mail, config.config_calibre_dir,
                                  current_user.nickname)
        if result is None:
            flash(_(u"Book successfully queued for sending to %(kindlemail)s", kindlemail=current_user.kindle_mail),
                  category="success")
            ub.update_download(book_id, int(current_user.id))
        else:
            flash(_(u"There was an error sending this book: %(res)s", res=result), category="error")
    else:
        flash(_(u"Please configure your kindle e-mail address first..."), category="error")
    return redirect(request.environ["HTTP_REFERER"])


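# Profile page: lets the logged-in user change password, e-mail, Kindle address, locale and the
# visible sidebar sections. The selected sections are combined into the sidebar_view bitmask.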
@web.route("/me", methods=["GET", "POST"])
@login_required
def profile():
    content = ub.session.query(ub.User).filter(ub.User.id == int(current_user.id)).first()
    downloads = list()
    languages = speaking_language()
    translations = babel.list_translations() + [LC('en')]
    if feature_support['oauth']:
        oauth_status = get_oauth_status()
    else:
        oauth_status = None
    for book in content.downloads:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
        if downloadBook:
            downloads.append(downloadBook)
        else:
            ub.delete_download(book.book_id)
            # ub.session.query(ub.Downloads).filter(book.book_id == ub.Downloads.book_id).delete()
            # ub.session.commit()
    if request.method == "POST":
        to_save = request.form.to_dict()
        content.random_books = 0
        if current_user.role_passwd() or current_user.role_admin():
            if "password" in to_save and to_save["password"]:
                content.password = generate_password_hash(to_save["password"])
        if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
            content.kindle_mail = to_save["kindle_mail"]
        if to_save["email"] and to_save["email"] != content.email:
            if config.config_public_reg and not check_valid_domain(to_save["email"]):
                flash(_(u"E-mail is not from valid domain"), category="error")
                return render_title_template("user_edit.html", content=content, downloads=downloads,
                                             title=_(u"%(name)s's profile", name=current_user.nickname))
            content.email = to_save["email"]
        if "show_random" in to_save and to_save["show_random"] == "on":
            content.random_books = 1
        if "default_language" in to_save:
            content.default_language = to_save["default_language"]
        if "locale" in to_save:
            content.locale = to_save["locale"]
        content.sidebar_view = 0
        if "show_random" in to_save:
            content.sidebar_view += ub.SIDEBAR_RANDOM
        if "show_language" in to_save:
            content.sidebar_view += ub.SIDEBAR_LANGUAGE
        if "show_series" in to_save:
            content.sidebar_view += ub.SIDEBAR_SERIES
        if "show_category" in to_save:
            content.sidebar_view += ub.SIDEBAR_CATEGORY
        if "show_recent" in to_save:
            content.sidebar_view += ub.SIDEBAR_RECENT
        if "show_sorted" in to_save:
            content.sidebar_view += ub.SIDEBAR_SORTED
        if "show_hot" in to_save:
            content.sidebar_view += ub.SIDEBAR_HOT
        if "show_best_rated" in to_save:
            content.sidebar_view += ub.SIDEBAR_BEST_RATED
        if "show_author" in to_save:
            content.sidebar_view += ub.SIDEBAR_AUTHOR
        if "show_publisher" in to_save:
            content.sidebar_view += ub.SIDEBAR_PUBLISHER
        if "show_read_and_unread" in to_save:
            content.sidebar_view += ub.SIDEBAR_READ_AND_UNREAD
        if "show_detail_random" in to_save:
            content.sidebar_view += ub.DETAIL_RANDOM

        content.mature_content = "show_mature_content" in to_save

        try:
            ub.session.commit()
        except IntegrityError:
            ub.session.rollback()
            flash(_(u"Found an existing account for this e-mail address."), category="error")
            return render_title_template("user_edit.html", content=content, downloads=downloads,
                                         title=_(u"%(name)s's profile", name=current_user.nickname),
                                         registered_oauth=oauth_check, oauth_status=oauth_status)
        flash(_(u"Profile updated"), category="success")
    return render_title_template("user_edit.html", translations=translations, profile=1, languages=languages,
                                 content=content, downloads=downloads, title=_(u"%(name)s's profile",
                                                                               name=current_user.nickname),
                                 page="me", registered_oauth=oauth_check, oauth_status=oauth_status)