# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
#   Copyright (C) 2012-2019 mutschler, cervinko, ok11, jkrehm, nanu-c, Wineliva,
#                            pjeby, elelay, idalin, Ozzieisaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import division, print_function, unicode_literals
import sys
import os
import re
import ast
import json
from datetime import datetime

from sqlalchemy import create_engine
from sqlalchemy import Table, Column, ForeignKey, CheckConstraint
from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from sqlalchemy.orm.collections import InstrumentedList
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.pool import StaticPool
from flask_login import current_user
from sqlalchemy.sql.expression import and_, true, false, text, func, or_
from sqlalchemy.ext.associationproxy import association_proxy
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask_babel import gettext as _

from . import logger, ub, isoLanguages
from .pagination import Pagination

try:
    import unidecode
    use_unidecode = True
except ImportError:
    use_unidecode = False

Session = None

cc_exceptions = ['datetime', 'comments', 'composite', 'series']
cc_classes = {}

Base = declarative_base()
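

# Association (link) tables of Calibre's metadata.db that map books to their
# authors, tags, series, ratings, languages and publishers (many-to-many).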
books_authors_link = Table('books_authors_link', Base.metadata,
                           Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                           Column('author', Integer, ForeignKey('authors.id'), primary_key=True)
                           )

books_tags_link = Table('books_tags_link', Base.metadata,
                        Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                        Column('tag', Integer, ForeignKey('tags.id'), primary_key=True)
                        )

books_series_link = Table('books_series_link', Base.metadata,
                          Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                          Column('series', Integer, ForeignKey('series.id'), primary_key=True)
                          )

books_ratings_link = Table('books_ratings_link', Base.metadata,
                           Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                           Column('rating', Integer, ForeignKey('ratings.id'), primary_key=True)
                           )

books_languages_link = Table('books_languages_link', Base.metadata,
                             Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                             Column('lang_code', Integer, ForeignKey('languages.id'), primary_key=True)
                             )

books_publishers_link = Table('books_publishers_link', Base.metadata,
                              Column('book', Integer, ForeignKey('books.id'), primary_key=True),
                              Column('publisher', Integer, ForeignKey('publishers.id'), primary_key=True)
                              )
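

# ORM classes mirroring the remaining core tables of Calibre's metadata.db
# (identifiers, comments, tags, authors, series, ratings, languages,
# publishers, data/formats, books and custom columns).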


class Identifiers(Base):
    __tablename__ = 'identifiers'

    id = Column(Integer, primary_key=True)
    type = Column(String(collation='NOCASE'), nullable=False, default="isbn")
    val = Column(String(collation='NOCASE'), nullable=False)
    book = Column(Integer, ForeignKey('books.id'), nullable=False)

    def __init__(self, val, id_type, book):
        self.val = val
        self.type = id_type
        self.book = book

    # def get(self):
    #     return {self.type: self.val}

    def formatType(self):
        if self.type == "amazon":
            return u"Amazon"
        elif self.type == "isbn":
            return u"ISBN"
        elif self.type == "doi":
            return u"DOI"
        elif self.type == "goodreads":
            return u"Goodreads"
        elif self.type == "google":
            return u"Google Books"
        elif self.type == "kobo":
            return u"Kobo"
        elif self.type == "lubimyczytac":
            return u"Lubimyczytac"
        else:
            return self.type

    def __repr__(self):
        if self.type == "amazon" or self.type == "asin":
            return u"https://amzn.com/{0}".format(self.val)
        elif self.type == "isbn":
            return u"https://www.worldcat.org/isbn/{0}".format(self.val)
        elif self.type == "doi":
            return u"https://dx.doi.org/{0}".format(self.val)
        elif self.type == "goodreads":
            return u"https://www.goodreads.com/book/show/{0}".format(self.val)
        elif self.type == "douban":
            return u"https://book.douban.com/subject/{0}".format(self.val)
        elif self.type == "google":
            return u"https://books.google.com/books?id={0}".format(self.val)
        elif self.type == "kobo":
            return u"https://www.kobo.com/ebook/{0}".format(self.val)
        elif self.type == "lubimyczytac":
            return u"https://lubimyczytac.pl/ksiazka/{0}".format(self.val)
        elif self.type == "url":
            return u"{0}".format(self.val)
        else:
            return u""


class Comments(Base):
    __tablename__ = 'comments'

    id = Column(Integer, primary_key=True)
    text = Column(String(collation='NOCASE'), nullable=False)
    book = Column(Integer, ForeignKey('books.id'), nullable=False)

    def __init__(self, text, book):
        self.text = text
        self.book = book

    def get(self):
        return self.text

    def __repr__(self):
        return u"<Comments({0})>".format(self.text)


class Tags(Base):
    __tablename__ = 'tags'

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(collation='NOCASE'), unique=True, nullable=False)

    def __init__(self, name):
        self.name = name

    def get(self):
        return self.name

    def __repr__(self):
        return u"<Tags('{0}')>".format(self.name)


class Authors(Base):
    __tablename__ = 'authors'

    id = Column(Integer, primary_key=True)
    name = Column(String(collation='NOCASE'), unique=True, nullable=False)
    sort = Column(String(collation='NOCASE'))
    link = Column(String, nullable=False, default="")

    def __init__(self, name, sort, link):
        self.name = name
        self.sort = sort
        self.link = link

    def get(self):
        return self.name

    def __repr__(self):
        return u"<Authors('{0},{1},{2}')>".format(self.name, self.sort, self.link)


class Series(Base):
    __tablename__ = 'series'

    id = Column(Integer, primary_key=True)
    name = Column(String(collation='NOCASE'), unique=True, nullable=False)
    sort = Column(String(collation='NOCASE'))

    def __init__(self, name, sort):
        self.name = name
        self.sort = sort

    def get(self):
        return self.name

    def __repr__(self):
        return u"<Series('{0},{1}')>".format(self.name, self.sort)


class Ratings(Base):
    __tablename__ = 'ratings'

    id = Column(Integer, primary_key=True)
    rating = Column(Integer, CheckConstraint('rating>-1 AND rating<11'), unique=True)

    def __init__(self, rating):
        self.rating = rating

    def get(self):
        return self.rating

    def __repr__(self):
        return u"<Ratings('{0}')>".format(self.rating)


class Languages(Base):
    __tablename__ = 'languages'

    id = Column(Integer, primary_key=True)
    lang_code = Column(String(collation='NOCASE'), nullable=False, unique=True)

    def __init__(self, lang_code):
        self.lang_code = lang_code

    def get(self):
        # language_name is not a database column; it may be attached at runtime
        # by callers that resolve the code to a display name, so fall back to
        # the raw language code when it is absent.
        if getattr(self, "language_name", None):
            return self.language_name
        else:
            return self.lang_code

    def __repr__(self):
        return u"<Languages('{0}')>".format(self.lang_code)


class Publishers(Base):
    __tablename__ = 'publishers'

    id = Column(Integer, primary_key=True)
    name = Column(String(collation='NOCASE'), nullable=False, unique=True)
    sort = Column(String(collation='NOCASE'))

    def __init__(self, name, sort):
        self.name = name
        self.sort = sort

    def get(self):
        return self.name

    def __repr__(self):
        return u"<Publishers('{0},{1}')>".format(self.name, self.sort)


class Data(Base):
    __tablename__ = 'data'
    __table_args__ = {'schema': 'calibre'}

    id = Column(Integer, primary_key=True)
    book = Column(Integer, ForeignKey('books.id'), nullable=False)
    format = Column(String(collation='NOCASE'), nullable=False)
    uncompressed_size = Column(Integer, nullable=False)
    name = Column(String, nullable=False)

    def __init__(self, book, book_format, uncompressed_size, name):
        self.book = book
        self.format = book_format
        self.uncompressed_size = uncompressed_size
        self.name = name

    # ToDo: Check
    def get(self):
        return self.name

    def __repr__(self):
        return u"<Data('{0},{1},{2},{3}')>".format(self.book, self.format, self.uncompressed_size, self.name)


class Books(Base):
    __tablename__ = 'books'

    DEFAULT_PUBDATE = datetime(101, 1, 1, 0, 0, 0, 0)  # ("0101-01-01 00:00:00+00:00")

    id = Column(Integer, primary_key=True, autoincrement=True)
    title = Column(String(collation='NOCASE'), nullable=False, default='Unknown')
    sort = Column(String(collation='NOCASE'))
    author_sort = Column(String(collation='NOCASE'))
    timestamp = Column(TIMESTAMP, default=datetime.utcnow)
    pubdate = Column(TIMESTAMP, default=DEFAULT_PUBDATE)
    series_index = Column(String, nullable=False, default="1.0")
    last_modified = Column(TIMESTAMP, default=datetime.utcnow)
    path = Column(String, default="", nullable=False)
    has_cover = Column(Integer, default=0)
    uuid = Column(String)
    isbn = Column(String(collation='NOCASE'), default="")
    # Iccn = Column(String(collation='NOCASE'), default="")
    flags = Column(Integer, nullable=False, default=1)

    authors = relationship('Authors', secondary=books_authors_link, backref='books')
    tags = relationship('Tags', secondary=books_tags_link, backref='books', order_by="Tags.name")
    comments = relationship('Comments', backref='books')
    data = relationship('Data', backref='books')
    series = relationship('Series', secondary=books_series_link, backref='books')
    ratings = relationship('Ratings', secondary=books_ratings_link, backref='books')
    languages = relationship('Languages', secondary=books_languages_link, backref='books')
    publishers = relationship('Publishers', secondary=books_publishers_link, backref='books')
    identifiers = relationship('Identifiers', backref='books')

    def __init__(self, title, sort, author_sort, timestamp, pubdate, series_index, last_modified, path, has_cover,
                 authors, tags, languages=None):
        self.title = title
        self.sort = sort
        self.author_sort = author_sort
        self.timestamp = timestamp
        self.pubdate = pubdate
        self.series_index = series_index
        self.last_modified = last_modified
        self.path = path
        self.has_cover = has_cover

    # def as_dict(self):
    #     return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def __repr__(self):
        return u"<Books('{0},{1},{2},{3},{4},{5},{6},{7},{8}')>".format(self.title, self.sort, self.author_sort,
                                                                        self.timestamp, self.pubdate,
                                                                        self.series_index, self.last_modified,
                                                                        self.path, self.has_cover)

    @property
    def atom_timestamp(self):
        return (self.timestamp.strftime('%Y-%m-%dT%H:%M:%S+00:00') or '')


class Custom_Columns(Base):
    __tablename__ = 'custom_columns'

    id = Column(Integer, primary_key=True)
    label = Column(String)
    name = Column(String)
    datatype = Column(String)
    mark_for_delete = Column(Boolean)
    editable = Column(Boolean)
    display = Column(String)
    is_multiple = Column(Boolean)
    normalized = Column(Boolean)

    def get_display_dict(self):
        display_dict = ast.literal_eval(self.display)
        if sys.version_info < (3, 0):
            display_dict['enum_values'] = [x.decode('unicode_escape') for x in display_dict['enum_values']]
        return display_dict


class AlchemyEncoder(json.JSONEncoder):

    def default(self, obj):
        if isinstance(obj.__class__, DeclarativeMeta):
            # an SQLAlchemy class
            fields = {}
            for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                if field == 'books':
                    continue
                data = obj.__getattribute__(field)
                try:
                    if isinstance(data, str):
                        data = data.replace("'", "\'")
                    elif isinstance(data, InstrumentedList):
                        el = list()
                        for ele in data:
                            if ele.get:
                                el.append(ele.get())
                            else:
                                el.append(json.dumps(ele, cls=AlchemyEncoder))
                        data = ",".join(el)
                        if data == '[]':
                            data = ""
                    else:
                        # probe that the value itself is JSON serializable;
                        # raises and falls into the except branch if not
                        json.dumps(data)
                    fields[field] = data
                except Exception:
                    fields[field] = ""
            # a json-encodable dict
            return fields

        return json.JSONEncoder.default(self, obj)
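

# Usage sketch for AlchemyEncoder (illustrative only, not called from this
# module): it lets callers serialize ORM rows, including relationship lists:
#     json.dumps(calibre_db.get_book(book_id), cls=AlchemyEncoder)
# where calibre_db is an already set up CalibreDB instance and book_id is a
# placeholder value.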


class CalibreDB():

    def __init__(self):
        self.engine = None
        self.session = None
        self.log = None
        self.config = None
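
    # Attach the library's metadata.db (and the application database) to an
    # in-memory SQLite engine, build ORM classes/tables for the library's
    # custom columns, and open a scoped session. Returns False (and marks the
    # configuration as invalid) when the Calibre directory or database is missing.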
    def setup_db(self, config, app_db_path):
        self.config = config
        self.dispose()
        global Session

        if not config.config_calibre_dir:
            config.invalidate()
            return False

        dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
        if not os.path.exists(dbpath):
            config.invalidate()
            return False

        try:
            self.engine = create_engine('sqlite://',
                                        echo=False,
                                        isolation_level="SERIALIZABLE",
                                        connect_args={'check_same_thread': False},
                                        poolclass=StaticPool)
            self.engine.execute("attach database '{}' as calibre;".format(dbpath))
            self.engine.execute("attach database '{}' as app_settings;".format(app_db_path))

            conn = self.engine.connect()
            # conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
        except Exception as e:
            config.invalidate(e)
            return False

        config.db_configured = True
        self.update_title_sort(config, conn.connection)

        if not cc_classes:
            cc = conn.execute("SELECT id, datatype FROM custom_columns")

            cc_ids = []
            books_custom_column_links = {}
            for row in cc:
                if row.datatype not in cc_exceptions:
                    if row.datatype == 'series':
                        dicttable = {'__tablename__': 'books_custom_column_' + str(row.id) + '_link',
                                     'id': Column(Integer, primary_key=True),
                                     'book': Column(Integer, ForeignKey('books.id'),
                                                    primary_key=True),
                                     'map_value': Column('value', Integer,
                                                         ForeignKey('custom_column_' +
                                                                    str(row.id) + '.id'),
                                                         primary_key=True),
                                     'extra': Column(Float),
                                     'asoc': relationship('custom_column_' + str(row.id), uselist=False),
                                     'value': association_proxy('asoc', 'value')
                                     }
                        books_custom_column_links[row.id] = type(str('books_custom_column_' + str(row.id) + '_link'),
                                                                 (Base,), dicttable)
                    else:
                        books_custom_column_links[row.id] = Table('books_custom_column_' + str(row.id) + '_link',
                                                                  Base.metadata,
                                                                  Column('book', Integer, ForeignKey('books.id'),
                                                                         primary_key=True),
                                                                  Column('value', Integer,
                                                                         ForeignKey('custom_column_' +
                                                                                    str(row.id) + '.id'),
                                                                         primary_key=True)
                                                                  )
                    cc_ids.append([row.id, row.datatype])

                    ccdict = {'__tablename__': 'custom_column_' + str(row.id),
                              'id': Column(Integer, primary_key=True)}
                    if row.datatype == 'float':
                        ccdict['value'] = Column(Float)
                    elif row.datatype == 'int':
                        ccdict['value'] = Column(Integer)
                    elif row.datatype == 'bool':
                        ccdict['value'] = Column(Boolean)
                    else:
                        ccdict['value'] = Column(String)
                    if row.datatype in ['float', 'int', 'bool']:
                        ccdict['book'] = Column(Integer, ForeignKey('books.id'))
                    cc_classes[row.id] = type(str('custom_column_' + str(row.id)), (Base,), ccdict)

            for cc_id in cc_ids:
                if (cc_id[1] == 'bool') or (cc_id[1] == 'int') or (cc_id[1] == 'float'):
                    setattr(Books,
                            'custom_column_' + str(cc_id[0]),
                            relationship(cc_classes[cc_id[0]],
                                         primaryjoin=(
                                             Books.id == cc_classes[cc_id[0]].book),
                                         backref='books'))
                elif (cc_id[1] == 'series'):
                    setattr(Books,
                            'custom_column_' + str(cc_id[0]),
                            relationship(books_custom_column_links[cc_id[0]],
                                         backref='books'))
                else:
                    setattr(Books,
                            'custom_column_' + str(cc_id[0]),
                            relationship(cc_classes[cc_id[0]],
                                         secondary=books_custom_column_links[cc_id[0]],
                                         backref='books'))

        Session = scoped_session(sessionmaker(autocommit=False,
                                              autoflush=True,
                                              bind=self.engine))
        self.session = Session()
        return True
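
    # Typical call sequence (sketch; config and app_db_path are supplied by
    # the caller):
    #     calibre_db = CalibreDB()
    #     if calibre_db.setup_db(config, app_db_path):
    #         book = calibre_db.get_book(1)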
    def get_book(self, book_id):
        return self.session.query(Books).filter(Books.id == book_id).first()

    def get_filtered_book(self, book_id, allow_show_archived=False):
        return self.session.query(Books).filter(Books.id == book_id). \
            filter(self.common_filters(allow_show_archived)).first()

    def get_book_by_uuid(self, book_uuid):
        return self.session.query(Books).filter(Books.uuid == book_uuid).first()

    def get_book_format(self, book_id, format):
        return self.session.query(Data).filter(Data.book == book_id).filter(Data.format == format).first()

    # Language and content filters for displaying in the UI
    def common_filters(self, allow_show_archived=False):
        if not allow_show_archived:
            archived_books = (
                ub.session.query(ub.ArchivedBook)
                .filter(ub.ArchivedBook.user_id == int(current_user.id))
                .filter(ub.ArchivedBook.is_archived == True)
                .all()
            )
            archived_book_ids = [archived_book.book_id for archived_book in archived_books]
            archived_filter = Books.id.notin_(archived_book_ids)
        else:
            archived_filter = true()

        if current_user.filter_language() != "all":
            lang_filter = Books.languages.any(Languages.lang_code == current_user.filter_language())
        else:
            lang_filter = true()
        negtags_list = current_user.list_denied_tags()
        postags_list = current_user.list_allowed_tags()
        neg_content_tags_filter = false() if negtags_list == [''] else Books.tags.any(Tags.name.in_(negtags_list))
        pos_content_tags_filter = true() if postags_list == [''] else Books.tags.any(Tags.name.in_(postags_list))
        if self.config.config_restricted_column:
            pos_cc_list = current_user.allowed_column_value.split(',')
            pos_content_cc_filter = true() if pos_cc_list == [''] else \
                getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
                    any(cc_classes[self.config.config_restricted_column].value.in_(pos_cc_list))
            neg_cc_list = current_user.denied_column_value.split(',')
            neg_content_cc_filter = false() if neg_cc_list == [''] else \
                getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
                    any(cc_classes[self.config.config_restricted_column].value.in_(neg_cc_list))
        else:
            pos_content_cc_filter = true()
            neg_content_cc_filter = false()
        return and_(lang_filter, pos_content_tags_filter, ~neg_content_tags_filter,
                    pos_content_cc_filter, ~neg_content_cc_filter, archived_filter)

    # Fill indexpage with all requested data from database
    def fill_indexpage(self, page, pagesize, database, db_filter, order, *join):
        return self.fill_indexpage_with_archived_books(page, pagesize, database, db_filter, order, False, *join)

    def fill_indexpage_with_archived_books(self, page, pagesize, database, db_filter, order, allow_show_archived, *join):
        pagesize = pagesize or self.config.config_books_per_page
        if current_user.show_detail_random():
            randm = self.session.query(Books) \
                .filter(self.common_filters(allow_show_archived)) \
                .order_by(func.random()) \
                .limit(self.config.config_random_books)
        else:
            randm = false()
        off = int(int(pagesize) * (page - 1))
        query = self.session.query(database) \
            .join(*join, isouter=True) \
            .filter(db_filter) \
            .filter(self.common_filters(allow_show_archived))
        pagination = Pagination(page, pagesize,
                                len(query.all()))
        entries = query.order_by(*order).offset(off).limit(pagesize).all()
        for book in entries:
            book = self.order_authors(book)
        return entries, randm, pagination

    # Orders all Authors in the list according to authors sort
    def order_authors(self, entry):
        sort_authors = entry.author_sort.split('&')
        authors_ordered = list()
        error = False
        for auth in sort_authors:
            # ToDo: How to handle not found authorname
            result = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).first()
            if not result:
                error = True
                break
            authors_ordered.append(result)
        if not error:
            entry.authors = authors_ordered
        return entry

    def get_typeahead(self, database, query, replace=('', ''), tag_filter=true()):
        query = query or ''
        self.session.connection().connection.connection.create_function("lower", 1, lcase)
        entries = self.session.query(database).filter(tag_filter). \
            filter(func.lower(database.name).ilike("%" + query + "%")).all()
        json_dumps = json.dumps([dict(name=r.name.replace(*replace)) for r in entries])
        return json_dumps

    def check_exists_book(self, authr, title):
        self.session.connection().connection.connection.create_function("lower", 1, lcase)
        q = list()
        authorterms = re.split(r'\s*&\s*', authr)
        for authorterm in authorterms:
            q.append(Books.authors.any(func.lower(Authors.name).ilike("%" + authorterm + "%")))

        return self.session.query(Books) \
            .filter(and_(Books.authors.any(and_(*q)), func.lower(Books.title).ilike("%" + title + "%"))).first()

    # read search results from calibre-database and return it (function is used for feed and simple search)
    def get_search_results(self, term, offset=None, order=None, limit=None):
        order = order or [Books.sort]
        if offset is not None and limit is not None:
            offset = int(offset)
            limit = offset + int(limit)
        term = term.strip().lower()
        self.session.connection().connection.connection.create_function("lower", 1, lcase)
        q = list()
        authorterms = re.split("[, ]+", term)
        for authorterm in authorterms:
            q.append(Books.authors.any(func.lower(Authors.name).ilike("%" + authorterm + "%")))
        result = self.session.query(Books).filter(self.common_filters(True)).filter(
            or_(Books.tags.any(func.lower(Tags.name).ilike("%" + term + "%")),
                Books.series.any(func.lower(Series.name).ilike("%" + term + "%")),
                Books.authors.any(and_(*q)),
                Books.publishers.any(func.lower(Publishers.name).ilike("%" + term + "%")),
                func.lower(Books.title).ilike("%" + term + "%")
                )).order_by(*order).all()
        result_count = len(result)
        return result[offset:limit], result_count

    # Creates for all stored languages a translated speaking name in the array for the UI
    def speaking_language(self, languages=None):
        from . import get_locale

        if not languages:
            languages = self.session.query(Languages) \
                .join(books_languages_link) \
                .join(Books) \
                .filter(self.common_filters()) \
                .group_by(text('books_languages_link.lang_code')).all()
        for lang in languages:
            try:
                cur_l = LC.parse(lang.lang_code)
                lang.name = cur_l.get_language_name(get_locale())
            except UnknownLocaleError:
                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
        return languages

    def update_title_sort(self, config, conn=None):
        # user defined sort function for calibre databases (Series, etc.)
        def _title_sort(title):
            # calibre sort stuff
            title_pat = re.compile(config.config_title_regex, re.IGNORECASE)
            match = title_pat.search(title)
            if match:
                prep = match.group(1)
                title = title[len(prep):] + ', ' + prep
            return title.strip()

        conn = conn or self.session.connection().connection.connection
        conn.create_function("title_sort", 1, _title_sort)

    def dispose(self):
        # global session

        old_session = self.session
        self.session = None
        if old_session:
            try:
                old_session.close()
            except Exception:
                pass
            if old_session.bind:
                try:
                    old_session.bind.dispose()
                except Exception:
                    pass

        for attr in list(Books.__dict__.keys()):
            if attr.startswith("custom_column_"):
                setattr(Books, attr, None)

        for db_class in cc_classes.values():
            Base.metadata.remove(db_class.__table__)
        cc_classes.clear()

        for table in reversed(Base.metadata.sorted_tables):
            name = table.key
            if name.startswith("custom_column_") or name.startswith("books_custom_column_"):
                if table is not None:
                    Base.metadata.remove(table)

    def reconnect_db(self, config, app_db_path):
        self.session.close()
        self.engine.dispose()
        self.setup_db(config, app_db_path)


def lcase(s):
    try:
        return unidecode.unidecode(s.lower())
    except Exception as e:
        log = logger.create()
        log.exception(e)
        return s.lower()