Improved detection of changed database
parent 89877835b3
commit 895f68033f
@@ -27,8 +27,9 @@ from flask import session

 class MyLoginManager(LoginManager):
     def _session_protection_failed(self):
-        sess = session._get_current_object()
+        _session = session._get_current_object()
         ident = self._session_identifier_generator()
-        if(sess and not (len(sess) == 1 and sess.get('csrf_token', None))) and ident != sess.get('_id', None):
+        if(_session and not (len(_session) == 1
+                             and _session.get('csrf_token', None))) and ident != _session.get('_id', None):
             return super()._session_protection_failed()
         return False
@@ -156,7 +156,7 @@ def create_app():
         services.goodreads_support.connect(config.config_goodreads_api_key,
                                            config.config_goodreads_api_secret,
                                            config.config_use_goodreads)
+    config.store_calibre_uuid(calibre_db, db.Library_Id)
     return app


 @babel.localeselector
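The new call above is the heart of this change: at startup, create_app() records the UUID of the attached Calibre library in Calibre-Web's settings, so a later reconnect can tell whether the configured folder now holds a different library. A minimal, hedged sketch of that idea, using plain sqlite3 and hypothetical helper names rather than Calibre-Web's own functions:

import sqlite3

def read_library_uuid(metadata_db_path):
    # Calibre keeps a per-library UUID in the library_id table of metadata.db.
    conn = sqlite3.connect(metadata_db_path)
    try:
        row = conn.execute("SELECT uuid FROM library_id LIMIT 1").fetchone()
    finally:
        conn.close()
    return row[0] if row else None

def library_changed(stored_uuid, metadata_db_path):
    # True only when a UUID can be read and it differs from the remembered one.
    current = read_library_uuid(metadata_db_path)
    return current is not None and current != stored_uuid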
cps/admin.py
@@ -1192,8 +1192,10 @@ def _db_simulate_change():
                                           '',
                                           param['config_calibre_dir'],
                                           flags=re.IGNORECASE).strip()
-    db_change = config.config_calibre_dir != to_save["config_calibre_dir"] and config.config_calibre_dir
-    db_valid = calibre_db.check_valid_db(to_save["config_calibre_dir"], ub.app_DB_path)
+    db_valid, db_change = calibre_db.check_valid_db(to_save["config_calibre_dir"],
+                                                    ub.app_DB_path,
+                                                    config.config_calibre_uuid)
+    db_change = bool(db_change and config.config_calibre_dir)
     return db_change, db_valid


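check_valid_db() now returns a pair instead of a single flag: the first element says whether a usable metadata.db exists at the path, the second whether its UUID differs from the one stored in the settings; _db_simulate_change() then only reports a change when a library directory had been configured before. A hedged sketch of how the pair can be interpreted (the helper below is illustrative, not Calibre-Web code):

def describe_db_state(db_valid, db_change, had_dir_before):
    # Maps the (valid, changed) pair from check_valid_db() to a readable state.
    if not db_valid:
        return "no usable metadata.db at the configured path"
    if db_change and had_dir_before:
        return "valid library, but its UUID differs from the stored one"
    return "valid library with a matching UUID (or first-time setup)"

# Example: a fresh setup is valid but not reported as a change.
print(describe_db_state(True, True, had_dir_before=False))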
@@ -1223,12 +1225,15 @@ def _db_configuration_update_helper():
     except Exception as ex:
         return _db_configuration_result('{}'.format(ex), gdrive_error)

-    if db_change or not db_valid or not config.db_configured:
+    if db_change or not db_valid or not config.db_configured \
+            or config.config_calibre_dir != to_save["config_calibre_dir"]:
         if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path):
             return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
                                             gdrive_error)
+        config.store_calibre_uuid(calibre_db, db.Library_Id)
+    # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
     if db_change:
         log.info("Calibre Database changed, delete all Calibre-Web info related to old Database")
         ub.session.query(ub.Downloads).delete()
         ub.session.query(ub.ArchivedBook).delete()
         ub.session.query(ub.ReadBook).delete()
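Put together, the reconnect path now re-runs setup_db() whenever the directory changed, the database is invalid, nothing was configured yet, or the UUID comparison flagged a different library, and it refreshes the stored UUID right after a successful attach; only a confirmed change triggers the purge of per-user data that references book ids of the old library. A hedged outline of that flow (names taken from the hunks above, the wrapper itself is illustrative):

def reconnect_library(calibre_db, config, db, ub, to_save, db_change, db_valid):
    # Re-attach when anything about the configured library is in doubt.
    if db_change or not db_valid or not config.db_configured \
            or config.config_calibre_dir != to_save["config_calibre_dir"]:
        if not calibre_db.setup_db(to_save["config_calibre_dir"], ub.app_DB_path):
            raise ValueError("DB location is not valid")
        # Remember the UUID of the library that was just attached.
        config.store_calibre_uuid(calibre_db, db.Library_Id)
    if db_change:
        # Old book ids are meaningless against the new library, so per-user
        # records are dropped (the full diff also clears shelves and Kobo sync state).
        ub.session.query(ub.Downloads).delete()
        ub.session.query(ub.ArchivedBook).delete()
        ub.session.query(ub.ReadBook).delete()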
@@ -62,6 +62,7 @@ class _Settings(_Base):
     mail_gmail_token = Column(JSON, default={})

     config_calibre_dir = Column(String)
+    config_calibre_uuid = Column(String)
     config_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_external_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_certfile = Column(String)
@@ -350,6 +351,14 @@ class _ConfigSQL(object):
         # self.config_calibre_dir = None
         self.save()

+    def store_calibre_uuid(self, calibre_db, Library_table):
+        try:
+            calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
+            if self.config_calibre_uuid != calibre_uuid.uuid:
+                self.config_calibre_uuid = calibre_uuid.uuid
+                self.save()
+        except AttributeError:
+            pass

 def _migrate_table(session, orm_class):
     changed = False
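store_calibre_uuid() reads the single row of the passed library table and persists its UUID only when it differs from the value already stored; if the table is empty, one_or_none() yields None and the attribute access raises AttributeError, which is swallowed so the stored value stays untouched. A hedged usage sketch, matching the call sites shown elsewhere in this diff:

# Called once at startup (create_app) and again after a successful reconnect
# in _db_configuration_update_helper(), per the hunks above.
config.store_calibre_uuid(calibre_db, db.Library_Id)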
@@ -438,19 +447,12 @@ def load_configuration(session):
         session.add(_Settings())
         session.commit()
     conf = _ConfigSQL(session)
-    # Migrate from global restrictions to user based restrictions
-    #if bool(conf.config_default_show & constants.MATURE_CONTENT) and conf.config_denied_tags == "":
-    #    conf.config_denied_tags = conf.config_mature_content_tags
-    #    conf.save()
-    #    session.query(ub.User).filter(ub.User.mature_content != True). \
-    #        update({"denied_tags": conf.config_mature_content_tags}, synchronize_session=False)
-    #    session.commit()
     return conf

-def get_flask_session_key(session):
-    flask_settings = session.query(_Flask_Settings).one_or_none()
+def get_flask_session_key(_session):
+    flask_settings = _session.query(_Flask_Settings).one_or_none()
     if flask_settings == None:
         flask_settings = _Flask_Settings(os.urandom(32))
-        session.add(flask_settings)
-        session.commit()
+        _session.add(flask_settings)
+        _session.commit()
     return flask_settings.flask_session_key
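get_flask_session_key() follows a generate-once, persist, reuse pattern: a 32-byte random key is created on first start, stored in the application database, and returned unchanged afterwards so existing Flask sessions survive restarts. A hedged, standalone sketch of the same pattern with a file instead of the settings table (the path is an assumption for illustration):

import os

def load_or_create_secret(path="flask_secret.bin"):
    # Reuse the persisted key if present; otherwise create and store a new one.
    if os.path.exists(path):
        with open(path, "rb") as fh:
            return fh.read()
    key = os.urandom(32)
    with open(path, "wb") as fh:
        fh.write(key)
    return key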
cps/db.py
@@ -93,6 +93,12 @@ books_publishers_link = Table('books_publishers_link', Base.metadata,
                               )


+class Library_Id(Base):
+    __tablename__ = 'library_id'
+    id = Column(Integer, primary_key=True)
+    uuid = Column(String, nullable=False)
+
+
 class Identifiers(Base):
     __tablename__ = 'identifiers'

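Library_Id maps Calibre's library_id table, whose row carries the UUID that identifies a library independently of its folder name. A hedged, self-contained sketch of reading it through this mapping (the metadata.db path is an assumption for illustration):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///metadata.db", echo=False)
session = sessionmaker(bind=engine)()
row = session.query(Library_Id).one_or_none()   # Library_Id as declared above
library_uuid = row.uuid if row else None        # None for an empty table
session.close()
engine.dispose()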
@@ -525,12 +531,12 @@ class CalibreDB():
         return cc_classes

     @classmethod
-    def check_valid_db(cls, config_calibre_dir, app_db_path):
+    def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
         if not config_calibre_dir:
-            return False
+            return False, False
         dbpath = os.path.join(config_calibre_dir, "metadata.db")
         if not os.path.exists(dbpath):
-            return False
+            return False, False
         try:
             check_engine = create_engine('sqlite://',
                                          echo=False,
@@ -540,10 +546,16 @@ class CalibreDB():
             with check_engine.begin() as connection:
                 connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
                 connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
+                local_session = scoped_session(sessionmaker())
+                local_session.configure(bind=connection)
+                database_uuid = local_session().query(Library_Id).one_or_none()
+                # local_session.dispose()
+
             check_engine.connect()
+            db_change = config_calibre_uuid != database_uuid.uuid
         except Exception:
-            return False
-        return True
+            return False, False
+        return True, db_change

     @classmethod
     def update_config(cls, config):
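One edge case worth noting: if the library_id table had no row, one_or_none() returns None and the .uuid access raises inside the try block, so the method falls through to (False, False) even though metadata.db itself is present. A hedged sketch of a more defensive comparison (not the committed code) that would treat a missing row as "valid, change unknown":

def compare_library_uuid(database_uuid_row, config_calibre_uuid):
    # database_uuid_row is the result of query(Library_Id).one_or_none().
    if database_uuid_row is None:
        return True, False   # usable database, but nothing to compare against
    return True, database_uuid_row.uuid != config_calibre_uuid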
@@ -196,7 +196,8 @@ def migrate():
 if not os.path.exists(cli.gdpath):
     try:
         Base.metadata.create_all(engine)
-    except Exception:
+    except Exception as ex:
+        log.error("Error connect to database: {} - {}".format(cli.gdpath, ex))
         raise
 migrate()

@@ -47,7 +47,7 @@ def remove_synced_book(book_id, all=False, session=None):
         ub.session_commit()
     else:
         session.query(ub.KoboSyncedBooks).filter(ub.KoboSyncedBooks.book_id == book_id).filter(user).delete()
-        ub.session_commit(sess=session)
+        ub.session_commit(_session=session)



@@ -31,7 +31,7 @@ from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
 from cps.kobo_sync_status import remove_synced_book
-from cps.ub import ini
+from cps.ub import init_db_thread

 from cps.tasks.mail import TaskEmail
 from cps import gdriveutils
@@ -148,7 +148,7 @@ class TaskConvert(CalibreTask):
                     local_db.session.merge(new_format)
                     local_db.session.commit()
                     if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
-                        ub_session = ini()
+                        ub_session = init_db_thread()
                         remove_synced_book(book_id, True, ub_session)
                         ub_session.close()
                 except SQLAlchemyError as e:
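TaskConvert runs in a background worker, so the hunk above gives it its own app-database session via init_db_thread() (the renamed ini()) and closes it after remove_synced_book(). A hedged, generic sketch of that per-thread session pattern with SQLAlchemy (not Calibre-Web's actual init_db_thread):

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

def open_thread_session(db_path="app.db"):
    # Each worker gets its own engine/session bound to the app database.
    engine = create_engine("sqlite:///{}".format(db_path), echo=False)
    return scoped_session(sessionmaker(bind=engine))

thread_session = open_thread_session()
try:
    pass  # e.g. remove_synced_book(book_id, True, thread_session) as in the task above
finally:
    thread_session.close()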
cps/ub.py
@@ -37,6 +37,7 @@ except ImportError as e:
     from flask_dance.consumer.storage.sqla import OAuthConsumerMixin
     oauth_support = True
 except ImportError as e:
     OAuthConsumerMixin = BaseException
     oauth_support = False
 from sqlalchemy import create_engine, exc, exists, event, text
 from sqlalchemy import Column, ForeignKey
@@ -510,7 +511,7 @@ class RemoteAuthToken(Base):


 # Add missing tables during migration of database
-def add_missing_tables(engine, session):
+def add_missing_tables(engine, _session):
     if not engine.dialect.has_table(engine.connect(), "book_read_link"):
         ReadBook.__table__.create(bind=engine)
     if not engine.dialect.has_table(engine.connect(), "bookmark"):
@@ -527,26 +528,26 @@ def add_missing_tables(engine, session):
         Registration.__table__.create(bind=engine)
         with engine.connect() as conn:
             conn.execute("insert into registration (domain, allow) values('%.%',1)")
-            session.commit()
+            _session.commit()


 # migrate all settings missing in registration table
-def migrate_registration_table(engine, session):
+def migrate_registration_table(engine, _session):
     try:
-        session.query(exists().where(Registration.allow)).scalar()
-        session.commit()
+        _session.query(exists().where(Registration.allow)).scalar()
+        _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
             conn.execute("update registration set 'allow' = 1")
-            session.commit()
+            _session.commit()
     try:
         # Handle table exists, but no content
-        cnt = session.query(Registration).count()
+        cnt = _session.query(Registration).count()
         if not cnt:
             with engine.connect() as conn:
                 conn.execute("insert into registration (domain, allow) values('%.%',1)")
-                session.commit()
+                _session.commit()
     except exc.OperationalError:  # Database is not writeable
         print('Settings database is not writeable. Exiting...')
         sys.exit(2)
@@ -564,9 +565,9 @@ def migrate_guest_password(engine):
             sys.exit(2)


-def migrate_shelfs(engine, session):
+def migrate_shelfs(engine, _session):
     try:
-        session.query(exists().where(Shelf.uuid)).scalar()
+        _session.query(exists().where(Shelf.uuid)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")
@@ -574,33 +575,33 @@ def migrate_shelfs(engine, session):
             conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
             conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
             conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
-        for shelf in session.query(Shelf).all():
+        for shelf in _session.query(Shelf).all():
             shelf.uuid = str(uuid.uuid4())
             shelf.created = datetime.datetime.now()
             shelf.last_modified = datetime.datetime.now()
-        for book_shelf in session.query(BookShelf).all():
+        for book_shelf in _session.query(BookShelf).all():
             book_shelf.date_added = datetime.datetime.now()
-        session.commit()
+        _session.commit()

     try:
-        session.query(exists().where(Shelf.kobo_sync)).scalar()
+        _session.query(exists().where(Shelf.kobo_sync)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:

             conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
-            session.commit()
+            _session.commit()

     try:
-        session.query(exists().where(BookShelf.order)).scalar()
+        _session.query(exists().where(BookShelf.order)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
-            session.commit()
+            _session.commit()


-def migrate_readBook(engine, session):
+def migrate_readBook(engine, _session):
     try:
-        session.query(exists().where(ReadBook.read_status)).scalar()
+        _session.query(exists().where(ReadBook.read_status)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
@@ -608,46 +609,46 @@ def migrate_readBook(engine, session):
             conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
             conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
             conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
-        session.commit()
-        test = session.query(ReadBook).filter(ReadBook.last_modified == None).all()
+        _session.commit()
+        test = _session.query(ReadBook).filter(ReadBook.last_modified == None).all()
         for book in test:
             book.last_modified = datetime.datetime.utcnow()
-        session.commit()
+        _session.commit()


-def migrate_remoteAuthToken(engine, session):
+def migrate_remoteAuthToken(engine, _session):
     try:
-        session.query(exists().where(RemoteAuthToken.token_type)).scalar()
-        session.commit()
+        _session.query(exists().where(RemoteAuthToken.token_type)).scalar()
+        _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
             conn.execute("update remote_auth_token set 'token_type' = 0")
-            session.commit()
+            _session.commit()

 # Migrate database to current version, has to be updated after every database change. Currently migration from
 # everywhere to current should work. Migration is done by checking if relevant columns are existing, and than adding
 # rows with SQL commands
-def migrate_Database(session):
-    engine = session.bind
-    add_missing_tables(engine, session)
-    migrate_registration_table(engine, session)
-    migrate_readBook(engine, session)
-    migrate_remoteAuthToken(engine, session)
-    migrate_shelfs(engine, session)
+def migrate_Database(_session):
+    engine = _session.bind
+    add_missing_tables(engine, _session)
+    migrate_registration_table(engine, _session)
+    migrate_readBook(engine, _session)
+    migrate_remoteAuthToken(engine, _session)
+    migrate_shelfs(engine, _session)
     try:
         create = False
-        session.query(exists().where(User.sidebar_view)).scalar()
+        _session.query(exists().where(User.sidebar_view)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
-            session.commit()
+            _session.commit()
         create = True
     try:
         if create:
             with engine.connect() as conn:
                 conn.execute("SELECT language_books FROM user")
-                session.commit()
+                _session.commit()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
@@ -657,32 +658,32 @@ def migrate_Database(session):
                          'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
                          'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
                          'detail_random': constants.DETAIL_RANDOM})
-        session.commit()
+        _session.commit()
     try:
-        session.query(exists().where(User.denied_tags)).scalar()
+        _session.query(exists().where(User.denied_tags)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''")
-            session.commit()
+            _session.commit()
     try:
-        session.query(exists().where(User.view_settings)).scalar()
+        _session.query(exists().where(User.view_settings)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'")
-            session.commit()
+            _session.commit()
     try:
-        session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
+        _session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `kobo_only_shelves_sync` SMALLINT DEFAULT 0")
-            session.commit()
+            _session.commit()

     try:
         # check if name is in User table instead of nickname
-        session.query(exists().where(User.name)).scalar()
+        _session.query(exists().where(User.name)).scalar()
     except exc.OperationalError:
         # Create new table user_id and copy contents of table user into it
         with engine.connect() as conn:
@@ -712,20 +713,20 @@ def migrate_Database(session):
             # delete old user table and rename new user_id table to user:
             conn.execute(text("DROP TABLE user"))
             conn.execute(text("ALTER TABLE user_id RENAME TO user"))
-            session.commit()
-    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
+            _session.commit()
+    if _session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
             is None:
-        create_anonymous_user(session)
+        create_anonymous_user(_session)

     migrate_guest_password(engine)


-def clean_database(session):
+def clean_database(_session):
     # Remove expired remote login tokens
     now = datetime.datetime.now()
-    session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
+    _session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
         filter(RemoteAuthToken.token_type != 1).delete()
-    session.commit()
+    _session.commit()


 # Save downloaded books per user in calibre-web's own database
@@ -750,22 +751,22 @@ def delete_download(book_id):
         session.rollback()

 # Generate user Guest (translated text), as anonymous user, no rights
-def create_anonymous_user(session):
+def create_anonymous_user(_session):
     user = User()
     user.name = "Guest"
     user.email = 'no@email'
     user.role = constants.ROLE_ANONYMOUS
     user.password = ''

-    session.add(user)
+    _session.add(user)
     try:
-        session.commit()
+        _session.commit()
     except Exception:
-        session.rollback()
+        _session.rollback()


 # Generate User admin with admin123 password, and access to everything
-def create_admin_user(session):
+def create_admin_user(_session):
     user = User()
     user.name = "admin"
     user.role = constants.ADMIN_USER_ROLES
@@ -773,13 +774,13 @@ def create_admin_user(session):

     user.password = generate_password_hash(constants.DEFAULT_PASSWORD)

-    session.add(user)
+    _session.add(user)
     try:
-        session.commit()
+        _session.commit()
     except Exception:
-        session.rollback()
+        _session.rollback()

-def ini():
+def init_db_thread():
     global app_DB_path
     engine = create_engine(u'sqlite:///{0}'.format(app_DB_path), echo=False)

@@ -844,8 +845,8 @@ def dispose():
     except Exception:
         pass

-def session_commit(success=None, sess=None):
-    s = sess if sess else session
+def session_commit(success=None, _session=None):
+    s = _session if _session else session
     try:
         s.commit()
         if success:
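The bulk of the cps/ub.py changes is a consistent rename of the session parameter to _session, so these helpers no longer shadow the module-level ub.session they sometimes fall back to; session_commit() keeps that fallback, but its keyword changes from sess= to _session=, which is why the call in the kobo_sync_status hunk above was updated as well. A short usage sketch grounded in that hunk:

# Explicit session passed via the renamed keyword; without it, session_commit()
# falls back to the module-level ub.session.
ub.session_commit(_session=session)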
File diff suppressed because it is too large