#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import division, print_function, unicode_literals
import sys
import os
import datetime
import json
import shutil
import threading
import time
import zipfile
from io import BytesIO
from tempfile import gettempdir

import requests
from babel.dates import format_datetime
from flask_babel import gettext as _

from . import constants, logger, config, get_locale, web_server


log = logger.create()
_REPOSITORY_API_URL = 'https://api.github.com/repos/janeczku/calibre-web'


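# Return True if the given string looks like a full 40-character hexadecimal SHA-1 hash.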
def is_sha1(sha1):
    if len(sha1) != 40:
        return False
    try:
        int(sha1, 16)
    except ValueError:
        return False
    return True


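# Background thread that performs the self-update: it downloads the update archive
# for the configured channel, extracts it to a temporary folder, replaces the
# installed sources and stops the web server so it can restart on the new code.
# Progress and errors are reported through the numeric code returned by
# get_update_status().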
class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = -1
        self.updateIndex = None
        # URL of the zipball to download; set by _stable_available_updates() for the stable channel
        self.updateFile = None

    def get_current_version_info(self):
        if config.config_updatechannel == constants.UPDATE_STABLE:
            return self._stable_version_info()
        return self._nightly_version_info()

    def get_available_updates(self, request_method):
        if config.config_updatechannel == constants.UPDATE_STABLE:
            return self._stable_available_updates(request_method)
        return self._nightly_available_updates(request_method)

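    # Download, extract and install the update, then trigger a server restart.
    # self.status is advanced at each step so the frontend can poll the progress
    # (values of 8 and above indicate errors).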
    def run(self):
        try:
            self.status = 1
            log.debug(u'Download update file')
            headers = {'Accept': 'application/vnd.github.v3+json'}
            r = requests.get(self._get_request_path(), stream=True, headers=headers)
            r.raise_for_status()

            self.status = 2
            log.debug(u'Opening zipfile')
            z = zipfile.ZipFile(BytesIO(r.content))
            self.status = 3
            log.debug(u'Extracting zipfile')
            tmp_dir = gettempdir()
            z.extractall(tmp_dir)
            foldername = os.path.join(tmp_dir, z.namelist()[0])[:-1]
            if not os.path.isdir(foldername):
                self.status = 11
                log.info(u'Extracted contents of zipfile not found in temp folder')
                return
            self.status = 4
            log.debug(u'Replacing files')
            self.update_source(foldername, constants.BASE_DIR)
            self.status = 6
            log.debug(u'Preparing restart of server')
            time.sleep(2)
            web_server.stop(True)
            self.status = 7
            time.sleep(2)
        except requests.exceptions.HTTPError as ex:
            log.info(u'HTTP Error %s', ex)
            self.status = 8
        except requests.exceptions.ConnectionError:
            log.info(u'Connection error')
            self.status = 9
        except requests.exceptions.Timeout:
            log.info(u'Timeout while establishing connection')
            self.status = 10
        except requests.exceptions.RequestException:
            self.status = 11
            log.info(u'General error')

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(cls, filelist):
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(cls, one, two):
        return [x for x in one if x not in set(two)]

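    # For every path scheduled for deletion, keep only the shallowest component that
    # no longer exists in the new file list, so whole directories are removed at once.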
    @classmethod
    def reduce_dirs(cls, delete_files, new_list):
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(cls, remove_items, exclude_items):
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

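    # Move all extracted files over the installed ones, creating missing directories
    # and (except on Windows and macOS) preserving the ownership of replaced files.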
    @classmethod
    def moveallfiles(cls, root_src_dir, root_dst_dir):
        change_permissions = True
        new_permissions = os.stat(root_dst_dir)
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            log.debug('Update on OS-System : %s', sys.platform)

        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                log.debug('Create-Dir: %s', dst_dir)
                if change_permissions:
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                # keep the ownership of an existing target file; fall back to the
                # ownership of the destination root for files that are new
                permission = new_permissions
                if os.path.exists(dst_file):
                    permission = os.stat(dst_file)
                    log.debug('Remove file before copy: %s', dst_file)
                    os.remove(dst_file)
                shutil.move(src_file, dst_dir)
                log.debug('Move File %s to %s', src_file, dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except OSError as e:
                        old_permissions = os.stat(dst_file)
                        log.debug('Fail change permissions of %s. Before: %s:%s After %s:%s error: %s',
                                  dst_file, old_permissions.st_uid, old_permissions.st_gid,
                                  permission.st_uid, permission.st_gid, e)
        return

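    # Replace the installed sources in 'destination' with the extracted ones in 'source'
    # and remove files that no longer exist upstream, skipping user data such as the
    # databases, logs and configuration files listed in 'exclude'.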
    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            os.sep + 'app.db', os.sep + 'calibre-web.log1', os.sep + 'calibre-web.log2', os.sep + 'gdrive.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log', os.sep + '.git', os.sep + 'client_secrets.json',
            os.sep + 'gdrive_credentials', os.sep + 'settings.yaml')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))

        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)
        rf = self.reduce_files(delete_files, exclude)
        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                log.debug("Delete dir %s", item_path)
                shutil.rmtree(item_path, ignore_errors=True)
            else:
                try:
                    log.debug("Delete file %s", item_path)
                    os.remove(item_path)
                except OSError:
                    log.debug("Could not remove: %s", item_path)
        shutil.rmtree(source, ignore_errors=True)

    @classmethod
    def _nightly_version_info(cls):
        if is_sha1(constants.NIGHTLY_VERSION[0]) and len(constants.NIGHTLY_VERSION[1]) > 0:
            return {'version': constants.NIGHTLY_VERSION[0], 'datetime': constants.NIGHTLY_VERSION[1]}
        return False

    @classmethod
    def _stable_version_info(cls):
        return constants.STABLE_VERSION  # Current version

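    # Compare the installed commit with the head of the master branch on GitHub and
    # return the update status as a JSON string for the frontend.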
    def _nightly_available_updates(self, request_method):
        tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
        if request_method == "GET":
            repository_url = _REPOSITORY_API_URL
            status, commit = self._load_remote_data(repository_url + '/git/refs/heads/master')
            parents = []
            if status['message'] != '':
                return json.dumps(status)

            if 'object' not in commit:
                status['message'] = _(u'Unexpected data while reading update information')
                return json.dumps(status)

            if commit['object']['sha'] == status['current_commit_hash']:
                status.update({
                    'update': False,
                    'success': True,
                    'message': _(u'No update available. You already have the latest version installed')
                })
                return json.dumps(status)

            # a new update is available
            status['update'] = True

            try:
                headers = {'Accept': 'application/vnd.github.v3+json'}
                r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'], headers=headers)
                r.raise_for_status()
                update_data = r.json()
            except requests.exceptions.HTTPError as e:
                status['message'] = _(u'HTTP Error') + ' ' + str(e)
            except requests.exceptions.ConnectionError:
                status['message'] = _(u'Connection error')
            except requests.exceptions.Timeout:
                status['message'] = _(u'Timeout while establishing connection')
            except requests.exceptions.RequestException:
                status['message'] = _(u'General error')

            if status['message'] != '':
                return json.dumps(status)

            if 'committer' in update_data and 'message' in update_data:
                status['success'] = True
                status['message'] = _(
                    u'A new update is available. Click on the button below to update to the latest version.')

                new_commit_date = datetime.datetime.strptime(
                    update_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                parents.append(
                    [
                        format_datetime(new_commit_date, format='short', locale=get_locale()),
                        update_data['message'],
                        update_data['sha']
                    ]
                )

                # it only makes sense to analyze the parents if we know the current commit hash
                if status['current_commit_hash'] != '':
                    try:
                        parent_commit = update_data['parents'][0]
                        # limit the maximum search depth
                        remaining_parents_cnt = 10
                    except IndexError:
                        remaining_parents_cnt = None

                    if remaining_parents_cnt is not None:
                        while True:
                            if remaining_parents_cnt == 0:
                                break

                            # if we are more than one commit behind, walk up the parent chain
                            if parent_commit['sha'] != status['current_commit_hash']:
                                try:
                                    headers = {'Accept': 'application/vnd.github.v3+json'}
                                    r = requests.get(parent_commit['url'], headers=headers)
                                    r.raise_for_status()
                                    parent_data = r.json()

                                    parent_commit_date = datetime.datetime.strptime(
                                        parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                                    parent_commit_date = format_datetime(
                                        parent_commit_date, format='short', locale=get_locale())

                                    parents.append([parent_commit_date,
                                                    parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
                                    parent_commit = parent_data['parents'][0]
                                    remaining_parents_cnt -= 1
                                except Exception:
                                    # it isn't crucial if we can't get information about the parent
                                    break
                            else:
                                # parent is our current version
                                break
            else:
                status['success'] = False
                status['message'] = _(u'Could not fetch update information')
            return json.dumps(status)
        return ''

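    # Compare the installed release with the releases published on GitHub and decide
    # which tag to offer as the next update; the result is returned as a JSON string.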
    def _stable_available_updates(self, request_method):
        if request_method == "GET":
            parents = []
            # repository_url = 'https://api.github.com/repos/flatpak/flatpak/releases'  # test URL
            repository_url = _REPOSITORY_API_URL + '/releases?per_page=100'
            status, commit = self._load_remote_data(repository_url)
            if status['message'] != '':
                return json.dumps(status)
            if not commit:
                status['success'] = True
                status['message'] = _(u'No release information available')
                return json.dumps(status)
            version = status['current_commit_hash']
            current_version = status['current_commit_hash'].split('.')

            if 'tag_name' not in commit[0]:
                status['message'] = _(u'Unexpected data while reading update information')
                return json.dumps(status)
            # we are already on the newest version, no update available
            if commit[0]['tag_name'] == version:
                status.update({
                    'update': False,
                    'success': True,
                    'message': _(u'No update available. You already have the latest version installed')
                })
                return json.dumps(status)

            i = len(commit) - 1
            while i >= 0:
                if 'tag_name' not in commit[i] or 'body' not in commit[i]:
                    status['message'] = _(u'Unexpected data while reading update information')
                    return json.dumps(status)
                major_version_update = int(commit[i]['tag_name'].split('.')[0])
                minor_version_update = int(commit[i]['tag_name'].split('.')[1])
                patch_version_update = int(commit[i]['tag_name'].split('.')[2])

                current_version[0] = int(current_version[0])
                current_version[1] = int(current_version[1])
                try:
                    current_version[2] = int(current_version[2])
                except ValueError:
                    current_version[2] = int(current_version[2].split(' ')[0]) - 1

                # if the major versions are identical, collect every newer release and update to the newest one
                if major_version_update == current_version[0]:
                    if (minor_version_update == current_version[1] and
                            patch_version_update > current_version[2]) or \
                            minor_version_update > current_version[1]:
                        parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '<p>')])
                    i -= 1
                    continue
                if major_version_update < current_version[0]:
                    i -= 1
                    continue
                if major_version_update > current_version[0]:
                    # a newer major version exists: update to the last release before the major jump,
                    # unless the current version is already the last one before it
                    if int(commit[i + 1]['tag_name'].split('.')[1]) == current_version[1]:
                        parents.append([commit[i]['tag_name'],
                                        commit[i]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
                        status.update({
                            'update': True,
                            'success': True,
                            'message': _(u'A new update is available. Click on the button below to '
                                         u'update to version: %(version)s', version=commit[i]['tag_name']),
                            'history': parents
                        })
                        self.updateFile = commit[i]['zipball_url']
                    else:
                        status.update({
                            'update': True,
                            'success': True,
                            'message': _(u'A new update is available. Click on the button below to '
                                         u'update to version: %(version)s', version=commit[i]['tag_name']),
                            'history': parents
                        })
                        self.updateFile = commit[i + 1]['zipball_url']
                    break
            if i == -1:
                status.update({
                    'update': True,
                    'success': True,
                    'message': _(
                        u'Click on the button below to update to the latest stable version.'),
                    'history': parents
                })
                self.updateFile = commit[0]['zipball_url']
            return json.dumps(status)

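    # Return the download URL of the update archive for the configured update channel.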
    def _get_request_path(self):
        if config.config_updatechannel == constants.UPDATE_STABLE:
            return self.updateFile
        return _REPOSITORY_API_URL + '/zipball/master'

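    # Fetch JSON data from the GitHub API and return it together with a status dict
    # that is pre-filled with the currently installed version.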
    def _load_remote_data(self, repository_url):
        status = {
            'update': False,
            'success': False,
            'message': '',
            'current_commit_hash': ''
        }
        commit = None
        version = self.get_current_version_info()
        if version is False:
            status['current_commit_hash'] = _(u'Unknown')
        else:
            status['current_commit_hash'] = version['version']
        try:
            headers = {'Accept': 'application/vnd.github.v3+json'}
            r = requests.get(repository_url, headers=headers)
            commit = r.json()
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if commit and 'message' in commit:
                status['message'] = _(u'HTTP Error') + ': ' + commit['message']
            else:
                status['message'] = _(u'HTTP Error') + ': ' + str(e)
        except requests.exceptions.ConnectionError:
            status['message'] = _(u'Connection error')
        except requests.exceptions.Timeout:
            status['message'] = _(u'Timeout while establishing connection')
        except requests.exceptions.RequestException:
            status['message'] = _(u'General error')

        return status, commit