update_manager: support database changes
Signed-off-by: Eric Callahan <arksine.code@gmail.com>

parent c081fa49a1
commit 9ab6d0e83d

@@ -80,9 +80,6 @@ class AppDeploy(BaseDeploy):
         self.info_tags: List[str] = config.getlist("info_tags", [])
         self.is_service = config.getboolean("is_system_service", True)
-        storage = self._load_storage()
-        self.need_channel_update = storage.get('need_channel_upate', False)
-        self._is_valid = storage.get('is_valid', False)
 
         # We need to fetch all potential options for an Application. Not
         # all options apply to each subtype, however we can't limit the

@@ -111,6 +108,12 @@ class AppDeploy(BaseDeploy):
             app_path = pathlib.Path(app_path).expanduser()
         return app_path.joinpath('.git').exists()
 
+    async def initialize(self) -> Dict[str, Any]:
+        storage = await super().initialize()
+        self.need_channel_update = storage.get('need_channel_upate', False)
+        self._is_valid = storage.get('is_valid', False)
+        return storage
+
     def _calc_config_hash(self) -> str:
         cfg_hash = self.config.get_hash()
         if self.app_params is None:

@@ -37,25 +37,24 @@ class BaseDeploy:
         if cfg_hash is None:
             cfg_hash = config.get_hash().hexdigest()
         self.cfg_hash = cfg_hash
-        storage: Dict[str, Any] = self._load_storage()
+
+    async def initialize(self) -> Dict[str, Any]:
+        umdb = self.cmd_helper.get_umdb()
+        storage: Dict[str, Any] = await umdb.get(self.name, {})
         self.last_refresh_time: float = storage.get('last_refresh_time', 0.0)
         self.last_cfg_hash: str = storage.get('last_config_hash', "")
+        return storage
 
     def needs_refresh(self) -> bool:
-        storage = self._load_storage()
-        last_cfg_hash = storage.get('last_config_hash', "")
         next_refresh_time = self.last_refresh_time + self.refresh_interval
         return (
-            self.cfg_hash != last_cfg_hash or
+            self.cfg_hash != self.last_cfg_hash or
             time.time() > next_refresh_time
         )
 
     def get_last_refresh_time(self) -> float:
         return self.last_refresh_time
 
-    def _load_storage(self) -> Dict[str, Any]:
-        umdb = self.cmd_helper.get_umdb()
-        return umdb.get(self.name, {})
-
     async def refresh(self) -> None:
         pass

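The hunk above moves the persisted-state load out of the synchronous constructor and into an awaitable initialize(), with needs_refresh() deciding from the in-memory values whether a refresh is due. A minimal, self-contained sketch of that two-phase pattern follows; the class, its parameters, and the plain dict standing in for the database namespace are illustrative, not Moonraker's actual API.

import asyncio
import time
from typing import Any, Dict

class ToyDeploy:
    def __init__(self, name: str, cfg_hash: str, refresh_interval: float = 3600.) -> None:
        # The constructor stays synchronous; no storage access happens here.
        self.name = name
        self.cfg_hash = cfg_hash
        self.refresh_interval = refresh_interval
        self.last_refresh_time: float = 0.
        self.last_cfg_hash: str = ""

    async def initialize(self, umdb: Dict[str, Any]) -> Dict[str, Any]:
        # Awaitable counterpart of the old sync load; the real code
        # awaits umdb.get(self.name, {}) on the namespace wrapper.
        storage: Dict[str, Any] = umdb.get(self.name, {})
        self.last_refresh_time = storage.get('last_refresh_time', 0.)
        self.last_cfg_hash = storage.get('last_config_hash', "")
        return storage

    def needs_refresh(self) -> bool:
        # Refresh when the config hash changed or the interval elapsed.
        next_refresh_time = self.last_refresh_time + self.refresh_interval
        return (
            self.cfg_hash != self.last_cfg_hash or
            time.time() > next_refresh_time
        )

async def main() -> None:
    umdb = {'klipper': {'last_refresh_time': time.time(), 'last_config_hash': "abc"}}
    dep = ToyDeploy('klipper', cfg_hash="abc")
    await dep.initialize(umdb)
    print(dep.needs_refresh())

asyncio.run(main())
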
@@ -74,6 +73,7 @@ class BaseDeploy:
     def _save_state(self) -> None:
         umdb = self.cmd_helper.get_umdb()
         self.last_refresh_time = time.time()
+        self.last_cfg_hash = self.cfg_hash
         umdb[self.name] = self.get_persistent_data()
 
     def log_exc(self, msg: str, traceback: bool = True) -> ServerError:

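Reads in this diff go through awaitable calls while _save_state() writes back with plain item assignment. The real object returned by cmd_helper.get_umdb() is Moonraker's database namespace wrapper; the class below is only a rough stand-in, written to show the mixed sync/async surface the hunks rely on (awaited get()/keys()/pop(), synchronous __setitem__), and its behavior is an assumption rather than a description of the actual component.

import asyncio
from typing import Any, Dict, List

class ToyNamespace:
    """In-memory stand-in for an update_manager database namespace."""
    def __init__(self) -> None:
        self._data: Dict[str, Any] = {}

    async def get(self, key: str, default: Any = None) -> Any:
        return self._data.get(key, default)

    async def keys(self) -> List[str]:
        return list(self._data.keys())

    async def pop(self, key: str, default: Any = None) -> Any:
        return self._data.pop(key, default)

    def __setitem__(self, key: str, value: Any) -> None:
        # Writes are fire-and-forget in the diff (_save_state is sync),
        # so item assignment stays synchronous here as well.
        self._data[key] = value

async def main() -> None:
    umdb = ToyNamespace()
    umdb['klipper'] = {'last_config_hash': 'abc'}   # like _save_state()
    print(await umdb.get('klipper', {}))            # like initialize()
    print(await umdb.keys())                        # like the prune loop
    await umdb.pop('stale_entry', None)

asyncio.run(main())
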
@@ -34,10 +34,8 @@ class GitDeploy(AppDeploy):
                  app_params: Optional[Dict[str, Any]] = None
                  ) -> None:
         super().__init__(config, cmd_helper, app_params)
-        storage = self._load_storage()
         self.repo = GitRepo(cmd_helper, self.path, self.name,
-                            self.origin, self.moved_origin,
-                            storage)
+                            self.origin, self.moved_origin)
         if self.type != 'git_repo':
             self.need_channel_update = True

@@ -47,6 +45,11 @@ class GitDeploy(AppDeploy):
         await new_app.reinstall()
         return new_app
 
+    async def initialize(self) -> Dict[str, Any]:
+        storage = await super().initialize()
+        self.repo.restore_state(storage)
+        return storage
+
     async def refresh(self) -> None:
         try:
             await self._update_repo_state()

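GitRepo no longer receives storage in its constructor; GitDeploy hands the persisted dict to repo.restore_state() once the awaitable load has finished. A compact sketch of that deferred-injection shape, using toy classes and made-up keys rather than the real GitRepo:

import asyncio
from typing import Any, Dict

class ToyRepoState:
    def __init__(self, alias: str) -> None:
        self.alias = alias
        # Defaults only; real values arrive later via restore_state().
        self.valid: bool = False
        self.owner: str = "?"

    def restore_state(self, storage: Dict[str, Any]) -> None:
        self.valid = storage.get('repo_valid', False)
        self.owner = storage.get('git_owner', "?")

class ToyGitDeploy:
    def __init__(self, name: str) -> None:
        self.name = name
        self.repo = ToyRepoState(name)      # constructed without storage

    async def initialize(self, umdb: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
        storage = umdb.get(self.name, {})   # the real code awaits this lookup
        self.repo.restore_state(storage)    # inject persisted state afterwards
        return storage

print(asyncio.run(ToyGitDeploy('klipper').initialize({'klipper': {'repo_valid': True}})))
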
@@ -126,6 +129,12 @@ class GitDeploy(AppDeploy):
         self.notify_status("Reinstall Complete", is_complete=True)
 
     async def reinstall(self):
+        # Clear the persistent storage prior to a channel swap.
+        # After the next update is complete new data will be
+        # restored.
+        umdb = self.cmd_helper.get_umdb()
+        await umdb.pop(self.name, None)
+        await self.initialize()
         await self.recover(True, True)
 
     def get_update_status(self) -> Dict[str, Any]:

@@ -210,8 +219,7 @@ class GitRepo:
                  git_path: pathlib.Path,
                  alias: str,
                  origin_url: str,
-                 moved_origin_url: Optional[str],
-                 storage: Dict[str, Any]
+                 moved_origin_url: Optional[str]
                  ) -> None:
         self.server = cmd_helper.get_server()
         self.cmd_helper = cmd_helper

@@ -222,6 +230,23 @@ class GitRepo:
         self.backup_path = git_dir.joinpath(f".{git_base}_repo_backup")
         self.origin_url = origin_url
         self.moved_origin_url = moved_origin_url
+        self.recovery_message = \
+            f"""
+            Manually restore via SSH with the following commands:
+            sudo service {self.alias} stop
+            cd {git_dir}
+            rm -rf {git_base}
+            git clone {self.origin_url}
+            sudo service {self.alias} start
+            """
+
+        self.init_evt: Optional[asyncio.Event] = None
+        self.initialized: bool = False
+        self.git_operation_lock = asyncio.Lock()
+        self.fetch_timeout_handle: Optional[asyncio.Handle] = None
+        self.fetch_input_recd: bool = False
+
+    def restore_state(self, storage: Dict[str, Any]) -> None:
         self.valid_git_repo: bool = storage.get('repo_valid', False)
         self.git_owner: str = storage.get('git_owner', "?")
         self.git_repo_name: str = storage.get('git_repo_name', "?")

@@ -239,21 +264,6 @@ class GitRepo:
         self.git_messages: List[str] = storage.get('git_messages', [])
         self.commits_behind: List[Dict[str, Any]] = storage.get(
             'commits_behind', [])
-        self.recovery_message = \
-            f"""
-            Manually restore via SSH with the following commands:
-            sudo service {self.alias} stop
-            cd {git_dir}
-            rm -rf {git_base}
-            git clone {self.origin_url}
-            sudo service {self.alias} start
-            """
-
-        self.init_evt: Optional[asyncio.Event] = None
-        self.initialized: bool = False
-        self.git_operation_lock = asyncio.Lock()
-        self.fetch_timeout_handle: Optional[asyncio.Handle] = None
-        self.fetch_input_recd: bool = False
 
     def get_persisent_data(self) -> Dict[str, Any]:
         return {

@@ -88,9 +88,9 @@ class UpdateManager:
         self.updaters['system'] = PackageDeploy(config, self.cmd_helper)
         db: DBComp = self.server.lookup_component('database')
         kpath = db.get_item("moonraker", "update_manager.klipper_path",
-                            KLIPPER_DEFAULT_PATH)
+                            KLIPPER_DEFAULT_PATH).result()
         kenv_path = db.get_item("moonraker", "update_manager.klipper_exec",
-                                KLIPPER_DEFAULT_EXEC)
+                                KLIPPER_DEFAULT_EXEC).result()
         if (
             os.path.exists(kpath) and
             os.path.exists(kenv_path)

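The added .result() calls suggest db.get_item() now hands back a future-like object rather than the value itself, and that the lookup is already complete by the time the constructor reads it; both points are assumptions on my part, not something the hunk states. The general pattern of returning an already-resolved asyncio.Future, so callers may either await it or read .result() immediately, looks roughly like this:

import asyncio
from typing import Any

def get_item_sketch(loop: asyncio.AbstractEventLoop, value: Any) -> "asyncio.Future[Any]":
    # Wrap an immediately-available value in a Future; callers can
    # await it or call .result() right away because it is already done.
    fut: "asyncio.Future[Any]" = loop.create_future()
    fut.set_result(value)
    return fut

async def main() -> None:
    loop = asyncio.get_running_loop()
    kpath = get_item_sketch(loop, "~/klipper").result()              # synchronous style
    kenv = await get_item_sketch(loop, "~/klippy-env/bin/python")    # async style
    print(kpath, kenv)

asyncio.run(main())
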
@@ -140,14 +140,6 @@ class UpdateManager:
                 raise config.error(
                     f"Invalid type '{client_type}' for section [{section}]")
 
-        # Prune stale data from the database
-        umdb = self.cmd_helper.get_umdb()
-        db_keys = umdb.keys()
-        for key in db_keys:
-            if key not in self.updaters:
-                logging.info(f"Removing stale update_manager data: {key}")
-                umdb.pop(key, None)
-
         self.cmd_request_lock = asyncio.Lock()
         self.klippy_identified_evt: Optional[asyncio.Event] = None

@@ -186,10 +178,17 @@ class UpdateManager:
             "server:klippy_identified", self._set_klipper_repo)
 
     async def component_init(self) -> None:
+        # Prune stale data from the database
+        umdb = self.cmd_helper.get_umdb()
+        db_keys = await umdb.keys()
+        for key in db_keys:
+            if key not in self.updaters:
+                logging.info(f"Removing stale update_manager data: {key}")
+                await umdb.pop(key, None)
+
         async with self.cmd_request_lock:
             for updater in list(self.updaters.values()):
-                if isinstance(updater, PackageDeploy):
-                    await updater.initialize()
+                await updater.initialize()
                 if updater.needs_refresh():
                     await updater.refresh()
         if self.refresh_timer is not None:

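Pruning moves from the constructor into component_init(), after every updater has been registered, because keys() and pop() are awaitable on the database-backed namespace. The loop reduces to the small stand-alone version below; the dictionary-backed store, the updaters mapping, and their contents are made up for the example.

import asyncio
import logging
from typing import Any, Dict

logging.basicConfig(level=logging.INFO)

async def prune_stale_entries(umdb: Dict[str, Any], updaters: Dict[str, Any]) -> None:
    # Drop persisted state for sections that no longer exist in the config.
    for key in list(umdb.keys()):      # the real code awaits umdb.keys()
        if key not in updaters:
            logging.info(f"Removing stale update_manager data: {key}")
            umdb.pop(key, None)        # the real code awaits umdb.pop()

async def main() -> None:
    umdb = {'klipper': {}, 'old_client': {}}
    updaters = {'klipper': object(), 'system': object()}
    await prune_stale_entries(umdb, updaters)
    print(sorted(umdb))                # ['klipper']

asyncio.run(main())
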
@@ -224,6 +223,9 @@ class UpdateManager:
             'executable': executable
         })
         async with self.cmd_request_lock:
+            umdb = self.cmd_helper.get_umdb()
+            await umdb.pop('klipper', None)
+            await self.updaters['klipper'].initialize()
             await self.updaters['klipper'].refresh()
         if need_notification:
             vinfo: Dict[str, Any] = {}

@@ -789,11 +791,12 @@ class PackageDeploy(BaseDeploy):
                  ) -> None:
         super().__init__(config, cmd_helper, "system", "", "")
         cmd_helper.set_package_updater(self)
-        storage = self._load_storage()
         self.use_packagekit = config.getboolean("enable_packagekit", True)
-        self.available_packages: List[str] = storage.get('packages', [])
+        self.available_packages: List[str] = []
 
-    async def initialize(self) -> None:
+    async def initialize(self) -> Dict[str, Any]:
+        storage = await super().initialize()
+        self.available_packages = storage.get('packages', [])
         provider: BasePackageProvider
         try_fallback = True
         if self.use_packagekit:

@@ -820,6 +823,7 @@ class PackageDeploy(BaseDeploy):
            logging.info("PackageDeploy: Using APT CLI Provider")
            provider = fallback
         self.provider = provider
+        return storage
 
     async def _get_fallback_provider(self) -> Optional[BasePackageProvider]:
         # Currently only the API Fallback provider is available

@@ -1298,7 +1302,9 @@ class WebClientDeploy(BaseDeploy):
            raise config.error(
                "Invalid value for option 'persistent_files': "
                "'.version' can not be persistent")
-        storage = self._load_storage()
+
+    async def initialize(self) -> Dict[str, Any]:
+        storage = await super().initialize()
         self.version: str = storage.get('version', "?")
         self.remote_version: str = storage.get('remote_version', "?")
         dl_info: List[Any] = storage.get('dl_info', ["?", "?", 0])

@@ -1307,6 +1313,7 @@ class WebClientDeploy(BaseDeploy):
         logging.info(f"\nInitializing Client Updater: '{self.name}',"
                      f"\nChannel: {self.channel}"
                      f"\npath: {self.path}")
+        return storage
 
     async def _get_local_version(self) -> None:
         version_path = self.path.joinpath(".version")

@@ -55,7 +55,18 @@ class ZipDeploy(AppDeploy):
                 "Invalid url set for 'origin' option in section "
                 f"[{config.get_name()}]. Unable to extract owner/repo.")
         self.host_repo: str = config.get('host_repo', self.official_repo)
-        storage = self._load_storage()
+        self.package_list: List[str] = []
+        self.python_pkg_list: List[str] = []
+        self.release_download_info: Tuple[str, str, int] = ("?", "?", 0)
+
+    @staticmethod
+    async def from_application(app: AppDeploy) -> ZipDeploy:
+        new_app = ZipDeploy(app.config, app.cmd_helper, app.app_params)
+        await new_app.reinstall()
+        return new_app
+
+    async def initialize(self) -> Dict[str, Any]:
+        storage = await super().initialize()
         self.detected_type: str = storage.get('detected_type', "?")
         self.source_checksum: str = storage.get("source_checksum", "?")
         self.pristine = storage.get('pristine', False)

@@ -70,15 +81,7 @@ class ZipDeploy(AppDeploy):
         self.latest_build_date: int = storage.get('latest_build_date', 0)
         self.errors: List[str] = storage.get('errors', [])
         self.commit_log: List[Dict[str, Any]] = storage.get('commit_log', [])
-        self.package_list: List[str] = []
-        self.python_pkg_list: List[str] = []
-        self.release_download_info: Tuple[str, str, int] = ("?", "?", 0)
-
-    @staticmethod
-    async def from_application(app: AppDeploy) -> ZipDeploy:
-        new_app = ZipDeploy(app.config, app.cmd_helper, app.app_params)
-        await new_app.reinstall()
-        return new_app
+        return storage
 
     def get_persistent_data(self) -> Dict[str, Any]:
         storage = super().get_persistent_data()

@@ -399,6 +402,12 @@ class ZipDeploy(AppDeploy):
         await self.update(force_dep_update=force_dep_update)
 
     async def reinstall(self) -> None:
+        # Clear the persistent storage prior to a channel swap.
+        # After the next update is complete new data will be
+        # restored.
+        umdb = self.cmd_helper.get_umdb()
+        await umdb.pop(self.name, None)
+        await self.initialize()
         await self.recover(force_dep_update=True)
 
     def get_update_status(self) -> Dict[str, Any]:

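Both GitDeploy and ZipDeploy now drop their namespace entry before a reinstall so a channel swap never mixes old and new metadata, then re-run initialize() against the now-empty entry and rebuild through recover(). The ordering, reduced to a sketch with toy helpers rather than the real deploy classes:

import asyncio
from typing import Any, Dict

async def reinstall_sketch(umdb: Dict[str, Any], name: str) -> None:
    # 1. Clear persisted state so defaults are used on the next load.
    umdb.pop(name, None)                  # the real code awaits umdb.pop()
    # 2. Re-run the awaitable load; every field falls back to its default.
    storage: Dict[str, Any] = umdb.get(name, {})
    print(f"{name}: reinitialized with {storage!r}")
    # 3. The real classes then call recover()/update(), which repopulate
    #    the entry through _save_state() once the reinstall finishes.

asyncio.run(reinstall_sketch({'mainsail': {'version': 'v2.0.0'}}, 'mainsail'))
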