diff --git a/.devcontainer/requirements-dev.txt b/.devcontainer/requirements-dev.txt index 2aa9d847..a88386cf 100644 --- a/.devcontainer/requirements-dev.txt +++ b/.devcontainer/requirements-dev.txt @@ -27,4 +27,5 @@ aiohttp-jinja2 beautifulsoup4 firebase-admin aiofile -grpcio \ No newline at end of file +grpcio +aioping \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 1a92a5ed..f97493d3 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -16,5 +16,6 @@ "editor.defaultFormatter": "vscode.html-language-features", "editor.tabSize": 2, }, - "python.analysis.completeFunctionParens": true + "python.analysis.completeFunctionParens": true, + "files.eol": "\n" } diff --git a/hassio-google-drive-backup/CHANGELOG.md b/hassio-google-drive-backup/CHANGELOG.md index 685179c8..417eb0d5 100644 --- a/hassio-google-drive-backup/CHANGELOG.md +++ b/hassio-google-drive-backup/CHANGELOG.md @@ -1,3 +1,6 @@ +## v0.109.2 [2022-11-15] +* Fixed a bug where disabling deletion from Google Drive and enabling deletes after upload could cause backups in Google Drive to be deleted. + ## v0.109.1 [2022-11-07] * If configured from the browser, defaults to a "dark" theme if haven't already configured custom colors * Makes the interval at which the addon publishes sensors to Home Assistant configurable (see the "Uncommon Options" settings) @@ -15,8 +18,3 @@ * Fixed an error preventing stopped addons form being started if they hit errors while stopping. * Fixed many, many, many gramatical errors thanks to [@markvader's](https://github.com/markvader) [#665](https://github.com/sabeechen/hassio-google-drive-backup/pull/665). * Fixed a missing config option in the addon schema, maximum_upload_chunk_bytes. - -## v0.108.2 [2022-06-03] -* Switched to ignoring 'upgrade' backups by default for new users. -* Added a warning for existing users if you're not ignoring upgrade backups. 
-* Added a warning about google's OOB deprecation for private credential users. diff --git a/hassio-google-drive-backup/backup/config/settings.py b/hassio-google-drive-backup/backup/config/settings.py index 66255669..b015ecba 100644 --- a/hassio-google-drive-backup/backup/config/settings.py +++ b/hassio-google-drive-backup/backup/config/settings.py @@ -127,6 +127,7 @@ class Setting(Enum): EXCHANGER_TIMEOUT_SECONDS = "exchanger_timeout_seconds" HA_REPORTING_INTERVAL_SECONDS = "ha_reporting_interval_seconds" LONG_TERM_STALE_BACKUP_SECONDS = "long_term_stale_backup_seconds" + PING_TIMEOUT = "ping_timeout" # Old, deprecated settings DEPRECTAED_MAX_BACKUPS_IN_HA = "max_snapshots_in_hassio" @@ -279,6 +280,7 @@ def key(self): Setting.EXCHANGER_TIMEOUT_SECONDS: 10, Setting.HA_REPORTING_INTERVAL_SECONDS: 10, Setting.LONG_TERM_STALE_BACKUP_SECONDS: 60 * 60 * 24, + Setting.PING_TIMEOUT: 5 } _STAGING_DEFAULTS = { @@ -410,7 +412,8 @@ def key(self): Setting.BACKUP_STARTUP_DELAY_MINUTES: "float(0,)?", Setting.EXCHANGER_TIMEOUT_SECONDS: "float(0,)?", Setting.HA_REPORTING_INTERVAL_SECONDS: "int(1,)?", - Setting.LONG_TERM_STALE_BACKUP_SECONDS: "int(1,)?" + Setting.LONG_TERM_STALE_BACKUP_SECONDS: "int(1,)?", + Setting.PING_TIMEOUT: "float(0,)?" 
} PRIVATE = [ diff --git a/hassio-google-drive-backup/backup/debugworker.py b/hassio-google-drive-backup/backup/debugworker.py index 1b3eb333..e98d446a 100644 --- a/hassio-google-drive-backup/backup/debugworker.py +++ b/hassio-google-drive-backup/backup/debugworker.py @@ -1,6 +1,6 @@ import asyncio import socket -import subprocess +import aioping from datetime import datetime, timedelta from aiohttp import ClientSession, ClientTimeout @@ -184,26 +184,21 @@ def formatDate(self, date: datetime): async def getPingInfo(self): who = self.config.get(Setting.DRIVE_HOST_NAME) ips = await self.resolve(who) - pings = {who: {}} + results = {who: {}} + tasks = {who: {}} for ip in ips: - pings[who][ip] = "Unknown" - command = "fping -t 5000 " + " ".join(ips) - - # fping each server - process = await asyncio.create_subprocess_shell(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - stdout_data, stderr_data = await process.communicate() - - for line in stdout_data.decode().split("\n"): - for host in pings.keys(): - for address in pings[host].keys(): - if line.startswith(address): - response = line[len(address):].strip() - if response.startswith(":"): - response = response[2:].strip() - if response.startswith("is"): - response = response[3:].strip() - pings[host][address] = response - return pings + results[who][ip] = "Unknown" + tasks[who][ip] = asyncio.create_task(aioping.ping(ip, timeout=self.config.get(Setting.PING_TIMEOUT))) + + # ping each server + for server in tasks.keys(): + for ip in tasks[server].keys(): + try: + time = await tasks[server][ip] + results[server][ip] = f"{round(time * 1000, 0)} ms" + except Exception as e: + results[server][ip] = str(e) + return results async def resolve(self, who: str): try: diff --git a/hassio-google-drive-backup/backup/model/model.py b/hassio-google-drive-backup/backup/model/model.py index 4461f11b..d7f36531 100644 --- a/hassio-google-drive-backup/backup/model/model.py +++ b/hassio-google-drive-backup/backup/model/model.py @@ 
-78,6 +78,9 @@ def postSync(self) -> None: def detail(self) -> str: return "" + def isDestination(self) -> bool: + return False + # Gets called after reading state but before any changes are made # to check for additional errors. def checkBeforeChanges(self) -> None: @@ -92,6 +95,9 @@ def isWorking(self): def might_be_oob_creds(self) -> bool: return False + def isDestination(self) -> bool: + return True + @singleton class Model(): @@ -336,6 +342,9 @@ def _nextPurge(self, source: BackupSource, backups, findNext=False): """ if not source.enabled() or len(backups) == 0: return None, None + if source.maxCount() == 0 and source.isDestination(): + # When maxCount is zero for a destination, we should never delete from it. + return None, None if source.maxCount() == 0 and not self.config.get(Setting.DELETE_AFTER_UPLOAD): return None, None diff --git a/hassio-google-drive-backup/backup/model/simulatedsource.py b/hassio-google-drive-backup/backup/model/simulatedsource.py index 2c50d338..e7289209 100644 --- a/hassio-google-drive-backup/backup/model/simulatedsource.py +++ b/hassio-google-drive-backup/backup/model/simulatedsource.py @@ -10,7 +10,7 @@ class SimulatedSource(BackupDestination): - def __init__(self, name): + def __init__(self, name, is_destination=False): self._name = name self.current: Dict[str, DummyBackupSource] = {} self.saved = [] @@ -25,6 +25,10 @@ def __init__(self, name): self.backup_type = "Full" self.working = False self.needConfig = None + self.is_destination = is_destination + + def isDestination(self): + return self.is_destination def setEnabled(self, value): self._enabled = value diff --git a/hassio-google-drive-backup/config.json b/hassio-google-drive-backup/config.json index 6e29f0ca..4ce64d1a 100644 --- a/hassio-google-drive-backup/config.json +++ b/hassio-google-drive-backup/config.json @@ -1,6 +1,6 @@ { "name": "Home Assistant Google Drive Backup", - "version": "0.109.1", + "version": "0.109.2", "slug": "hassio_google_drive_backup", 
"description": "Automatically manage backups between Home Assistant and Google Drive", "arch": ["armhf", "armv7", "aarch64", "amd64", "i386"], diff --git a/hassio-google-drive-backup/dev/request_interceptor.py b/hassio-google-drive-backup/dev/request_interceptor.py index 0d24da11..d5e573dd 100644 --- a/hassio-google-drive-backup/dev/request_interceptor.py +++ b/hassio-google-drive-backup/dev/request_interceptor.py @@ -1,12 +1,14 @@ import re from aiohttp.web import Request, Response -from asyncio import Event, sleep +from asyncio import Event from aiohttp.web_response import json_response from injector import singleton, inject +from backup.time import Time class UrlMatch(): - def __init__(self, url, fail_after=None, status=None, response=None, wait=False, sleep=None, fail_for=None): + def __init__(self, time: Time, url, fail_after=None, status=None, response=None, wait=False, sleep=None, fail_for=None): + self.time = time self.url: str = url self.fail_after: int = fail_after self.status: int = status @@ -21,10 +23,15 @@ def __init__(self, url, fail_after=None, status=None, response=None, wait=False, self.fail_for = fail_for self.responses = [] self._calls = 0 + self.time = time def addResponse(self, response): self.responses.append(response) + def stop(self): + self.wait_event.set() + self.trigger_event.set() + def isMatch(self, request): return re.match(self.url, request.url.path) @@ -51,7 +58,7 @@ async def _doAction(self, request: Request): self.trigger_event.set() await self.wait_event.wait() elif self.sleep is not None: - await sleep(self.sleep) + await self.time.sleepAsync(self.sleep, early_exit=self.wait_event) async def called(self, request: Request): if self.fail_after is None or self.fail_after <= 0: @@ -82,9 +89,14 @@ class RequestInterceptor: def __init__(self): self._matchers = [] self._history = [] + self.time = Time() + + def stop(self): + for matcher in self._matchers: + matcher.stop() def setError(self, url, status=None, fail_after=None, 
fail_for=None, response=None) -> UrlMatch: - matcher = UrlMatch(url, fail_after, status=status, response=response, fail_for=fail_for) + matcher = UrlMatch(self.time, url, fail_after, status=status, response=response, fail_for=fail_for) self._matchers.append(matcher) return matcher @@ -93,12 +105,12 @@ def clear(self): self._history.clear() def setWaiter(self, url, attempts=None): - matcher = UrlMatch(url, attempts, wait=True) + matcher = UrlMatch(self.time, url, attempts, wait=True) self._matchers.append(matcher) return matcher def setSleep(self, url, attempts=None, sleep=None, wait_for=None): - matcher = UrlMatch(url, attempts, sleep=sleep, fail_for=wait_for) + matcher = UrlMatch(self.time, url, attempts, sleep=sleep, fail_for=wait_for) self._matchers.append(matcher) return matcher diff --git a/hassio-google-drive-backup/dev/simulationserver.py b/hassio-google-drive-backup/dev/simulationserver.py index d0427bfc..cfc555d0 100644 --- a/hassio-google-drive-backup/dev/simulationserver.py +++ b/hassio-google-drive-backup/dev/simulationserver.py @@ -106,6 +106,7 @@ async def start(self, port): await site.start() async def stop(self): + self.interceptor.stop() await self.runner.shutdown() await self.runner.cleanup() diff --git a/hassio-google-drive-backup/requirements-addon.txt b/hassio-google-drive-backup/requirements-addon.txt index ca38ebc7..78ddeeae 100644 --- a/hassio-google-drive-backup/requirements-addon.txt +++ b/hassio-google-drive-backup/requirements-addon.txt @@ -14,3 +14,4 @@ aiofiles aiofile colorlog aiohttp-jinja2 +aioping diff --git a/hassio-google-drive-backup/tests/conftest.py b/hassio-google-drive-backup/tests/conftest.py index e983d5f4..e7e0c737 100644 --- a/hassio-google-drive-backup/tests/conftest.py +++ b/hassio-google-drive-backup/tests/conftest.py @@ -164,6 +164,7 @@ async def generate_config(server_url: URL, ports, cleandir): Setting.PORT: ports.ui, Setting.INGRESS_PORT: ports.ingress, Setting.BACKUP_STARTUP_DELAY_MINUTES: 0, + 
Setting.PING_TIMEOUT: 0.1, }) diff --git a/hassio-google-drive-backup/tests/helpers.py b/hassio-google-drive-backup/tests/helpers.py index 574be9bb..b778c83c 100644 --- a/hassio-google-drive-backup/tests/helpers.py +++ b/hassio-google-drive-backup/tests/helpers.py @@ -162,8 +162,8 @@ class IntentionalFailure(Exception): class HelperTestSource(SimulatedSource): - def __init__(self, name): - super().__init__(name) + def __init__(self, name, is_destination=False): + super().__init__(name, is_destination=is_destination) self.allow_create = True self.allow_save = True diff --git a/hassio-google-drive-backup/tests/test_debugworker.py b/hassio-google-drive-backup/tests/test_debugworker.py index d6582612..fdc2213c 100644 --- a/hassio-google-drive-backup/tests/test_debugworker.py +++ b/hassio-google-drive-backup/tests/test_debugworker.py @@ -16,12 +16,8 @@ async def test_dns_info(debug_worker: DebugWorker, config: Config): config.override(Setting.SEND_ERROR_REPORTS, True) config.override(Setting.DRIVE_HOST_NAME, "localhost") await debug_worker.doWork() - assert debug_worker.dns_info == { - 'localhost': { - '127.0.0.1': 'alive', - 'localhost': 'alive' - } - } + assert '127.0.0.1' in debug_worker.dns_info['localhost'] + assert 'localhost' in debug_worker.dns_info['localhost'] @pytest.mark.asyncio @@ -29,12 +25,7 @@ async def test_bad_host(debug_worker: DebugWorker, config: Config): skipForWindows() config.override(Setting.DRIVE_HOST_NAME, "dasdfdfgvxcvvsoejbr.com") await debug_worker.doWork() - assert debug_worker.dns_info == { - 'dasdfdfgvxcvvsoejbr.com': { - 'dasdfdfgvxcvvsoejbr.com': "Name or service not known" - } - } - + assert "Name or service not known" in debug_worker.dns_info['dasdfdfgvxcvvsoejbr.com']['dasdfdfgvxcvvsoejbr.com'] @pytest.mark.asyncio async def test_send_error_report(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server, error_store: ErrorStore): diff --git a/hassio-google-drive-backup/tests/test_model.py 
b/hassio-google-drive-backup/tests/test_model.py index 05d72553..c5e7f625 100644 --- a/hassio-google-drive-backup/tests/test_model.py +++ b/hassio-google-drive-backup/tests/test_model.py @@ -17,12 +17,12 @@ @pytest.fixture def source(): - return HelperTestSource("Source") + return HelperTestSource("Source", is_destination=False) @pytest.fixture def dest(): - return HelperTestSource("Dest") + return HelperTestSource("Dest", is_destination=True) @pytest.fixture @@ -855,6 +855,29 @@ async def test_delete_ignored_upgrade_backup_after_some_time(time: FakeTime, mod dest.assertThat(current=1) +@pytest.mark.asyncio +async def test_zero_config_while_deleting_backups(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + """ + Issue #745 identified that setting the destination max backups to 0 and "delete after upload"=True would cause destination + backups to get deleted due to an error in the logic for handling purges. This test verifies that this no longer happens. + """ + source.setMax(1) + dest.setMax(1) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.DELETE_AFTER_UPLOAD, True) + source.insert("Backup", time.now()) + await model.sync(time.now()) + source.assertThat(current=0, deleted=1) + dest.assertThat(current=1, saved=1) + source.reset() + dest.reset() + + dest.setMax(0) + await model.sync(time.now()) + dest.assertThat(current=1) + source.assertThat() + + @pytest.mark.asyncio async def test_generational_delete_issue602(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): time.setTimeZone("Europe/Rome")