Include extra metadata in backup WS API #137296

Merged: 3 commits, Feb 4, 2025
6 changes: 0 additions & 6 deletions homeassistant/components/backup/models.py
@@ -41,12 +41,6 @@ class BaseBackup:
homeassistant_version: str | None # None if homeassistant_included is False
name: str

def as_frontend_json(self) -> dict:
"""Return a dict representation of this backup for sending to frontend."""
return {
key: val for key, val in asdict(self).items() if key != "extra_metadata"
}


@dataclass(frozen=True, kw_only=True)
class AgentBackup(BaseBackup):
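Note: the deleted as_frontend_json() helper was the only place extra_metadata was stripped before reaching the frontend. A minimal sketch of the behavioural difference, using a simplified stand-in dataclass (DemoBackup and its field set are illustrative, not the full AgentBackup):

from dataclasses import asdict, dataclass, field

@dataclass(frozen=True, kw_only=True)
class DemoBackup:
    """Simplified stand-in for AgentBackup, for illustration only."""

    backup_id: str
    name: str
    extra_metadata: dict = field(default_factory=dict)

backup = DemoBackup(
    backup_id="abc123",
    name="Test",
    extra_metadata={"instance_id": "our_uuid", "with_automatic_settings": True},
)

# Old behaviour: as_frontend_json() dropped extra_metadata before sending.
old_payload = {key: val for key, val in asdict(backup).items() if key != "extra_metadata"}

# New behaviour: the backup object is handed to the WS serializer as-is,
# so extra_metadata is included in the response.
new_payload = asdict(backup)

assert "extra_metadata" not in old_payload
assert new_payload["extra_metadata"]["instance_id"] == "our_uuid"
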
4 changes: 2 additions & 2 deletions homeassistant/components/backup/websocket.py
@@ -57,7 +57,7 @@ async def handle_info(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
"backups": [backup.as_frontend_json() for backup in backups.values()],
"backups": list(backups.values()),
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
"last_non_idle_event": manager.last_non_idle_event,
@@ -91,7 +91,7 @@ async def handle_details(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
"backup": backup.as_frontend_json() if backup else None,
"backup": backup,
},
)

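With the helper gone, handle_info and handle_details hand the backup dataclasses straight to the WS serializer, so each entry now carries extra_metadata. A hedged sketch of the shape of one entry in the "backups" list returned by the backup/info command; the values mirror the test fixtures and the field set is not meant to be exhaustive:

# Illustrative shape of one "backups" entry after this change.
example_backup_entry = {
    "backup_id": "abc123",
    "date": "1970-01-01T00:00:00.000Z",
    "database_included": True,
    "extra_metadata": {  # newly exposed to WS API consumers
        "instance_id": "our_uuid",
        "with_automatic_settings": True,
    },
    "failed_agent_ids": [],
    "folders": ["media", "share"],
    "homeassistant_included": True,
    "homeassistant_version": "2024.12.0",
    "name": "Test",
}
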
2 changes: 1 addition & 1 deletion homeassistant/components/onboarding/views.py
@@ -378,7 +378,7 @@ async def get(self, manager: BackupManager, request: web.Request) -> web.Response:
backups, _ = await manager.async_get_backups()
return self.json(
{
"backups": [backup.as_frontend_json() for backup in backups.values()],
"backups": list(backups.values()),
"state": manager.state,
"last_non_idle_event": manager.last_non_idle_event,
}
4 changes: 2 additions & 2 deletions tests/components/backup/common.py
@@ -5,7 +5,7 @@
from collections.abc import AsyncIterator, Callable, Coroutine, Iterable
from pathlib import Path
from typing import Any
from unittest.mock import ANY, AsyncMock, Mock, patch
from unittest.mock import AsyncMock, Mock, patch

from homeassistant.components.backup import (
DOMAIN,
@@ -29,7 +29,7 @@
backup_id="abc123",
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
extra_metadata={"instance_id": "our_uuid", "with_automatic_settings": True},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
10 changes: 10 additions & 0 deletions tests/components/backup/conftest.py
@@ -18,6 +18,16 @@
from tests.common import get_fixture_path


@pytest.fixture(name="instance_id", autouse=True)
def instance_id_fixture(hass: HomeAssistant) -> Generator[None]:
"""Mock instance ID."""
with patch(
"homeassistant.components.backup.manager.instance_id.async_get",
return_value="our_uuid",
):
yield


@pytest.fixture(name="mocked_json_bytes")
def mocked_json_bytes_fixture() -> Generator[Mock]:
"""Mock json_bytes."""
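The autouse fixture above pins instance_id.async_get to "our_uuid", which is what lets tests/components/backup/common.py assert the literal UUID instead of mock.ANY. A rough sketch of a test relying on it; the setup_backup_integration helper and the exact response layout are assumptions based on the existing backup test suite, not part of this diff:

from homeassistant.core import HomeAssistant

from tests.typing import WebSocketGenerator

from .common import setup_backup_integration  # assumed existing test helper


async def test_info_includes_extra_metadata(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
    """Sketch: extra_metadata carries the mocked, deterministic instance id."""
    await setup_backup_integration(hass)
    client = await hass_ws_client(hass)

    await client.send_json_auto_id({"type": "backup/info"})
    result = await client.receive_json()
    assert result["success"]

    for backup in result["result"]["backups"]:
        # The autouse instance_id fixture makes this literal comparison stable.
        assert backup["extra_metadata"]["instance_id"] == "our_uuid"
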
8 changes: 8 additions & 0 deletions tests/components/backup/snapshots/test_backup.ambr
@@ -71,6 +71,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
Comment on lines +75 to +76

Contributor (author): We could filter out these keys, but it's maybe simpler to just send everything (as the PR does now).

Member: Let's send everything.

}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -94,6 +98,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'unknown_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
80 changes: 80 additions & 0 deletions tests/components/backup/snapshots/test_websocket.ambr
@@ -3040,6 +3040,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3117,6 +3121,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3175,6 +3183,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3217,6 +3229,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'unknown_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3270,6 +3286,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'unknown_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3321,6 +3341,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3379,6 +3403,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3438,6 +3466,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3497,6 +3527,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
'test.remote',
]),
@@ -3556,6 +3588,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3614,6 +3648,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3672,6 +3708,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3730,6 +3768,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
'extra_metadata': dict({
}),
'failed_agent_ids': list([
'test.remote',
]),
@@ -3789,6 +3829,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3828,6 +3872,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3883,6 +3931,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3923,6 +3975,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4199,6 +4255,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4246,6 +4306,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4297,6 +4361,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4339,6 +4407,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'unknown_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4367,6 +4439,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4415,6 +4491,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
'extra_metadata': dict({
'instance_id': 'our_uuid',
'with_automatic_settings': True,
}),
'failed_agent_ids': list([
]),
'folders': list([