refactor: Update status storage, fragments #1249

Merged · 25 commits · Feb 2, 2025
Commits
68f816d
refactor: Rename TimelineStatusEntity to StatusEntity
nikclayton Jan 25, 2025
b11be9e
refactor: Create StatusDao for operations that act on individual stat…
nikclayton Jan 25, 2025
ffa580b
wip: Works, needs cleaning up
nikclayton Jan 30, 2025
e8aa586
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Jan 30, 2025
1ef22c7
Delete dead refresh code
nikclayton Jan 31, 2025
5c02d3d
Sync notif timeline with cachedtimeline code. Remove dead code
nikclayton Feb 1, 2025
a5580f7
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Feb 1, 2025
a06cd52
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Feb 1, 2025
5312a0b
wip: Kind as class
nikclayton Feb 1, 2025
75cd08e
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Feb 1, 2025
4087268
Renames
nikclayton Feb 1, 2025
9a90a61
fmtsql
nikclayton Feb 1, 2025
61d9a92
refactor: Extract bind loadstate functionality to a method
nikclayton Feb 1, 2025
bbb949c
ktlintformat
nikclayton Feb 1, 2025
96f1d58
Remove obsolete comment
nikclayton Feb 1, 2025
05ea390
Add comment
nikclayton Feb 1, 2025
26030b2
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Feb 1, 2025
2bbe459
Merge remote-tracking branch 'upstream/main' into timeline-relation
nikclayton Feb 1, 2025
424459b
Be explicit about the return type
nikclayton Feb 1, 2025
8eb40df
Remove dead code
nikclayton Feb 1, 2025
2d4befc
wip: Debugging test
nikclayton Feb 2, 2025
ab20528
Convert TimelineDaoTest to an instrumented test
nikclayton Feb 2, 2025
83bcf64
ci: Configure emulator tests in GitHub actions
nikclayton Feb 2, 2025
28dfadc
arch = x86_64
nikclayton Feb 2, 2025
a407da3
Lint
nikclayton Feb 2, 2025
54 changes: 54 additions & 0 deletions .github/workflows/ci.yml
@@ -148,3 +148,57 @@ jobs:

- name: assemble ${{ matrix.color }}${{ matrix.store }}${{ matrix.type }}
run: ./gradlew assemble${{ matrix.color }}${{ matrix.store }}${{ matrix.type }}

# Connected tests are per-store-variant, debug builds only.
connected:
strategy:
matrix:
color: ["Orange"]
store: ["Fdroid", "Github", "Google"]
type: ["Debug"]
api-level: [31]
target: [default]
name: Android Emulator Tests
runs-on: ubuntu-latest

steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

- name: Enable KVM
run: |
echo 'KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"' | sudo tee /etc/udev/rules.d/99-kvm4all.rules
sudo udevadm control --reload-rules
sudo udevadm trigger --name-match=kvm

- uses: ./.github/actions/setup-build-env
with:
gradle-cache-encryption-key: ${{ secrets.GRADLE_ENCRYPTION_KEY }}

- name: AVD cache
uses: actions/cache@v4
id: avd-cache
with:
path: |
~/.android/avd/*
~/.android/adb*
key: avd-${{ matrix.api-level }}

- name: Create AVD and generate snapshot for caching
if: steps.avd-cache.outputs.cache-hit != 'true'
uses: reactivecircus/android-emulator-runner@v2
with:
arch: x86_64
api-level: ${{ matrix.api-level }}
force-avd-creation: false
emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
disable-animations: false
script: echo "Generated AVD snapshot for caching."

- name: Run tests
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ matrix.api-level }}
target: ${{ matrix.target }}
arch: x86_64
script: ./gradlew connected${{ matrix.color }}${{ matrix.store }}${{ matrix.type }}AndroidTest
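
The `connected` job above runs the project's instrumented tests on an emulator (this PR converts TimelineDaoTest into an instrumented test in commit ab20528). As a rough illustration only — the class, database, and DAO names below are placeholders, not the project's actual code — a Room DAO instrumented test is typically structured like this:

```kotlin
// Sketch only: AppDatabase and TimelineDao are placeholder names, not
// necessarily the classes used by Pachli.
import android.content.Context
import androidx.room.Room
import androidx.test.core.app.ApplicationProvider
import androidx.test.ext.junit.runners.AndroidJUnit4
import kotlinx.coroutines.test.runTest
import org.junit.After
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith

@RunWith(AndroidJUnit4::class)
class ExampleTimelineDaoTest {
    private lateinit var db: AppDatabase
    private lateinit var timelineDao: TimelineDao

    @Before
    fun setup() {
        val context = ApplicationProvider.getApplicationContext<Context>()
        // An in-memory database is fast and discarded when the process exits,
        // but still exercises the real SQLite implementation on the emulator.
        db = Room.inMemoryDatabaseBuilder(context, AppDatabase::class.java)
            .allowMainThreadQueries()
            .build()
        timelineDao = db.timelineDao()
    }

    @After
    fun tearDown() {
        db.close()
    }

    @Test
    fun insertedStatusIsReturned() = runTest {
        // Insert fixture rows via the DAO, query them back, and assert on the
        // result -- this needs a device or emulator, hence the connected job.
    }
}
```

Running such a test locally uses the same Gradle task the workflow invokes, e.g. `./gradlew connectedOrangeGoogleDebugAndroidTest`.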
@@ -88,7 +88,6 @@ import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.flow.combine
import kotlinx.coroutines.flow.conflate
import kotlinx.coroutines.flow.distinctUntilChangedBy
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.filterNotNull
@@ -313,12 +312,13 @@ class NotificationsFragment :
// remove it.
is NotificationActionSuccess.AcceptFollowRequest,
is NotificationActionSuccess.RejectFollowRequest,
-> refreshAdapterAndScrollToVisibleId()
-> adapter.refresh()
}
}

when (uiSuccess) {
is UiSuccess.Block, is UiSuccess.Mute, is UiSuccess.MuteConversation -> refreshAdapterAndScrollToVisibleId()
is UiSuccess.Block, is UiSuccess.Mute, is UiSuccess.MuteConversation,
-> adapter.refresh()

is UiSuccess.LoadNewest -> {
// Scroll to the top when prepending completes.
@@ -379,25 +379,6 @@ class NotificationsFragment :
}
}

/**
* Refreshes the adapter, waits for the first page to be updated, and scrolls the
* recyclerview to the first notification that was visible before the refresh.
*
* This ensures the user's position is not lost during adapter refreshes.
*/
private fun refreshAdapterAndScrollToVisibleId() {
getFirstVisibleNotification()?.id?.let { id ->
viewLifecycleOwner.lifecycleScope.launch {
adapter.onPagesUpdatedFlow.conflate().take(1).collect {
binding.recyclerView.scrollToPosition(
adapter.snapshot().items.indexOfFirst { it.id == id },
)
}
}
}
adapter.refresh()
}

override fun onCreateMenu(menu: Menu, menuInflater: MenuInflater) {
menuInflater.inflate(R.menu.fragment_notifications, menu)
menu.findItem(R.id.action_refresh)?.apply {
@@ -446,7 +427,7 @@ class NotificationsFragment :
}

binding.swipeRefreshLayout.isRefreshing = false
refreshAdapterAndScrollToVisibleId()
adapter.refresh()
clearNotificationsForAccount(requireContext(), pachliAccountId)
}

@@ -613,11 +594,11 @@
}

override fun onMute(mute: Boolean, id: String, position: Int, notifications: Boolean) {
refreshAdapterAndScrollToVisibleId()
adapter.refresh()
}

override fun onBlock(block: Boolean, id: String, position: Int) {
refreshAdapterAndScrollToVisibleId()
adapter.refresh()
}

override fun onRespondToFollowRequest(accept: Boolean, accountId: String, position: Int) {
@@ -83,12 +83,12 @@ class CachedTimelineRepository @Inject constructor(
factory = InvalidatingPagingSourceFactory { timelineDao.getStatuses(account.id) }

val initialKey = remoteKeyDao.remoteKeyForKind(account.id, RKE_TIMELINE_ID, RemoteKeyKind.REFRESH)?.key
val row = initialKey?.let { timelineDao.getStatusRowNumber(account.id, it) } ?: 0
val row = initialKey?.let { timelineDao.getStatusRowNumber(account.id, it) }

Timber.d("initialKey: %s is row: %d", initialKey, row)

return Pager(
initialKey = (row - ((PAGE_SIZE * 3) / 2)).coerceAtLeast(0),
initialKey = row?.let { (row - ((PAGE_SIZE * 3) / 2)).coerceAtLeast(0) },
config = PagingConfig(
pageSize = PAGE_SIZE,
enablePlaceholders = true,
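
The change above makes the restored position optional: `row` is only computed when a saved REFRESH key exists, and `initialKey` is only set in that case, so a cold start falls back to the Pager's default start. A small worked example of the offset arithmetic, assuming a PAGE_SIZE of 30 (the real constant is defined elsewhere in the repository and may differ):

```kotlin
// Assumed values for illustration; PAGE_SIZE = 30 is a guess, not the real constant.
val pageSize = 30
val row: Int? = 100 // row number of the status the user was last reading

// Start loading roughly 1.5 pages before the remembered row so the restored
// status lands mid-screen instead of at the very top of the list.
val initialKey = row?.let { (it - ((pageSize * 3) / 2)).coerceAtLeast(0) }
// row = 100  -> initialKey = 100 - 45 = 55
// row = 10   -> initialKey = 0 (clamped by coerceAtLeast)
// row = null -> initialKey = null, and the Pager starts from its default position
```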
@@ -97,7 +97,6 @@ import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.flow.combine
import kotlinx.coroutines.flow.conflate
import kotlinx.coroutines.flow.distinctUntilChangedBy
import kotlinx.coroutines.flow.filterNotNull
import kotlinx.coroutines.flow.flow
@@ -352,17 +351,12 @@ class TimelineFragment :
is StatusActionSuccess.Translate -> statusViewData.status
}
(indexedViewData.value as StatusViewData).status = status

adapter.notifyItemChanged(indexedViewData.index)
}

// Refresh adapter on mutes and blocks
when (it) {
is UiSuccess.Block,
is UiSuccess.Mute,
is UiSuccess.MuteConversation,
->
refreshAdapterAndScrollToVisibleId()
is UiSuccess.Block, is UiSuccess.Mute, is UiSuccess.MuteConversation,
-> adapter.refresh()

is UiSuccess.StatusSent -> handleStatusSentOrEdit(it.status)
is UiSuccess.StatusEdited -> handleStatusSentOrEdit(it.status)
@@ -430,25 +424,6 @@ class TimelineFragment :
}
}

/**
* Refreshes the adapter, waits for the first page to be updated, and scrolls the
* recyclerview to the first status that was visible before the refresh.
*
* This ensures the user's position is not lost during adapter refreshes.
*/
private fun refreshAdapterAndScrollToVisibleId() {
getFirstVisibleStatus()?.id?.let { id ->
viewLifecycleOwner.lifecycleScope.launch {
adapter.onPagesUpdatedFlow.conflate().take(1).collect {
binding.recyclerView.scrollToPosition(
adapter.snapshot().items.indexOfFirst { it.id == id },
)
}
}
}
adapter.refresh()
}

override fun onCreateMenu(menu: Menu, menuInflater: MenuInflater) {
menuInflater.inflate(R.menu.fragment_timeline, menu)

@@ -477,7 +452,6 @@
R.id.action_load_newest -> {
Timber.d("Reload because user chose load newest menu item")
viewModel.accept(InfallibleUiAction.LoadNewest)
refreshContent()
true
}
else -> false
@@ -582,7 +556,7 @@
}

binding.swipeRefreshLayout.isRefreshing = false
refreshAdapterAndScrollToVisibleId()
adapter.refresh()
}

override fun onReply(viewData: StatusViewData) {
@@ -29,6 +29,7 @@ import app.pachli.core.database.model.RemoteKeyEntity
import app.pachli.core.database.model.RemoteKeyEntity.RemoteKeyKind
import app.pachli.core.database.model.StatusEntity
import app.pachli.core.database.model.TimelineAccountEntity
import app.pachli.core.database.model.TimelineStatusEntity
import app.pachli.core.database.model.TimelineStatusWithAccount
import app.pachli.core.network.model.Links
import app.pachli.core.network.model.Status
@@ -68,7 +69,7 @@ class CachedTimelineRemoteMediator(
RKE_TIMELINE_ID,
RemoteKeyKind.REFRESH,
)?.key
Timber.d("Loading from item: %s", statusId)
Timber.d("Refresh from item: %s", statusId)
getInitialPage(statusId, state.config.pageSize)
}

@@ -78,7 +79,7 @@ class CachedTimelineRemoteMediator(
RKE_TIMELINE_ID,
RemoteKeyKind.PREV,
) ?: return@transactionProvider MediatorResult.Success(endOfPaginationReached = true)
Timber.d("Loading from remoteKey: %s", rke)
Timber.d("Prepend from remoteKey: %s", rke)
mastodonApi.homeTimeline(minId = rke.key, limit = state.config.pageSize)
}

@@ -88,7 +89,7 @@ class CachedTimelineRemoteMediator(
RKE_TIMELINE_ID,
RemoteKeyKind.NEXT,
) ?: return@transactionProvider MediatorResult.Success(endOfPaginationReached = true)
Timber.d("Loading from remoteKey: %s", rke)
Timber.d("Append from remoteKey: %s", rke)
mastodonApi.homeTimeline(maxId = rke.key, limit = state.config.pageSize)
}
}
@@ -112,8 +113,10 @@ class CachedTimelineRemoteMediator(

when (loadType) {
LoadType.REFRESH -> {
remoteKeyDao.deletePrevNext(pachliAccountId, RKE_TIMELINE_ID)
timelineDao.deleteAllStatusesForAccount(pachliAccountId)
timelineDao.deleteAllStatusesForAccountOnTimeline(
pachliAccountId,
TimelineStatusEntity.Kind.Home,
)

remoteKeyDao.upsert(
RemoteKeyEntity(
@@ -247,7 +250,8 @@ class CachedTimelineRemoteMediator(
}

/**
* Inserts `statuses` and the accounts referenced by those statuses in to the cache.
* Inserts `statuses` and the accounts referenced by those statuses in to the cache,
* then adds references to them in the Home timeline.
*/
private suspend fun insertStatuses(pachliAccountId: Long, statuses: List<Status>) {
check(transactionProvider.inTransaction())
@@ -262,6 +266,15 @@ class CachedTimelineRemoteMediator(

timelineDao.upsertAccounts(accounts.map { TimelineAccountEntity.from(it, pachliAccountId) })
statusDao.upsertStatuses(statuses.map { StatusEntity.from(it, pachliAccountId) })
timelineDao.upsertStatuses(
statuses.map {
TimelineStatusEntity(
kind = TimelineStatusEntity.Kind.Home,
pachliAccountId = pachliAccountId,
statusId = it.id,
)
},
)
}

companion object {
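
With this refactor the mediator writes two kinds of rows: the status itself (`StatusEntity`, shared across timelines) and a per-timeline reference (`TimelineStatusEntity` with `Kind.Home`). The entity definition itself is not part of the diff shown here; a minimal sketch of what such a join-style entity could look like, with the column set and primary key as assumptions (the "Kind as class" commit suggests the real `Kind` may be a class hierarchy rather than the enum used below):

```kotlin
// Illustrative sketch only -- the real TimelineStatusEntity is defined in
// core/database and may differ; the primary key and the shape of Kind are assumptions.
import androidx.room.Entity

@Entity(
    tableName = "TimelineStatusEntity",
    primaryKeys = ["kind", "pachliAccountId", "statusId"],
)
data class TimelineStatusEntity(
    /** Which timeline this reference belongs to. */
    val kind: Kind,
    /** Local account that owns this cached timeline. */
    val pachliAccountId: Long,
    /** Server ID of the status row in StatusEntity. */
    val statusId: String,
) {
    enum class Kind {
        Home,
        // other timeline kinds would be added here
    }
}
```

Splitting the status row from the timeline reference lets one cached status appear on several timelines without being stored twice, which is presumably why REFRESH now calls `deleteAllStatusesForAccountOnTimeline(..., Kind.Home)` instead of deleting every cached status for the account.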
@@ -42,6 +42,7 @@ import dagger.hilt.android.lifecycle.HiltViewModel
import javax.inject.Inject
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.distinctUntilChangedBy
import kotlinx.coroutines.flow.flatMapLatest
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
@@ -74,9 +75,10 @@ class CachedTimelineViewModel @Inject constructor(
flow { emit(repository.getRefreshKey(it.data!!.id)) }
}

override var statuses = accountFlow.flatMapLatest {
getStatuses(it.data!!)
}.cachedIn(viewModelScope)
override var statuses = accountFlow
.distinctUntilChangedBy { it.data!!.id }
.flatMapLatest { getStatuses(it.data!!) }
.cachedIn(viewModelScope)

/** @return Flow of statuses that make up the timeline of [timeline] for [account]. */
private suspend fun getStatuses(
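
The new `distinctUntilChangedBy { it.data!!.id }` stage means the statuses Pager is only rebuilt when the active account actually changes, not every time some other field of the account record is re-emitted. A small self-contained illustration of the operator (the `Account` type here is a stand-in, not the project's model):

```kotlin
// Stand-alone illustration; Account is a placeholder type, not Pachli's model.
import kotlinx.coroutines.flow.distinctUntilChangedBy
import kotlinx.coroutines.flow.flowOf
import kotlinx.coroutines.runBlocking

data class Account(val id: Long, val lastReadStatusId: String?)

fun main() = runBlocking {
    val accounts = flowOf(
        Account(1L, null),
        Account(1L, "110"), // same account id, other fields changed
        Account(2L, null),  // a genuinely different account
    )

    // Consecutive emissions with the same id are dropped, so a downstream
    // flatMapLatest { getStatuses(...) } is not restarted by the middle value.
    accounts
        .distinctUntilChangedBy { it.id }
        .collect { println(it) }
    // Prints: Account(id=1, lastReadStatusId=null) then Account(id=2, lastReadStatusId=null)
}
```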
@@ -33,7 +33,7 @@ class DeveloperToolsUseCase @Inject constructor(
* Clear the home timeline cache.
*/
suspend fun clearHomeTimelineCache(accountId: Long) {
timelineDao.deleteAllStatusesForAccount(accountId)
timelineDao.deleteAllStatusesForAccountOnTimeline(accountId)
}

/**
3 changes: 1 addition & 2 deletions app/src/main/java/app/pachli/worker/PruneCacheWorker.kt
@@ -45,15 +45,14 @@ class PruneCacheWorker @AssistedInject constructor(
override suspend fun doWork(): Result {
for (account in accountManager.accounts) {
Timber.d("Pruning database using account ID: %d", account.id)
timelineDao.cleanup(account.id, MAX_STATUSES_IN_CACHE)
timelineDao.cleanup(account.id)
}
return Result.success()
}

override suspend fun getForegroundInfo() = ForegroundInfo(NOTIFICATION_ID_PRUNE_CACHE, notification)

companion object {
private const val MAX_STATUSES_IN_CACHE = 1000
const val PERIODIC_WORK_TAG = "PruneCacheWorker_periodic"
}
}
@@ -18,6 +18,7 @@ import app.pachli.core.database.di.TransactionProvider
import app.pachli.core.database.model.AccountEntity
import app.pachli.core.database.model.RemoteKeyEntity
import app.pachli.core.database.model.RemoteKeyEntity.RemoteKeyKind
import app.pachli.core.database.model.TimelineStatusEntity
import app.pachli.core.database.model.TimelineStatusWithAccount
import app.pachli.core.network.json.BooleanIfNull
import app.pachli.core.network.json.DefaultIfNull
@@ -343,6 +344,15 @@ class CachedTimelineRemoteMediatorTest {
}
statusDao().insertStatus(statusWithAccount.status)
}
timelineDao().upsertStatuses(
statuses.map {
TimelineStatusEntity(
pachliAccountId = it.status.timelineUserId,
kind = TimelineStatusEntity.Kind.Home,
statusId = it.status.serverId,
)
},
)
}
}

@@ -57,6 +57,7 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.async
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.launch
import timber.log.Timber

/**
* Errors that can occur acting on a status.
@@ -113,10 +114,10 @@ class NotificationsRepository @Inject constructor(
// Room is row-keyed, not item-keyed. Find the user's REFRESH key, then find the
// row of the notification with that ID, and use that as the Pager's initialKey.
val initialKey = remoteKeyDao.remoteKeyForKind(pachliAccountId, RKE_TIMELINE_ID, RemoteKeyKind.REFRESH)?.key
val row = initialKey?.let { notificationDao.getNotificationRowNumber(pachliAccountId, it) } ?: 0
val row = initialKey?.let { notificationDao.getNotificationRowNumber(pachliAccountId, it) }

return Pager(
initialKey = (row - ((PAGE_SIZE * 3) / 2)).coerceAtLeast(0),
initialKey = row?.let { (row - ((PAGE_SIZE * 3) / 2)).coerceAtLeast(0) },
config = PagingConfig(
pageSize = PAGE_SIZE,
enablePlaceholders = true,
@@ -145,6 +146,7 @@ class NotificationsRepository @Inject constructor(
* refresh the newest notifications.
*/
suspend fun saveRefreshKey(pachliAccountId: Long, key: String?) = externalScope.async {
Timber.d("saveRefreshKey: $key")
remoteKeyDao.upsert(
RemoteKeyEntity(pachliAccountId, RKE_TIMELINE_ID, RemoteKeyKind.REFRESH, key),
)
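
The comment in the hunk above captures why the row lookup exists at all: Paging 3 backed by Room uses positional (row) keys, while Mastodon notification IDs are item keys, so the saved REFRESH id has to be translated into a row offset before it can be used as the Pager's `initialKey`. Purely as a hypothetical sketch — the real `getNotificationRowNumber` query, table, and column names are not part of this diff — such a DAO method can be written with a window function:

```kotlin
// Hypothetical sketch; NotificationEntity, its columns, and the sort order are
// assumptions. It only illustrates turning an item id into a 0-based row number.
import androidx.room.Dao
import androidx.room.Query

@Dao
interface ExampleNotificationDao {
    @Query(
        """
        SELECT rowNum FROM (
            SELECT serverId,
                   ROW_NUMBER() OVER (ORDER BY LENGTH(serverId) DESC, serverId DESC) - 1 AS rowNum
            FROM NotificationEntity
            WHERE pachliAccountId = :pachliAccountId
        )
        WHERE serverId = :notificationId
        """,
    )
    suspend fun getNotificationRowNumber(pachliAccountId: Long, notificationId: String): Int?
}
```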