[merge] Cleanup leftovers from old timeline fixes
Upstream redid some timeline chunk handling, which should make these local workarounds unnecessary.

Change-Id: I14f19ecba0bc093261cb2779135ff8c744439e12
parent 8df78e41ff
commit 06a7154abf
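The workarounds removed below all guarded chunk handling against duplicated or out-of-order timeline events, keyed off each event's displayIndex. As a rough illustration of the consistency rule the removed TimelineChunk code enforced around an insertion range (and which the retained validateInsertion call is assumed to cover now), here is a minimal, self-contained Kotlin sketch; TimelineItem and InsertionRange are illustrative stand-ins for the Realm-backed types, not SDK classes:

// Illustrative stand-ins; only displayIndex matters for this check.
data class TimelineItem(val eventId: String, val displayIndex: Int)
data class InsertionRange(val startIndex: Int, val length: Int)

// Sketch of the check the removed handleDatabaseChangeSet code performed:
// accept an insertion only if its displayIndex values line up with the
// already-built items directly before and after the range. The comparisons
// mirror the removed code, which expected the neighbour at the lower list
// position to carry a displayIndex exactly one higher.
fun isInsertionConsistent(
    builtEvents: List<TimelineItem>,
    results: List<TimelineItem>,
    range: InsertionRange
): Boolean {
    if (builtEvents.isEmpty()) return true
    // Consistency with the item just before the insertion
    if (range.startIndex > 0) {
        val firstInsertion = results[range.startIndex]
        val lastBeforeInsertion = builtEvents[range.startIndex - 1]
        if (firstInsertion.displayIndex + 1 != lastBeforeInsertion.displayIndex) return false
    }
    // Consistency with the item that will follow the insertion
    if (range.startIndex < builtEvents.size) {
        val lastInsertion = results[range.startIndex + range.length - 1]
        val firstAfterInsertion = builtEvents[range.startIndex]
        if (firstAfterInsertion.displayIndex + 1 != lastInsertion.displayIndex) return false
    }
    return true
}

A caller would skip (rather than build) any insertion range for which this returns false, which is exactly what the removed code did via continue inside the insertion loop.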
@@ -37,24 +37,6 @@ import org.matrix.android.sdk.internal.database.query.where
 import org.matrix.android.sdk.internal.session.room.timeline.PaginationDirection
 import timber.log.Timber
 
-// SC-TODO: old timeline fix, can probably remove now?
-/*
-internal fun ChunkEntity.moveEventsFrom(chunkToMerge: ChunkEntity, direction: PaginationDirection) {
-    assertIsManaged()
-    val localRealm = this.realm
-    val eventsToMerge = if (direction == PaginationDirection.FORWARDS) {
-        chunkToMerge.timelineEvents.sort(TimelineEventEntityFields.DISPLAY_INDEX, Sort.ASCENDING)
-    } else {
-        chunkToMerge.timelineEvents.sort(TimelineEventEntityFields.DISPLAY_INDEX, Sort.DESCENDING)
-    }
-    eventsToMerge.forEach {
-        if (addTimelineEventFromMove(localRealm, it, direction)) {
-            chunkToMerge.timelineEvents.remove(it)
-        }
-    }
-}
-*/
-
 internal fun ChunkEntity.addStateEvent(roomId: String, stateEvent: EventEntity, direction: PaginationDirection) {
     if (direction == PaginationDirection.BACKWARDS) {
         Timber.v("We don't keep chunk state events when paginating backward")
@@ -139,20 +121,6 @@ internal fun computeIsUnique(
     }
 }
 
-// SC-TODO: old timeline fix, probably can remove now
-/*
-private fun ChunkEntity.addTimelineEventFromMove(realm: Realm, event: TimelineEventEntity, direction: PaginationDirection): Boolean {
-    val eventId = event.eventId
-    if (timelineEvents.find(eventId) != null) {
-        return false
-    }
-    event.displayIndex = nextDisplayIndex(direction)
-    handleThreadSummary(realm, eventId, event)
-    timelineEvents.add(event)
-    return true
-}
-*/
-
 private fun handleReadReceipts(realm: Realm, roomId: String, eventEntity: EventEntity, senderId: String): ReadReceiptsSummaryEntity {
     val readReceiptsSummaryEntity = ReadReceiptsSummaryEntity.where(realm, eventEntity.eventId).findFirst()
             ?: realm.createObject<ReadReceiptsSummaryEntity>(eventEntity.eventId).apply {
@@ -527,32 +527,6 @@ internal class TimelineChunk(
     private fun handleDatabaseChangeSet(results: RealmResults<TimelineEventEntity>, changeSet: OrderedCollectionChangeSet) {
         val insertions = changeSet.insertionRanges
         for (range in insertions) {
-            /* SC-TODO: old timeline fix, can probably delete?
-            // Check if the insertion's displayIndices match our expectations - or skip this insertion.
-            // Inconsistencies (missing messages) can happen otherwise if we get insertions before having loaded all timeline events of the chunk.
-            if (builtEvents.isNotEmpty()) {
-                // Check consistency to item before insertions
-                if (range.startIndex > 0) {
-                    val firstInsertion = results[range.startIndex]!!
-                    val lastBeforeInsertion = builtEvents[range.startIndex-1]
-                    if (firstInsertion.displayIndex+1 != lastBeforeInsertion.displayIndex) {
-                        Timber.i("handleDatabaseChangeSet: skip insertion at ${range.startIndex}/${builtEvents.size}, " +
-                                "displayIndex mismatch at ${range.startIndex}: ${firstInsertion.displayIndex} -> ${lastBeforeInsertion.displayIndex}")
-                        continue
-                    }
-                }
-                // Check consistency to item after insertions
-                if (range.startIndex < builtEvents.size) {
-                    val lastInsertion = results[range.startIndex+range.length-1]!!
-                    val firstAfterInsertion = builtEvents[range.startIndex]
-                    if (firstAfterInsertion.displayIndex+1 != lastInsertion.displayIndex) {
-                        Timber.i("handleDatabaseChangeSet: skip insertion at ${range.startIndex}/${builtEvents.size}, " +
-                                "displayIndex mismatch at ${range.startIndex+range.length}: ${firstAfterInsertion.displayIndex} -> ${lastInsertion.displayIndex}")
-                        continue
-                    }
-                }
-            }
-            */
             if (!validateInsertion(range, results)) continue
             val newItems = results
                     .subList(range.startIndex, range.startIndex + range.length)
@@ -72,21 +72,6 @@ internal class TokenChunkEventPersistor @Inject constructor(
             roomId: String,
             direction: PaginationDirection
     ): Result {
-        /* SC-TODO: old timeline fixes, can probably delete
-        if (receivedChunk.events.isEmpty() && receivedChunk.start == receivedChunk.end) {
-            Timber.w("Discard empty chunk with identical start/end token ${receivedChunk.start}")
-
-            return if (receivedChunk.hasMore()) {
-                Result.SHOULD_FETCH_MORE
-            } else {
-                Result.REACHED_END
-            }
-        } else if (receivedChunk.start == receivedChunk.end) {
-            // I don't think we have seen this case so far, but let's log it just in case...
-            // -> if it happens, we need to address it somehow!
-            Timber.e("Non-empty chunk with identical start/end token ${receivedChunk.start}")
-        }
-        */
         monarchy
                 .awaitTransaction { realm ->
                     Timber.i("Start persisting ${receivedChunk.events.size} events in $roomId towards $direction | " +
@@ -178,81 +163,6 @@ internal class TokenChunkEventPersistor @Inject constructor(
             if (event.eventId == null || event.senderId == null) {
                 return@forEach
             }
-            /* SC-TODO: old timeline fix, can probably delete
-            // We check for the timeline event with this id, but not in the thread chunk
-            val eventId = event.eventId
-            val existingTimelineEvent = TimelineEventEntity
-                    .where(realm, roomId, eventId)
-                    .equalTo(TimelineEventEntityFields.OWNED_BY_THREAD_CHUNK, false)
-                    .findFirst()
-            // If it exists, we want to stop here, just link the prevChunk
-            val existingChunk = existingTimelineEvent?.chunk?.firstOrNull()
-            if (existingChunk != null) {
-                if (existingChunk == currentChunk) {
-                    Timber.w("Avoid double insertion of event $eventId, shouldn't happen in an ideal world | " +
-                            "direction: $direction.value " +
-                            "room: $roomId " +
-                            "chunk: ${existingChunk.identifier()} " +
-                            "eventId: $eventId " +
-                            "caughtByOldCheck ${((if (direction == PaginationDirection.BACKWARDS) currentChunk.nextChunk else currentChunk.prevChunk) == existingChunk)} " +
-                            "caughtByOldBackwardCheck ${(currentChunk.nextChunk == existingChunk)} " +
-                            "caughtByOldForwardCheck ${(currentChunk.prevChunk == existingChunk)}"
-                    )
-                    // No idea why this happens, but if it does, we don't want to throw away all the other events
-                    // (or even link chunks to themselves)
-                    return@forEach
-                }
-                val alreadyLinkedNext = currentChunk.doesNextChunksVerifyCondition { it == existingChunk }
-                val alreadyLinkedPrev = currentChunk.doesPrevChunksVerifyCondition { it == existingChunk }
-                if (alreadyLinkedNext || alreadyLinkedPrev) {
-                    Timber.i("Avoid double link | " +
-                            "direction: $direction " +
-                            "room: $roomId event: $eventId " +
-                            "linkedPrev: $alreadyLinkedPrev linkedNext: $alreadyLinkedNext " +
-                            "oldChunk: ${existingChunk.identifier()} newChunk: ${existingChunk.identifier()} " +
-                            "oldBackwardCheck: ${currentChunk.nextChunk == existingChunk} " +
-                            "oldForwardCheck: ${currentChunk.prevChunk == existingChunk}"
-                    )
-                    if ((direction == PaginationDirection.FORWARDS && !alreadyLinkedNext /* && alreadyLinkedPrev */) ||
-                            (direction == PaginationDirection.BACKWARDS && !alreadyLinkedPrev /* && alreadyLinkedNext */)) {
-                        // Do not stop processing here: even though this event already exists in an already linked chunk,
-                        // we still may have new events to add
-                        return@forEach
-                    }
-                    // Stop processing here
-                    return@processTimelineEvents
-                }
-                // If we haven't found a single new event yet, we don't want to link in the pagination direction, as that might cause a
-                // timeline loop if the other chunk is in the other direction.
-                if (!hasNewEvents) {
-                    Timber.i("Skip adding event $eventId, already exists")
-                    // Only skip this event, but still process other events.
-                    // Remember this chunk, since in case we don't find any new events, we still want to link this in pagination direction
-                    // in order to link a chunk to the /sync chunk
-                    if (existingChunkToLink == null) {
-                        existingChunkToLink = existingChunk
-                    }
-                    return@forEach
-                }
-                when (direction) {
-                    PaginationDirection.BACKWARDS -> {
-                        Timber.i("Backwards insert chunk: ${existingChunk.identifier()} -> ${currentChunk.identifier()}")
-                        currentChunk.prevChunk = existingChunk
-                        existingChunk.nextChunk = currentChunk
-                    }
-                    PaginationDirection.FORWARDS -> {
-                        Timber.i("Forward insert chunk: ${currentChunk.identifier()} -> ${existingChunk.identifier()}")
-                        currentChunk.nextChunk = existingChunk
-                        existingChunk.prevChunk = currentChunk
-                    }
-                }
-                // Stop processing here
-                return@processTimelineEvents
-            }
-
-            // existingChunk == null => this is a new event we haven't seen before
-            hasNewEvents = true
-            */
 
             val ageLocalTs = now - (event.unsignedData?.age ?: 0)
             val eventEntity = event.toEntity(roomId, SendState.SYNCED, ageLocalTs).copyToRealmOrIgnore(realm, EventInsertType.PAGINATION)
@@ -383,20 +383,6 @@ internal class RoomSyncHandler @Inject constructor(
             aggregator: SyncResponsePostTreatmentAggregator
     ): ChunkEntity {
         val lastChunk = ChunkEntity.findLastForwardChunkOfRoom(realm, roomEntity.roomId)
-        /* SC-TODO: old timeline fixes, can probably delete
-        if (isLimited && lastChunk != null) {
-            Timber.i("Deleting last forward chunk (${lastChunk.identifier()})")
-            // Add events that oldPrev may have dropped since they were already in lastChunk
-            val oldPrev = lastChunk.prevChunk
-            if (oldPrev != null && oldPrev.nextToken != lastChunk.prevToken) {
-                // If the tokens mismatch, this means we have chained them due to duplicated events.
-                // In this case, we need to make sure to re-add possibly dropped events (which would have
-                // been duplicates otherwise)
-                oldPrev.moveEventsFrom(lastChunk, PaginationDirection.FORWARDS)
-            }
-            lastChunk.deleteOnCascade(deleteStateEvents = false, canDeleteRoot = true)
-        }
-        */
         val chunkEntity = if (!isLimited && lastChunk != null) {
             lastChunk
         } else {