fix: use correct sorting for snowflake IDs (#1074)
* fix: use correct sorting for snowflake IDs (fixes #1071)
* refactor
parent fe919de86d
commit 93a3e85994
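
Note (editorial context, not part of the commit): Mastodon IDs are numeric strings, while Pleroma emits base62 snowflake IDs, and base62 order differs from the ASCII order used by plain JS string comparison and IndexedDB keys (lowercase sorts before uppercase in base62, after it in ASCII). A quick illustration using two Pleroma IDs from the new test file added below:

// '9gPAaSqTB8Rv4nev0C' is older than '9gPAG1uvaSBblj05Y0' in base62 order,
// but naive ASCII comparison puts it after, because 'a' > 'G' in ASCII:
console.log('9gPAaSqTB8Rv4nev0C' < '9gPAG1uvaSBblj05Y0') // false
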
@@ -3,7 +3,7 @@ import { getTimeline } from '../_api/timelines'
 import { toast } from '../_components/toast/toast'
 import { mark, stop } from '../_utils/marks'
 import { concat, mergeArrays } from '../_utils/arrays'
-import { compareTimelineItemSummaries } from '../_utils/sorting'
+import { compareTimelineItemSummaries } from '../_utils/statusIdSorting'
 import isEqual from 'lodash-es/isEqual'
 import { database } from '../_database/database'
 import { getStatus, getStatusContext } from '../_api/statuses'

@@ -13,3 +13,8 @@ export const ACCOUNT_ID = '__pinafore_acct_id'
 export const STATUS_ID = '__pinafore_status_id'
 export const REBLOG_ID = '__pinafore_reblog_id'
 export const USERNAME_LOWERCASE = '__pinafore_acct_lc'
+
+export const DB_VERSION_INITIAL = 9
+export const DB_VERSION_SEARCH_ACCOUNTS = 10
+export const DB_VERSION_SNOWFLAKE_IDS = 11
+export const DB_VERSION_CURRENT = 11

@@ -1,27 +1,10 @@
-import {
-  META_STORE,
-  STATUS_TIMELINES_STORE,
-  STATUSES_STORE,
-  ACCOUNTS_STORE,
-  RELATIONSHIPS_STORE,
-  NOTIFICATIONS_STORE,
-  NOTIFICATION_TIMELINES_STORE,
-  PINNED_STATUSES_STORE,
-  TIMESTAMP,
-  REBLOG_ID,
-  THREADS_STORE,
-  STATUS_ID,
-  USERNAME_LOWERCASE
-} from './constants'
+import { DB_VERSION_CURRENT } from './constants'
 import { addKnownInstance, deleteKnownInstance } from './knownInstances'
+import { migrations } from './migrations'
 
 const openReqs = {}
 const databaseCache = {}
 
-const DB_VERSION_INITIAL = 9
-const DB_VERSION_SEARCH_ACCOUNTS = 10
-const DB_VERSION_CURRENT = 10
-
 function createDatabase (instanceName) {
   return new Promise((resolve, reject) => {
     let req = indexedDB.open(instanceName, DB_VERSION_CURRENT)

@@ -34,50 +17,16 @@ function createDatabase (instanceName) {
       let db = req.result
       let tx = e.currentTarget.transaction
 
-      function createObjectStore (name, init, indexes) {
-        let store = init
-          ? db.createObjectStore(name, init)
-          : db.createObjectStore(name)
-        if (indexes) {
-          Object.keys(indexes).forEach(indexKey => {
-            store.createIndex(indexKey, indexes[indexKey])
-          })
-        }
-      }
-
-      if (e.oldVersion < DB_VERSION_INITIAL) {
-        createObjectStore(STATUSES_STORE, { keyPath: 'id' }, {
-          [TIMESTAMP]: TIMESTAMP,
-          [REBLOG_ID]: REBLOG_ID
-        })
-        createObjectStore(STATUS_TIMELINES_STORE, null, {
-          'statusId': ''
-        })
-        createObjectStore(NOTIFICATIONS_STORE, { keyPath: 'id' }, {
-          [TIMESTAMP]: TIMESTAMP,
-          [STATUS_ID]: STATUS_ID
-        })
-        createObjectStore(NOTIFICATION_TIMELINES_STORE, null, {
-          'notificationId': ''
-        })
-        createObjectStore(ACCOUNTS_STORE, { keyPath: 'id' }, {
-          [TIMESTAMP]: TIMESTAMP
-        })
-        createObjectStore(RELATIONSHIPS_STORE, { keyPath: 'id' }, {
-          [TIMESTAMP]: TIMESTAMP
-        })
-        createObjectStore(THREADS_STORE, null, {
-          'statusId': ''
-        })
-        createObjectStore(PINNED_STATUSES_STORE, null, {
-          'statusId': ''
-        })
-        createObjectStore(META_STORE)
-      }
-      if (e.oldVersion < DB_VERSION_SEARCH_ACCOUNTS) {
-        tx.objectStore(ACCOUNTS_STORE)
-          .createIndex(USERNAME_LOWERCASE, USERNAME_LOWERCASE)
-      }
+      let migrationsToDo = migrations.filter(({ version }) => e.oldVersion < version)
+
+      function doNextMigration () {
+        if (!migrationsToDo.length) {
+          return
+        }
+        let { migration } = migrationsToDo.shift()
+        migration(db, tx, doNextMigration)
+      }
+      doNextMigration()
     }
     req.onsuccess = () => resolve(req.result)
   })

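
A minimal sketch (editorial, not from the commit) of how the upgrade handler above runs: only migrations newer than the database's old version are kept, and each one calls the supplied callback to hand off to the next. Chaining with callbacks rather than promises presumably keeps all the work inside the same versionchange transaction, which IndexedDB auto-commits once no requests are pending. The `migrations` array below is a stand-in for illustration:

const migrations = [
  { version: 9, migration: (db, tx, done) => { console.log('initial'); done() } },
  { version: 10, migration: (db, tx, done) => { console.log('search accounts'); done() } },
  { version: 11, migration: (db, tx, done) => { console.log('snowflake ids'); done() } }
]

function upgrade (oldVersion, db, tx) {
  let migrationsToDo = migrations.filter(({ version }) => oldVersion < version)
  function doNextMigration () {
    if (!migrationsToDo.length) {
      return
    }
    let { migration } = migrationsToDo.shift()
    migration(db, tx, doNextMigration)
  }
  doNextMigration()
}

upgrade(10, null, null) // logs only 'snowflake ids'
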
@@ -1,4 +1,4 @@
-import { toReversePaddedBigInt, zeroPad } from '../_utils/sorting'
+import { toReversePaddedBigInt, zeroPad } from '../_utils/statusIdSorting'
 
 //
 // timelines

@@ -0,0 +1,105 @@
+import {
+  ACCOUNTS_STORE, DB_VERSION_SNOWFLAKE_IDS, DB_VERSION_INITIAL,
+  DB_VERSION_SEARCH_ACCOUNTS, META_STORE,
+  NOTIFICATION_TIMELINES_STORE,
+  NOTIFICATIONS_STORE, PINNED_STATUSES_STORE,
+  REBLOG_ID, RELATIONSHIPS_STORE,
+  STATUS_ID,
+  STATUS_TIMELINES_STORE,
+  STATUSES_STORE, THREADS_STORE,
+  TIMESTAMP, USERNAME_LOWERCASE
+} from './constants'
+import { toReversePaddedBigInt } from '../_utils/statusIdSorting'
+
+function initialMigration (db, tx, done) {
+  function createObjectStore (name, init, indexes) {
+    let store = init
+      ? db.createObjectStore(name, init)
+      : db.createObjectStore(name)
+    if (indexes) {
+      Object.keys(indexes).forEach(indexKey => {
+        store.createIndex(indexKey, indexes[indexKey])
+      })
+    }
+  }
+
+  createObjectStore(STATUSES_STORE, { keyPath: 'id' }, {
+    [TIMESTAMP]: TIMESTAMP,
+    [REBLOG_ID]: REBLOG_ID
+  })
+  createObjectStore(STATUS_TIMELINES_STORE, null, {
+    'statusId': ''
+  })
+  createObjectStore(NOTIFICATIONS_STORE, { keyPath: 'id' }, {
+    [TIMESTAMP]: TIMESTAMP,
+    [STATUS_ID]: STATUS_ID
+  })
+  createObjectStore(NOTIFICATION_TIMELINES_STORE, null, {
+    'notificationId': ''
+  })
+  createObjectStore(ACCOUNTS_STORE, { keyPath: 'id' }, {
+    [TIMESTAMP]: TIMESTAMP
+  })
+  createObjectStore(RELATIONSHIPS_STORE, { keyPath: 'id' }, {
+    [TIMESTAMP]: TIMESTAMP
+  })
+  createObjectStore(THREADS_STORE, null, {
+    'statusId': ''
+  })
+  createObjectStore(PINNED_STATUSES_STORE, null, {
+    'statusId': ''
+  })
+  createObjectStore(META_STORE)
+  done()
+}
+
+function addSearchAccountsMigration (db, tx, done) {
+  tx.objectStore(ACCOUNTS_STORE)
+    .createIndex(USERNAME_LOWERCASE, USERNAME_LOWERCASE)
+  done()
+}
+
+function snowflakeIdsMigration (db, tx, done) {
+  let stores = [STATUS_TIMELINES_STORE, NOTIFICATION_TIMELINES_STORE]
+  let storeDoneCount = 0
+
+  // Here we have to convert the old "reversePaddedBigInt" format to the new
+  // one which is compatible with Pleroma-style snowflake IDs.
+  stores.forEach(store => {
+    let objectStore = tx.objectStore(store)
+    let cursor = objectStore.openCursor()
+    cursor.onsuccess = e => {
+      let { result } = e.target
+      if (result) {
+        let { key, value } = result
+        // key is timeline name plus delimiter plus reverse padded big int
+        let newKey = key.split('\u0000')[0] + '\u0000' + toReversePaddedBigInt(value)
+
+        objectStore.delete(key).onsuccess = () => {
+          objectStore.add(value, newKey).onsuccess = () => {
+            result.continue()
+          }
+        }
+      } else {
+        if (++storeDoneCount === stores.length) {
+          done()
+        }
+      }
+    }
+  })
+}
+
+export const migrations = [
+  {
+    version: DB_VERSION_INITIAL,
+    migration: initialMigration
+  },
+  {
+    version: DB_VERSION_SEARCH_ACCOUNTS,
+    migration: addSearchAccountsMigration
+  },
+  {
+    version: DB_VERSION_SNOWFLAKE_IDS,
+    migration: snowflakeIdsMigration
+  }
+]

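
Hypothetical sketch (not part of this commit) of the migration contract established above: each entry receives (db, tx, done) and must call done() exactly once when its work finishes; a later schema change would bump DB_VERSION_CURRENT in constants and append one more entry. The names below are made up for illustration:

function exampleFutureMigration (db, tx, done) {
  // a real migration would use db/tx here (e.g. create a store or an index),
  // then signal completion exactly once:
  done()
}

const exampleEntry = {
  version: 12, // hypothetical DB_VERSION_* constant greater than DB_VERSION_SNOWFLAKE_IDS
  migration: exampleFutureMigration
}

exampleEntry.migration(null, null, () => console.log('migration 12 done'))
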
@@ -21,3 +21,10 @@ export function pickBy (obj, predicate) {
   }
   return res
 }
+
+export function padStart (string, length, chars) {
+  while (string.length < length) {
+    string = chars + string
+  }
+  return string
+}

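
Worth noting (editorial, not from the commit): in this diff the local padStart is only ever called with a one-character pad ('0', via zeroPad in statusIdSorting.js), where it matches the built-in String.prototype.padStart; with a longer pad string it could overshoot the target length, unlike the lodash version it replaces.

// the behaviour it is relied on for (single-character pad):
console.log('42'.padStart(5, '0')) // '00042' — the same result padStart('42', 5, '0') returns
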
@@ -1,24 +0,0 @@
-import padStart from 'lodash-es/padStart'
-
-export function zeroPad (str, toSize) {
-  return padStart(str, toSize, '0')
-}
-
-export function toPaddedBigInt (id) {
-  return zeroPad(id, 30)
-}
-
-export function toReversePaddedBigInt (id) {
-  let bigInt = toPaddedBigInt(id)
-  let res = ''
-  for (let i = 0; i < bigInt.length; i++) {
-    res += (9 - parseInt(bigInt.charAt(i), 10)).toString(10)
-  }
-  return res
-}
-
-export function compareTimelineItemSummaries (left, right) {
-  let leftPadded = toPaddedBigInt(left.id)
-  let rightPadded = toPaddedBigInt(right.id)
-  return leftPadded < rightPadded ? -1 : leftPadded === rightPadded ? 0 : 1
-}

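
Note on the deleted helper above (editorial, not from the commit): toReversePaddedBigInt ran parseInt on every character, which only works for the decimal digits of Mastodon IDs; Pleroma's base62 characters fall through to NaN, which appears to be the source of the mis-sorted timelines this commit fixes (#1071). A quick illustration:

console.log((9 - parseInt('7', 10)).toString(10)) // '2'   — fine for a numeric Mastodon ID
console.log((9 - parseInt('g', 10)).toString(10)) // 'NaN' — a base62 character garbles the key
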
@@ -0,0 +1,46 @@
+// Pleroma uses base62 IDs, Mastodon uses 0-9 big ints encoded as strings.
+// Using base62 for both works, since the first 10 characters of base62
+// are 0-9.
+
+import { padStart } from './lodash-lite'
+
+// Unfortunately base62 ordering is not the same as JavaScript's default ASCII ordering,
+// used both for JS string comparisons as well as IndexedDB ordering.
+const BASE62_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+const ASCII_ORDERING = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
+const MAX_ID_LENGTH = 30 // assume that Mastodon/Pleroma IDs won't get any bigger than this
+
+const BASE62_LOOKUP = new Map(BASE62_ALPHABET.split('').map((char, i) => ([char, i])))
+
+export function zeroPad (str, toSize) {
+  return padStart(str, toSize, '0')
+}
+
+export function toPaddedBigInt (id) {
+  let asciiOrdered = ''
+  for (let i = 0; i < id.length; i++) {
+    let char = id.charAt(i)
+    let idx = BASE62_LOOKUP.get(char)
+    let asciiChar = ASCII_ORDERING[idx]
+    asciiOrdered += asciiChar
+  }
+  return zeroPad(asciiOrdered, MAX_ID_LENGTH)
+}
+
+export function toReversePaddedBigInt (id) {
+  let padded = zeroPad(id, MAX_ID_LENGTH)
+  let reversed = ''
+  for (let i = 0; i < padded.length; i++) {
+    let char = padded.charAt(i)
+    let idx = BASE62_LOOKUP.get(char)
+    let reverseIdx = BASE62_ALPHABET.length - 1 - idx
+    reversed += ASCII_ORDERING[reverseIdx]
+  }
+  return reversed
+}
+
+export function compareTimelineItemSummaries (left, right) {
+  let leftPadded = toPaddedBigInt(left.id)
+  let rightPadded = toPaddedBigInt(right.id)
+  return leftPadded < rightPadded ? -1 : leftPadded === rightPadded ? 0 : 1
+}

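
A small usage sketch of the new module (editorial; the import path mirrors the one used elsewhere in the diff): toPaddedBigInt yields keys that compare in chronological order for both ID schemes, and toReversePaddedBigInt yields the reverse order, which is what the timeline store keys use.

import { toPaddedBigInt, toReversePaddedBigInt } from '../_utils/statusIdSorting'

const older = '101687554574502736' // Mastodon IDs from the tests below
const newer = '101689076270570796'
console.log(toPaddedBigInt(older) < toPaddedBigInt(newer))               // true
console.log(toReversePaddedBigInt(older) > toReversePaddedBigInt(newer)) // true
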
@@ -0,0 +1,129 @@
+/* global describe, it */
+
+import { toPaddedBigInt, toReversePaddedBigInt } from '../../src/routes/_utils/statusIdSorting'
+import assert from 'assert'
+import times from 'lodash-es/times'
+
+function lt (a, b) {
+  assert(a < b, `Failed: ${a} < ${b}`)
+}
+
+function gt (a, b) {
+  assert(a > b, `Failed: ${a} > ${b}`)
+}
+
+describe('test-id-sorting.js', () => {
+  it('can sort mastodon IDs correctly', () => {
+    let id1 = '1'
+    let id2 = '2'
+    let id3 = '101687554574502736'
+    let id4 = '101688993168288745'
+    let id5 = '101689076270570796'
+
+    lt(toPaddedBigInt(id1), toPaddedBigInt(id2))
+    lt(toPaddedBigInt(id2), toPaddedBigInt(id3))
+    lt(toPaddedBigInt(id3), toPaddedBigInt(id4))
+    lt(toPaddedBigInt(id4), toPaddedBigInt(id5))
+
+    assert.deepStrictEqual(toPaddedBigInt(id1), toPaddedBigInt(id1))
+    assert.deepStrictEqual(toPaddedBigInt(id2), toPaddedBigInt(id2))
+    assert.deepStrictEqual(toPaddedBigInt(id3), toPaddedBigInt(id3))
+    assert.deepStrictEqual(toPaddedBigInt(id4), toPaddedBigInt(id4))
+    assert.deepStrictEqual(toPaddedBigInt(id5), toPaddedBigInt(id5))
+
+    gt(toReversePaddedBigInt(id1), toReversePaddedBigInt(id2))
+    gt(toReversePaddedBigInt(id2), toReversePaddedBigInt(id3))
+    gt(toReversePaddedBigInt(id3), toReversePaddedBigInt(id4))
+    gt(toReversePaddedBigInt(id4), toReversePaddedBigInt(id5))
+
+    assert.deepStrictEqual(toReversePaddedBigInt(id1), toReversePaddedBigInt(id1))
+    assert.deepStrictEqual(toReversePaddedBigInt(id2), toReversePaddedBigInt(id2))
+    assert.deepStrictEqual(toReversePaddedBigInt(id3), toReversePaddedBigInt(id3))
+    assert.deepStrictEqual(toReversePaddedBigInt(id4), toReversePaddedBigInt(id4))
+    assert.deepStrictEqual(toReversePaddedBigInt(id5), toReversePaddedBigInt(id5))
+  })
+
+  it('can sort mastodon IDs correctly - more examples', () => {
+    let ids = times(1000, i => i.toString())
+
+    for (let i = 1; i < ids.length; i++) {
+      let prev = ids[i - 1]
+      let next = ids[i]
+      lt(toPaddedBigInt(prev), toPaddedBigInt(next))
+      gt(toReversePaddedBigInt(prev), toReversePaddedBigInt(next))
+    }
+  })
+
+  it('can sort base62 IDs correctly', () => {
+    let id1 = '0'
+    let id2 = 'a'
+    let id3 = 't'
+    let id4 = 'A'
+    let id5 = 'Z'
+
+    lt(toPaddedBigInt(id1), toPaddedBigInt(id2))
+    lt(toPaddedBigInt(id2), toPaddedBigInt(id3))
+    lt(toPaddedBigInt(id3), toPaddedBigInt(id4))
+    lt(toPaddedBigInt(id4), toPaddedBigInt(id5))
+
+    lt(toPaddedBigInt(id1), toPaddedBigInt(id5))
+    lt(toPaddedBigInt(id2), toPaddedBigInt(id5))
+    lt(toPaddedBigInt(id3), toPaddedBigInt(id5))
+    lt(toPaddedBigInt(id2), toPaddedBigInt(id4))
+
+    assert.deepStrictEqual(toPaddedBigInt(id1), toPaddedBigInt(id1))
+    assert.deepStrictEqual(toPaddedBigInt(id2), toPaddedBigInt(id2))
+    assert.deepStrictEqual(toPaddedBigInt(id3), toPaddedBigInt(id3))
+    assert.deepStrictEqual(toPaddedBigInt(id4), toPaddedBigInt(id4))
+    assert.deepStrictEqual(toPaddedBigInt(id5), toPaddedBigInt(id5))
+
+    gt(toReversePaddedBigInt(id1), toReversePaddedBigInt(id2))
+    gt(toReversePaddedBigInt(id2), toReversePaddedBigInt(id3))
+    gt(toReversePaddedBigInt(id3), toReversePaddedBigInt(id4))
+    gt(toReversePaddedBigInt(id4), toReversePaddedBigInt(id5))
+
+    gt(toReversePaddedBigInt(id1), toReversePaddedBigInt(id5))
+    gt(toReversePaddedBigInt(id2), toReversePaddedBigInt(id5))
+    gt(toReversePaddedBigInt(id3), toReversePaddedBigInt(id5))
+    gt(toReversePaddedBigInt(id2), toReversePaddedBigInt(id4))
+
+    assert.deepStrictEqual(toReversePaddedBigInt(id1), toReversePaddedBigInt(id1))
+    assert.deepStrictEqual(toReversePaddedBigInt(id2), toReversePaddedBigInt(id2))
+    assert.deepStrictEqual(toReversePaddedBigInt(id3), toReversePaddedBigInt(id3))
+    assert.deepStrictEqual(toReversePaddedBigInt(id4), toReversePaddedBigInt(id4))
+    assert.deepStrictEqual(toReversePaddedBigInt(id5), toReversePaddedBigInt(id5))
+  })
+
+  it('can sort pleroma ids - more examples', () => {
+    // these are already in base62 sorted order
+    let ids = [
+      '9gP7cpqqJWyp93GxRw',
+      '9gP7p4Ng7RdTgOSsro',
+      '9gP8eQQFvdZgoQ9tw0',
+      '9gP8XTjVDpsT3Iqgb2',
+      '9gP99enEY6IAMJnaXA',
+      '9gP9WIcp8QCIGbj6ES',
+      '9gPA897muEuxo0FxCa',
+      '9gPAaSqTB8Rv4nev0C',
+      '9gPAhfTCdeRCG5D9IO',
+      '9gPAG1uvaSBblj05Y0',
+      '9gPBatpwvN76kABf7Y',
+      '9gPBA9SYjPFVNUUZTU',
+      '9gPBOzteZJZO3wFCQy',
+      '9gPC7jAtaS1vEQdcnY',
+      '9gPC9Ps4KQMLwRdZWy',
+      '9gPCF0G8SvCKFHYg52',
+      '9gPCJoNY42C4qZJo0W',
+      '9gPEBGmBJX3YDntYBM',
+      '9gPELIqcT0BhXgksSG',
+      '9gPISh6j4FMCcu4Js0'
+    ]
+
+    for (let i = 1; i < ids.length; i++) {
+      let prev = ids[i - 1]
+      let next = ids[i]
+      lt(toPaddedBigInt(prev), toPaddedBigInt(next))
+      gt(toReversePaddedBigInt(prev), toReversePaddedBigInt(next))
+    }
+  })
+})