diff --git a/packages/modules/packages/archivist/packages/indexeddb/package.json b/packages/modules/packages/archivist/packages/indexeddb/package.json
index bb6b129dfa..d2584e0961 100644
--- a/packages/modules/packages/archivist/packages/indexeddb/package.json
+++ b/packages/modules/packages/archivist/packages/indexeddb/package.json
@@ -48,7 +48,8 @@
     "@xyo-network/id-payload-plugin": "workspace:^",
     "@xyo-network/payload-wrapper": "workspace:^",
     "fake-indexeddb": "^6.0.0",
-    "typescript": "^5.6.3"
+    "typescript": "^5.6.3",
+    "vitest": "^2.1.3"
   },
   "publishConfig": {
     "access": "public"
diff --git a/packages/modules/packages/archivist/packages/indexeddb/src/spec/Archivist.spec.ts b/packages/modules/packages/archivist/packages/indexeddb/src/spec/Archivist.spec.ts
index 4d1ebd7f87..d3aec31cf6 100644
--- a/packages/modules/packages/archivist/packages/indexeddb/src/spec/Archivist.spec.ts
+++ b/packages/modules/packages/archivist/packages/indexeddb/src/spec/Archivist.spec.ts
@@ -26,6 +26,9 @@ import {
   IDBVersionChangeEvent,
   indexedDB,
 } from 'fake-indexeddb'
+import {
+  beforeAll, describe, expect, it,
+} from 'vitest'
 
 import { IndexedDbArchivist } from '../Archivist.ts'
 import { IndexedDbArchivistConfigSchema } from '../Config.ts'
@@ -183,8 +186,8 @@ describe('IndexedDbArchivist', () => {
       expect(getResult?.length).toBe(sources.length)
       const dataHashes = (await PayloadBuilder.dataHashes(getResult)) ?? []
       const deleteResult = await archivistModule.delete?.(dataHashes)
-      expect(deleteResult).toBeArrayOfSize(dataHashes.length)
-      expect(await archivistModule.all?.()).toBeEmpty()
+      expect(deleteResult.length).toBe(dataHashes.length)
+      expect((await archivistModule.all?.()).length).toBe(0)
     })
   })
   describe('get', () => {
@@ -225,7 +228,7 @@ describe('IndexedDbArchivist', () => {
       const hashThatDoesNotExist = '0000000000000000000000000000000000000000000000000000000000000000'
       const getResult = await archivistModule.get([hashThatDoesNotExist])
       expect(getResult).toBeDefined()
-      expect(getResult).toBeArrayOfSize(0)
+      expect(getResult.length).toBe(0)
     })
     describe('by hash', () => {
       let payload1: PayloadWithMeta
@@ -263,18 +266,18 @@ describe('IndexedDbArchivist', () => {
       it('returns value using hash', async () => {
         const result = await archivistModule.get([dataHash1])
         expect(result).toBeDefined()
-        expect(result).toBeArrayOfSize(1)
+        expect(result.length).toBe(1)
       })
       it('deduplicates multiple hashes', async () => {
         const result = await archivistModule.get([dataHash1, dataHash2])
         expect(result).toBeDefined()
-        expect(result).toBeArrayOfSize(1)
+        expect(result.length).toBe(1)
       })
       it('returns the first occurrence of the hash', async () => {
         // Same data hash contained by multiple root hashes
         const result = await archivistModule.get([dataHash2])
         expect(result).toBeDefined()
-        expect(result).toBeArrayOfSize(1)
+        expect(result.length).toBe(1)
         // Returns the first occurrence of the data hash
         expect(result[0]).toEqual(payload1)
       })
@@ -283,12 +286,12 @@ describe('IndexedDbArchivist', () => {
       it('returns value using hash', async () => {
         const result = await archivistModule.get([rootHash1])
         expect(result).toBeDefined()
-        expect(result).toBeArrayOfSize(1)
+        expect(result.length).toBe(1)
       })
       it('deduplicates multiple hashes', async () => {
         const result = await archivistModule.get([rootHash1, rootHash1])
         expect(result).toBeDefined()
-        expect(result).toBeArrayOfSize(1)
+        expect(result.length).toBe(1)
       })
     })
   })
@@ -366,7 +369,7 @@ describe('IndexedDbArchivist', () => {
       // Ensure the DB has only one instance of the payload written to it
       const allResult = await archivistModule.all?.()
       expect(allResult).toBeDefined()
-      expect(allResult).toBeArrayOfSize(1)
+      expect(allResult.length).toBe(1)
     })
   })
 })
@@ -407,32 +410,32 @@ describe('IndexedDbArchivist', () => {
     // console.log(toJsonString([bw, payloads, errors], 10))
 
     const batch1 = await archivist.next?.({ limit: 2 })
-    expect(batch1).toBeArrayOfSize(2)
+    expect(batch1.length).toBe(2)
     expect(batch1?.[0].$hash).toEqual(payloads1[0].$hash)
 
     const batch2 = await archivist.next?.({ limit: 2, offset: await PayloadBuilder.hash(batch1?.[1]) })
-    expect(batch2).toBeArrayOfSize(2)
+    expect(batch2.length).toBe(2)
     expect(batch2?.[0].$hash).toEqual(payloads2[0].$hash)
 
     const batch3 = await archivist.next?.({ limit: 20, offset: await PayloadBuilder.hash(batch1?.[1]) })
-    expect(batch3).toBeArrayOfSize(2)
+    expect(batch3.length).toBe(2)
    expect(batch3?.[0].$hash).toEqual(payloads2[0].$hash)
 
     // desc
     const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
-    expect(batch1Desc).toBeArrayOfSize(2)
+    expect(batch1Desc.length).toBe(2)
     expect(batch1Desc?.[0].$hash).toEqual(payloads2[1].$hash)
 
     const batch2Desc = await archivist.next?.({
       limit: 2, offset: await PayloadBuilder.hash(batch1Desc?.[1]), order: 'desc',
     })
-    expect(batch2Desc).toBeArrayOfSize(2)
+    expect(batch2Desc.length).toBe(2)
     expect(batch2Desc?.[1].$hash).toEqual(payloads1[0].$hash)
 
     const batch3Desc = await archivist.next?.({
       limit: 20, offset: await PayloadBuilder.hash(batch1Desc?.[1]), order: 'desc',
     })
-    expect(batch3Desc).toBeArrayOfSize(2)
+    expect(batch3Desc.length).toBe(2)
     expect(batch3Desc?.[1].$hash).toEqual(payloads1[0].$hash)
   })
 })
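// Note on the assertion rewrites above: `toBeArrayOfSize` and `toBeEmpty` are
// jest-extended matchers rather than part of vitest's core expect API, which is
// why the hunks fall back to plain `.length` checks. A minimal sketch of the
// equivalences, assuming a plain vitest setup with no jest-extended matchers
// registered:
import { expect } from 'vitest'

const result: unknown[] = []
expect(result.length).toBe(0) // replaces expect(result).toBeArrayOfSize(0)
expect(result).toHaveLength(0) // built-in matcher that also asserts the length
expect(result).toEqual([]) // replaces expect(result).toBeEmpty() for arrays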
diff --git a/packages/modules/packages/archivist/packages/storage/package.json b/packages/modules/packages/archivist/packages/storage/package.json
index 48b4dfe544..257bc6cf74 100644
--- a/packages/modules/packages/archivist/packages/storage/package.json
+++ b/packages/modules/packages/archivist/packages/storage/package.json
@@ -43,6 +43,7 @@
   },
   "devDependencies": {
     "@xylabs/delay": "^4.3.2",
+    "@xylabs/object": "^4.3.2",
     "@xylabs/ts-scripts-yarn3": "^4.2.3",
     "@xylabs/tsconfig": "^4.2.3",
     "@xyo-network/account": "workspace:^",
diff --git a/packages/modules/packages/archivist/packages/storage/src/StorageArchivist.ts b/packages/modules/packages/archivist/packages/storage/src/StorageArchivist.ts
index 94391bb544..13ed4781e3 100644
--- a/packages/modules/packages/archivist/packages/storage/src/StorageArchivist.ts
+++ b/packages/modules/packages/archivist/packages/storage/src/StorageArchivist.ts
@@ -9,6 +9,7 @@ import type {
   ArchivistInsertQuery,
   ArchivistInstance,
   ArchivistModuleEventData,
+  ArchivistNextOptions,
   ArchivistParams,
 } from '@xyo-network/archivist-model'
 import {
@@ -29,6 +30,8 @@ import store from 'store2'
 
 const storeTypes = store as unknown as StoreType
 
+type WithStorageMeta = WithMeta & { _timestamp: number }
+
 export type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'
 export const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'
 
@@ -92,24 +95,6 @@ export class StorageArchivist<
     return this._storage
   }
 
-  /* override async loadAccount(account?: AccountInstance, persistAccount?: boolean, privateStorage?: StoreBase, _logger?: Logger) {
-    if (!this._account) {
-      if (persistAccount) {
-        const privateKey = privateStorage?.get('privateKey')
-        if (privateKey) {
-          try {
-            this._account = await Account.create({ privateKey })
-            return this._account
-          } catch (ex) {
-            console.error(`Error reading Account from storage [${ex}] - Recreating Account`)
-            privateStorage?.remove('privateKey')
-          }
-        }
-      }
-    }
-    return await super.loadAccount()
-  } */
-
   protected override allHandler(): PromisableArray<PayloadWithMeta> {
     const found = new Set()
     this.logger?.log(`this.storage.length: ${this.storage.length}`)
@@ -123,6 +108,8 @@ export class StorageArchivist<
         return true
       }
     })
+      .sort((a, b) => a._timestamp - b._timestamp)
+      .map(payload => this.removeStorageMeta(payload))
   }
 
   protected override clearHandler(): void | Promise<void> {
@@ -158,6 +145,35 @@ export class StorageArchivist<
     ).filter(exists)
   }
 
+  protected getFromOffset(
+    order: 'asc' | 'desc' = 'asc',
+    limit: number = 10,
+    offset?: Hash,
+  ): WithStorageMeta[] {
+    const offsetHash = offset ? (this.storage.get(offset) as PayloadWithMeta | undefined)?.$hash : undefined
+    const found = new Set()
+    const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())
+      .map(([, value]) => value)
+      .filter((payload) => {
+        if (found.has(payload.$hash)) {
+          return false
+        } else {
+          found.add(payload.$hash)
+          return true
+        }
+      })
+      .sort((a, b) => {
+        return order === 'asc' ? a._timestamp - b._timestamp : b._timestamp - a._timestamp
+      })
+    if (offsetHash) {
+      const index = payloads.findIndex(payload => payload.$hash === offsetHash)
+      if (index !== -1) {
+        return payloads.slice(index + 1, index + 1 + limit)
+      }
+    }
+    return payloads.slice(0, limit)
+  }
+
   protected override getHandler(hashes: string[]): Promisable<PayloadWithMeta[]> {
     const found = new Set()
     return (
@@ -172,22 +188,42 @@ export class StorageArchivist<
         found.add(payload.$hash)
         return true
       }
-    })
+    }).map(payload => this.removeStorageMeta(payload))
   }
 
   protected override async insertHandler(payloads: Payload[]): Promise<PayloadWithMeta[]> {
+    let timestamp = Date.now()
     const pairs = await PayloadBuilder.hashPairs(payloads)
     return pairs.map(([payload, hash]) => {
-      const value = JSON.stringify(payload)
+      const storagePayload = this.addStorageMeta(payload, timestamp++)
+      const value = JSON.stringify(storagePayload)
+      console.log('insert.storagePayloads:', storagePayload)
       assertEx(value.length < this.maxEntrySize, () => `Payload too large [${hash}, ${value.length}]`)
-      this.storage.set(hash, payload)
-      this.storage.set(payload.$hash, payload)
+      this.storage.set(hash, storagePayload)
+      this.storage.set(payload.$hash, storagePayload)
       return payload
     })
   }
 
+  protected override nextHandler(options?: ArchivistNextOptions): Promisable<PayloadWithMeta[]> {
+    const {
+      limit, offset, order,
+    } = options ?? {}
+    return this.getFromOffset(order, limit ?? 10, offset)
+  }
+
   protected override async startHandler() {
     await super.startHandler()
     return true
   }
+
+  private addStorageMeta(payload: WithMeta, _timestamp: number): WithStorageMeta {
+    return { ...payload, _timestamp }
+  }
+
+  private removeStorageMeta(payload: WithStorageMeta): WithMeta {
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    const { _timestamp, ...rest } = payload
+    return rest as WithMeta
+  }
 }
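// A standalone sketch of the pagination pattern the StorageArchivist diff above
// introduces: stamp each record with a monotonically increasing `_timestamp` at
// insert time, strip it again on read, and page by sorting on the stamp and
// slicing just after the offset entry. The `AnyPayload` shape and all names here
// are simplified stand-ins, not the actual @xyo-network types.
type AnyPayload = { schema: string; $hash: string }
type Stored = AnyPayload & { _timestamp: number }

const storage = new Map<string, Stored>()
let clock = Date.now()

function insert(payloads: AnyPayload[]): void {
  for (const payload of payloads) {
    // clock++ keeps insertion order stable even when inserts share a millisecond
    storage.set(payload.$hash, { ...payload, _timestamp: clock++ })
  }
}

function next(limit = 10, offset?: string, order: 'asc' | 'desc' = 'asc'): AnyPayload[] {
  const sorted = [...storage.values()]
    .sort((a, b) => (order === 'asc' ? a._timestamp - b._timestamp : b._timestamp - a._timestamp))
  // the offset hash is exclusive: resume with the entry after it (or at 0 if absent)
  const start = offset ? sorted.findIndex(p => p.$hash === offset) + 1 : 0
  return sorted
    .slice(start, start + limit)
    .map(({ _timestamp, ...rest }) => rest) // drop the storage-only meta field
}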
10, offset) + } + protected override async startHandler() { await super.startHandler() return true } + + private addStorageMeta(payload: WithMeta, _timestamp: number): WithStorageMeta { + return { ...payload, _timestamp } + } + + private removeStorageMeta(payload: WithStorageMeta): WithMeta { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { _timestamp, ...rest } = payload + return rest as WithMeta + } } diff --git a/packages/modules/packages/archivist/packages/storage/src/spec/StorageArchivist.next.spec.ts b/packages/modules/packages/archivist/packages/storage/src/spec/StorageArchivist.next.spec.ts new file mode 100644 index 0000000000..6cc57719ee --- /dev/null +++ b/packages/modules/packages/archivist/packages/storage/src/spec/StorageArchivist.next.spec.ts @@ -0,0 +1,80 @@ +/** + * @jest-environment jsdom + */ + +import { toJsonString } from '@xylabs/object' +import { Account } from '@xyo-network/account' +import { PayloadBuilder } from '@xyo-network/payload-builder' +import { + describe, expect, + it, +} from 'vitest' + +import { StorageArchivist, StorageArchivistConfigSchema } from '../StorageArchivist.ts' + +describe('next', () => { + it('next', async () => { + const archivist = await StorageArchivist.create({ + account: 'random', + config: { + namespace: 'test-next', + schema: StorageArchivistConfigSchema, + type: 'local', + }, + }) + + const account = await Account.random() + + const payloads1 = [ + await PayloadBuilder.build({ schema: 'network.xyo.test', value: 1 }), + await PayloadBuilder.build({ schema: 'network.xyo.test', value: 2 }), + ] + + // console.log('Payloads1:', toJsonString(await PayloadBuilder.hashPairs(payloads1), 10)) + + const payloads2 = [ + await PayloadBuilder.build({ schema: 'network.xyo.test', value: 3 }), + await PayloadBuilder.build({ schema: 'network.xyo.test', value: 4 }), + ] + + // console.log('Payloads2:', toJsonString(await PayloadBuilder.hashPairs(payloads2), 10)) + + await archivist.insert(payloads1) + // console.log(toJsonString(payloads1, 10)) + const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account) + expect(bw).toBeDefined() + expect(payloads).toBeDefined() + expect(errors).toBeDefined() + + // console.log(toJsonString([bw, payloads, errors], 10)) + + const batch1 = await archivist.next?.({ limit: 2 }) + expect(batch1.length).toBe(2) + expect(batch1?.[0].$hash).toEqual(payloads1[0].$hash) + + const batch2 = await archivist.next?.({ limit: 2, offset: await PayloadBuilder.hash(batch1?.[1]) }) + expect(batch2.length).toBe(2) + expect(batch2?.[0].$hash).toEqual(payloads2[0].$hash) + + const batch3 = await archivist.next?.({ limit: 20, offset: await PayloadBuilder.hash(batch1?.[1]) }) + expect(batch3.length).toBe(2) + expect(batch3?.[0].$hash).toEqual(payloads2[0].$hash) + + // desc + const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' }) + expect(batch1Desc.length).toBe(2) + expect(batch1Desc?.[0].$hash).toEqual(payloads2[1].$hash) + + const batch2Desc = await archivist.next?.({ + limit: 2, offset: await PayloadBuilder.hash(batch1Desc?.[1]), order: 'desc', + }) + expect(batch2Desc.length).toBe(2) + expect(batch2Desc?.[1].$hash).toEqual(payloads1[0].$hash) + + const batch3Desc = await archivist.next?.({ + limit: 20, offset: await PayloadBuilder.hash(batch1Desc?.[1]), order: 'desc', + }) + expect(batch3Desc.length).toBe(2) + expect(batch3Desc?.[1].$hash).toEqual(payloads1[0].$hash) + }) +}) diff --git a/yarn.lock b/yarn.lock index f45a6e9848..399c1c5eba 100644 --- a/yarn.lock +++ 
diff --git a/yarn.lock b/yarn.lock
index f45a6e9848..399c1c5eba 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -6392,6 +6392,7 @@ __metadata:
     fake-indexeddb: "npm:^6.0.0"
     idb: "npm:^8.0.0"
     typescript: "npm:^5.6.3"
+    vitest: "npm:^2.1.3"
   languageName: unknown
   linkType: soft
 
@@ -6443,6 +6444,7 @@ __metadata:
     "@xylabs/delay": "npm:^4.3.2"
     "@xylabs/exists": "npm:^4.3.2"
     "@xylabs/hex": "npm:^4.3.2"
+    "@xylabs/object": "npm:^4.3.2"
    "@xylabs/promise": "npm:^4.3.2"
    "@xylabs/ts-scripts-yarn3": "npm:^4.2.3"
    "@xylabs/tsconfig": "npm:^4.2.3"