Skip to content

Commit

Permalink
Fix: ProjectToken.RevenueSplitLeft mapping (#324)
Browse files Browse the repository at this point in the history
* fix: unstake tokens after current revenue share finalized and new revenue share issued

* [offchainState] fix: transform raw json objects to jsonb properties

* fix: ProjectToken.RevenueSplitLeft mapping

* [CRT] don't decrease revenue share participant numbers

* fix: getCumulativeHistoricalShareAllocation custom resolver

* bump package version & update CHANGELOG
  • Loading branch information
zeeshanakram3 authored Apr 2, 2024
1 parent 434bccf commit ba75a4b
Show file tree
Hide file tree
Showing 7 changed files with 79 additions and 43 deletions.
10 changes: 9 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
# 4.0.0
# 4.0.2

## Bug Fixes:
- Fixed: unstake tokens after revenue share has been finalized.
- Fixed: transform raw json objects to `jsonb` properties while importing the offchain state.
- Fixed: `bigint` to `number` conversion in `getCumulativeHistoricalShareAllocation` custom resolver.
- Fixed: duplicate notifications being received by users for featured NFTs.

# 4.0.1

## Misc

Expand Down
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "orion",
"version": "4.0.1",
"version": "4.0.2",
"engines": {
"node": ">=16"
},
Expand Down
24 changes: 9 additions & 15 deletions src/mappings/token/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -757,21 +757,15 @@ export async function processRevenueSplitLeftEvent({
}: EventHandlerContext<'ProjectToken.RevenueSplitLeft'>) {
const account = await getTokenAccountByMemberByTokenOrFail(overlay, memberId, tokenId)
account.stakedAmount -= unstakedAmount
const token = await overlay.getRepository(CreatorToken).getByIdOrFail(tokenId.toString())
if (token.currentRevenueShareId) {
// TODO: refactor this as should be true all the times, might be a good idea to panic
const revenueShare = await overlay
.getRepository(RevenueShare)
.getByIdOrFail(token.currentRevenueShareId)
revenueShare.participantsNum -= 1
const qRevenueShareParticipation = (
await overlay
.getRepository(RevenueShareParticipation)
.getManyByRelation('accountId', account.id)
).find((participation) => participation.revenueShareId === revenueShare.id)
if (qRevenueShareParticipation) {
qRevenueShareParticipation.recovered = true
}

const revenueShareParticipation = (
await overlay
.getRepository(RevenueShareParticipation)
.getManyByRelation('accountId', account.id)
).find((participation) => participation.recovered === false)

if (revenueShareParticipation) {
revenueShareParticipation.recovered = true
}
}

Expand Down
2 changes: 1 addition & 1 deletion src/server-extension/resolvers/CreatorToken/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ export class TokenResolver {
cumulativeAllocationAmount += share.allocation
}
return {
cumulativeHistoricalAllocation: Number(cumulativeAllocationAmount),
cumulativeHistoricalAllocation: cumulativeAllocationAmount.toString(),
}
}

Expand Down
4 changes: 2 additions & 2 deletions src/server-extension/resolvers/CreatorToken/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ export class GetCumulativeHistoricalShareAllocationArgs {

@ObjectType()
export class GetCumulativeHistoricalShareAllocationResult {
@Field(() => Int, { nullable: false })
cumulativeHistoricalAllocation!: number
@Field(() => String, { nullable: false })
cumulativeHistoricalAllocation!: string
}

@ArgsType()
Expand Down
76 changes: 55 additions & 21 deletions src/utils/offchainState.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ import { createLogger } from '@subsquid/logger'
import assert from 'assert'
import { createParseStream, createStringifyStream } from 'big-json'
import fs from 'fs'
import { snakeCase } from 'lodash'
import path from 'path'
import { EntityManager } from 'typeorm'
import { EntityManager, ValueTransformer } from 'typeorm'
import * as model from '../model'
import {
AccountNotificationPreferences,
Expand Down Expand Up @@ -35,7 +36,7 @@ type ClassConstructors<T> = {
type ExportedStateMap = {
[K in keyof ClassConstructors<typeof model>]?:
| true
| (keyof SnakeCaseKeys<InstanceType<ClassConstructors<typeof model>[K]>>)[]
| (keyof InstanceType<ClassConstructors<typeof model>[K]>)[]
}

const exportedStateMap: ExportedStateMap = {
Expand All @@ -59,17 +60,17 @@ const exportedStateMap: ExportedStateMap = {
EmailDeliveryAttempt: true,
Token: true,
NextEntityId: true,
Channel: ['is_excluded', 'video_views_num', 'follows_num', 'ypp_status', 'channel_weight'],
Video: ['is_excluded', 'views_num'],
Comment: ['is_excluded'],
OwnedNft: ['is_featured'],
VideoCategory: ['is_supported'],
Channel: ['isExcluded', 'videoViewsNum', 'followsNum', 'yppStatus', 'channelWeight'],
Video: ['isExcluded', 'viewsNum'],
Comment: ['isExcluded'],
OwnedNft: ['isFeatured'],
VideoCategory: ['isSupported'],
}

type ExportedData = {
[K in keyof typeof exportedStateMap]?: {
type: 'insert' | 'update'
values: Record<string, unknown>[]
values: InstanceType<ClassConstructors<typeof model>[K]>[]
}
}

Expand All @@ -88,20 +89,20 @@ function migrateExportDataToV300(data: ExportedData): ExportedData {
id: `${V2_MIGRATION_USER_PREFIX}${uniqueId()}`,
isRoot: false,
}
data.User = { type: 'insert', values: [migrationUser] }
data.User = { type: 'insert', values: [migrationUser as model.User] }
const replaceIpWithUserId = (v: Record<string, unknown>) => {
delete v.ip
v.userId = migrationUser.id
}
data.VideoViewEvent?.values.forEach(replaceIpWithUserId)
data.Report?.values.forEach(replaceIpWithUserId)
data.NftFeaturingRequest?.values.forEach(replaceIpWithUserId)
data.VideoViewEvent?.values.forEach(replaceIpWithUserId as any)
data.Report?.values.forEach(replaceIpWithUserId as any)
data.NftFeaturingRequest?.values.forEach(replaceIpWithUserId as any)

// We don't migrate channel follows from v2, because in v3
// an account is required in order to follow a channel
delete data.ChannelFollow
data.Channel?.values.forEach((v) => {
v.follows_num = 0
v.followsNum = 0
})

return data
Expand All @@ -111,8 +112,6 @@ function migrateExportDataToV320(data: ExportedData): ExportedData {
data.Account?.values.forEach((account) => {
// account will find himself with all notification pref. enabled by default
account.notificationPreferences = defaultNotificationPreferences()
// referrer channel id is set to null
account.referrerChannelId = null
})

// all channels will start as unverified because they are re-synched from mappings
Expand Down Expand Up @@ -197,7 +196,10 @@ export class OffchainState {
? await em
.getRepository(entityName)
.createQueryBuilder()
.select(['id', ...(fields as unknown as string)])
.select([
'id',
...fields.map((field) => `${snakeCase(String(field))} AS "${String(field)}"`),
])
.getRawMany()
: await em.getRepository(entityName).find({})
if (!values.length) {
Expand Down Expand Up @@ -234,8 +236,36 @@ export class OffchainState {
return parseInt(major) * (1000 * 1000) + parseInt(minor) * 1000 + parseInt(patch)
}

private transformJsonbProperties(data: ExportedData, em: EntityManager): ExportedData {
  // Rebuild raw JSON values into proper JSONB objects by running each
  // jsonb column's registered TypeORM transformer over the exported rows.
  const transformedEntries = Object.entries(data).map(([entityName, { type, values }]) => {
    const entityMetadata = em.connection.getMetadata(entityName)
    // Only jsonb columns need re-hydration; plain columns are kept as-is.
    const jsonbColumns = entityMetadata.columns.filter((column) => column.type === 'jsonb')

    const rehydratedValues = (values as any[]).map((row) => {
      for (const column of jsonbColumns) {
        const transformer = column.transformer as ValueTransformer | undefined
        const rawValue = row[column.propertyName]
        // Skip columns without a transformer or with empty/missing values.
        if (rawValue && transformer) {
          row[column.propertyName] = transformer.from(rawValue)
        }
      }
      return row
    })

    return [entityName, { type, values: rehydratedValues }]
  })

  return Object.fromEntries(transformedEntries)
}

public prepareExportData(exportState: ExportedState, em: EntityManager): ExportedData {
let { data } = exportState

data = this.transformJsonbProperties(data, em)

Object.entries(this.migrations)
.sort(([a], [b]) => this.versionToNumber(a) - this.versionToNumber(b)) // sort in increasing order
.forEach(([version, fn]) => {
Expand All @@ -250,7 +280,7 @@ export class OffchainState {
private async importNextEntityIdCounters(
overlay: EntityManagerOverlay,
entityName: string,
data: Record<string, unknown>[]
data: model.NextEntityId[]
) {
const em = overlay.getEm()
assert(entityName === 'NextEntityId')
Expand Down Expand Up @@ -297,7 +327,7 @@ export class OffchainState {
const fieldTypes = Object.fromEntries(
fieldNames.map((fieldName) => {
const metaType = meta.columns.find(
(c) => c.databaseNameWithoutPrefixes === fieldName
(c) => c.databaseNameWithoutPrefixes === snakeCase(fieldName)
)?.type
return [fieldName, metaType === String ? 'text' : metaType]
})
Expand All @@ -314,7 +344,7 @@ export class OffchainState {
`UPDATE "${meta.tableName}"
SET ${fieldNames
.filter((f) => f !== 'id')
.map((f) => `"${f}" = "data"."${f}"`)
.map((f) => `"${snakeCase(f)}" = "data"."${f}"`)
.join(', ')}
FROM (
SELECT
Expand All @@ -326,7 +356,7 @@ export class OffchainState {
.join(', ')}
) AS "data"
WHERE "${meta.tableName}"."id" = "data"."id"`,
fieldNames.map((fieldName) => batch.map((v) => v[fieldName]))
fieldNames.map((fieldName) => batch.map((v) => v[fieldName as keyof typeof v]))
)
}
} else {
Expand All @@ -344,7 +374,11 @@ export class OffchainState {

// UPSERT operation specifically for NextEntityId
if (entityName === 'NextEntityId') {
await this.importNextEntityIdCounters(overlay, entityName, batch)
await this.importNextEntityIdCounters(
overlay,
entityName,
batch as model.NextEntityId[]
)
} else {
await em.getRepository(entityName).insert(batch)
}
Expand Down

0 comments on commit ba75a4b

Please sign in to comment.