diff --git a/.env.dev b/.env.dev index 9ddb284..336f4d2 100644 --- a/.env.dev +++ b/.env.dev @@ -1,4 +1,4 @@ -ARWEAVE_NODES=["https://arweave.net"] +ARWEAVE_NODES=["http://lon-2.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] DATABASE_HOST=0.0.0.0 DATABASE_PORT=5432 DATABASE_USER=arweave @@ -8,15 +8,19 @@ DATABASE_NAME=arweave ENVIRONMENT=public PORT=3000 -PARALLEL=4 +PARALLEL=1 +ANS102=1 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] - CACHING=1 CACHE_FOLDER=/gateway/cache CACHE_OFFSET=0 -MANIFEST_PREFIX=amp-gw.online \ No newline at end of file +MANIFEST=1 +MANIFEST_PREFIX=amp-gw.online + +TYPE=APP +FILTER=app.filter.json +START_HEIGHT=764180 \ No newline at end of file diff --git a/.env.docker b/.env.docker index 7404e1e..bf5dfe0 100644 --- a/.env.docker +++ b/.env.docker @@ -1,4 +1,4 @@ -ARWEAVE_NODES=["https://arweave.net"] +ARWEAVE_NODES=["http://lon-2.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] DATABASE_HOST=postgres DATABASE_PORT=5432 DATABASE_USER=arweave @@ -8,15 +8,19 @@ DATABASE_NAME=arweave ENVIRONMENT=public PORT=3000 -PARALLEL=4 +PARALLEL=1 +ANS102=1 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] - CACHING=1 CACHE_FOLDER=/gateway/cache CACHE_OFFSET=0 -MANIFEST_PREFIX=amp-gw.online \ No newline at end of file +MANIFEST=1 +MANIFEST_PREFIX=amp-gw.online + +TYPE=APP +FILTER=app.filter.json +START_HEIGHT=764180 \ No newline at end of file diff --git a/.gitignore b/.gitignore index edc7766..14b6770 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,7 @@ snapshot **/Thumbs.db # Webstorm -.idea \ No newline at end of file +.idea + +# App Filters +app.filter.json \ No newline at end of file diff --git a/QUICKSTART.md b/QUICKSTART.md new file mode 100644 index 0000000..a77e075 --- /dev/null +++ b/QUICKSTART.md @@ -0,0 +1,68 @@ +# 
Quickstart with App Nodes + +## Configuring your environment + +In your `.env` file make sure to have the following configured: + +```conf +TYPE=APP # This makes sure it's configured to be an app node +``` + +```conf +FILTER=app.filter.json # The path to the app filter JSON file +``` + +```conf +START_HEIGHT=764189 # One block (-1) before the height your contract was deployed at +``` + +Make sure `START_HEIGHT` is at least one block before where the contract was deployed. + +## Creating a filter file + +Your `app.filter.json` file needs to filter for both the smart contract deployment and the smart contract source. You can filter for specific ids by using the `id` key. + +```json +{ + "filter": [ + { + "id": "boJ3Fa1OU9W1NY5g1dkmgqYk5Lg_mndcdC9q64CmDPU" + }, + { + "id": "aZrQ9fNp1fdKqBsdZKVHFF4NZRx-icDGfAncRw4zGpY" + } + ] +} +``` + +You will also need to filter for the Contract tag with that ID. You can do so by adding a tag filter. + +```json +{ + "filter": [ + { + "name": "Contract", + "value": "boJ3Fa1OU9W1NY5g1dkmgqYk5Lg_mndcdC9q64CmDPU" + } + ] +} +``` + +See `app.filter.dev.json` for a reference on how to set up a filter file. Update the values with your smart contract info. + +## Filtering additional tags + +You can also filter for other tags. Just add a new `filter` object to the filter array. + +```json +{ + "filter": [ + { + "name": "AR-Tag-Key", + "value": "Any-Value" + } + ] +} +``` + +Adding multiple `name, value` objects to the filter acts as an `AND` operator. Creating new `filter` objects acts as an `OR` operator when filtering for transactions. \ No newline at end of file diff --git a/README.md b/README.md index 9a2341d..b8671ab 100644 --- a/README.md +++ b/README.md @@ -12,22 +12,16 @@ Review the [documentation](https://arweaveteam.github.io/gateway/#/) to learn mo 2. Docker and Docker Compose LTS -### Suggested Hardware +## Quickstart with App Nodes -There are several million transactions on the Arweave chain. 
In order to effectively serve content on the gateway you'll need a decent sized computer. The ideal specs for a Gateway should have the following: - -1. 16GB RAM (ideally 32GB RAM) - -2. ~1TB of SSD storage available - -3. Intel i5 / AMD FX or greater, +4 vCPUs should be more than enough, these are typically Intel Xeon CPUs. +To get started with the new app nodes, please read the [Quick Start Guide](./QUICKSTART.md). It goes over how to configure the environment and write filters for application-specific needs. ## Environment By default, there is a default environment you can use located at `.env.docker` in the repository. ```env -ARWEAVE_NODES=["https://arweave.net"] +ARWEAVE_NODES=["http://lon-2.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] DATABASE_HOST=postgres DATABASE_PORT=5432 @@ -38,24 +32,30 @@ DATABASE_NAME=arweave ENVIRONMENT=public PORT=3000 -PARALLEL=4 - -INDICES=["App-Name", "app", "domain", "namespace"] +PARALLEL=1 +ANS102=1 CACHING=1 CACHE_FOLDER=/gateway/cache CACHE_OFFSET=0 +MANIFEST=1 MANIFEST_PREFIX=amp-gw.online + +TYPE=APP +FILTER=app.filter.json +START_HEIGHT=764180 ``` Make sure you copy this configuration to `.env`. ```bash -cp .env.docker .env +cp .env.dev .env ``` -## Compilation +You should also update the `ARWEAVE_NODES` to valid Arweave nodes. + +## Running the server You can start the server with `docker-compose`. 
diff --git a/app.filter.dev.json b/app.filter.dev.json new file mode 100644 index 0000000..acaa774 --- /dev/null +++ b/app.filter.dev.json @@ -0,0 +1,20 @@ +[ + { + "filter": [ + { + "name": "Contract", + "value": "boJ3Fa1OU9W1NY5g1dkmgqYk5Lg_mndcdC9q64CmDPU" + } + ] + }, + { + "filter": [ + { + "id": "boJ3Fa1OU9W1NY5g1dkmgqYk5Lg_mndcdC9q64CmDPU" + }, + { + "id": "aZrQ9fNp1fdKqBsdZKVHFF4NZRx-icDGfAncRw4zGpY" + } + ] + } +] \ No newline at end of file diff --git a/bin/index.create.sh b/bin/index.create.sh index 1aa6850..759d629 100644 --- a/bin/index.create.sh +++ b/bin/index.create.sh @@ -26,14 +26,6 @@ CREATE INDEX "transactions_height" ON transactions USING BTREE ("height"); CREATE INDEX "transactions_target" ON transactions USING BTREE ("target"); --- Transaction Owner Address Index CREATE INDEX "transactions_owner_address" ON transactions USING BTREE ("owner_address"); ---- Transaction Namespace Index -CREATE INDEX "index_namespace_transactions" ON transactions USING BTREE ("namespace"); ---- Transaction Domain Index -CREATE INDEX "index_domain_transactions" ON transactions USING BTREE ("domain"); ---- Transaction App Index -CREATE INDEX "index_app_transactions" ON transactions USING BTREE ("app"); ---- Transaction App-Name Index -CREATE INDEX "index_App-Name_transactions" ON transactions USING BTREE ("App-Name"); --- Transactions created_at index CREATE INDEX "transactions_created_at" ON transactions USING BTREE ("created_at"); @@ -58,8 +50,6 @@ CREATE INDEX "tags_name_value_128" ON tags USING BTREE ("name", "value") WHERE L CREATE INDEX "tags_tx_id_name_128" ON tags USING BTREE ("tx_id", "name") WHERE LENGTH("name") > 64 AND LENGTH("name") < 128; --- Tag created_at CREATE INDEX "tags_created_at" ON tags USING BTREE ("created_at"); -CREATE INDEX "tags_created_at_64" ON tags USING BTREE ("created_at") WHERE LENGTH("value") < 64; -CREATE INDEX "tags_created_at_128" ON tags USING BTREE ("created_at") WHERE LENGTH("value") < 128; EOF diff --git a/docs/DEV.md 
b/docs/DEV.md index b1262d3..cb01ab5 100755 --- a/docs/DEV.md +++ b/docs/DEV.md @@ -44,7 +44,7 @@ exit By default, there is a development environment you can use located at `.env.dev` in the repository. This `.dev` environment is different to the `.env.docker` environment which is designed for `docker` usage. ```env -ARWEAVE_NODES=["https://arweave.net"] +ARWEAVE_NODES=["http://lon-2.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] DATABASE_HOST=0.0.0.0 DATABASE_PORT=5432 @@ -55,18 +55,22 @@ DATABASE_NAME=arweave ENVIRONMENT=public PORT=3000 -PARALLEL=4 +PARALLEL=1 +ANS102=1 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] - CACHING=1 CACHE_FOLDER=/gateway/cache CACHE_OFFSET=0 +MANIFEST=1 MANIFEST_PREFIX=amp-gw.online + +TYPE=APP +FILTER=app.filter.json +START_HEIGHT=764180 ``` Make sure you copy this configuration to `.env`. diff --git a/docs/MANIFEST.md b/docs/MANIFEST.md index 0ebae19..e865d8e 100755 --- a/docs/MANIFEST.md +++ b/docs/MANIFEST.md @@ -4,6 +4,20 @@ Transaction manifests are only usable with a valid domain. Please make sure to h In order to effectively use manifests. You need to have domain wildcards pointing to your domain. +### Enabling Manifests + +Make sure that the environment variable `MANIFEST` is set to `1`. + +```conf +MANIFEST=1 +``` + +To disable manifests (useful for non TLD gateways), run: + +```conf +MANIFEST=0 +``` + ### Configuring Manifests In order to configure manifests, you need to change the `MANIFEST_PREFIX` environment variable. It should just be your domain name. Simply change it from: diff --git a/docs/README.md b/docs/README.md index 17960ff..16cf8bb 100755 --- a/docs/README.md +++ b/docs/README.md @@ -27,7 +27,7 @@ There are several million transactions on the Arweave chain. In order to effecti By default, there is a default environment you can use located at `.env.docker` in the repository. 
```env -ARWEAVE_NODES=["https://arweave.net"] +ARWEAVE_NODES=["http://lon-2.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] DATABASE_HOST=postgres DATABASE_PORT=5432 @@ -40,18 +40,22 @@ MANIFESTS=0 BIP39=0 PORT=3000 -PARALLEL=4 +PARALLEL=1 +ANS102=1 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] - CACHING=1 CACHE_FOLDER=/gateway/cache CACHE_OFFSET=0 +MANIFEST=1 MANIFEST_PREFIX=amp-gw.online + +TYPE=APP +FILTER=app.filter.json +START_HEIGHT=764180 ``` Make sure you copy this configuration to `.env`. diff --git a/migrations/20200404025828_initialize.ts b/migrations/20200404025828_initialize.ts index d71a2e1..e375883 100755 --- a/migrations/20200404025828_initialize.ts +++ b/migrations/20200404025828_initialize.ts @@ -4,9 +4,6 @@ import {config} from 'dotenv'; config(); export async function up(knex: Knex) { - const indices = JSON.parse(process.env.INDICES || '[]'); - - return knex.schema .withSchema(process.env.ENVIRONMENT || 'public') .createTable('transactions', (table) => { @@ -22,16 +19,12 @@ export async function up(knex: Knex) { table.string('content_type'); table.integer('format', 2); table.integer('height', 4); - table.string('owner_address'); - table.string('data_root', 64); - table.string('parent', 64); + table.integer('precache_height', 4); + table.text('owner_address'); + table.text('data_root'); + table.text('parent'); table.timestamp('created_at').defaultTo(knex.fn.now()); - for (let i = 0; i < indices.length; i++) { - const index = indices[i]; - table.string(index, 64); - } - table.primary(['id'], 'pkey_transactions'); }) .createTable('blocks', (table) => { diff --git a/package.json b/package.json index 6c2efa1..e3af6d5 100644 --- a/package.json +++ b/package.json @@ -39,6 +39,7 @@ "detect-mocha": "^0.1.0", "dotenv": "^8.2.0", "event-stream": "^4.0.1", + "execa": "^5.1.1", "express": "^4.17.1", "express-pg-session": "^1.1.0", "express-session": 
"^1.17.1", diff --git a/src/Gateway.ts b/src/Gateway.ts index ef12194..dddc346 100755 --- a/src/Gateway.ts +++ b/src/Gateway.ts @@ -19,6 +19,8 @@ import {startSync} from './database/sync.database'; config(); +export const enableManifests = process.env.MANIFEST === '1' ? true : false; + export const app: Express = express(); export function start() { @@ -30,7 +32,10 @@ export function start() { app.use(sessionMiddleware); app.use(sessionPinningMiddleware); app.use(koiLogger.logger); - app.use(manifestMiddleware); + + if (enableManifests) { + app.use(manifestMiddleware); + } app.get('/', statusRoute); app.get('/status', syncRoute); diff --git a/src/database/block.database.ts b/src/database/block.database.ts index 36e3c38..84829a5 100644 --- a/src/database/block.database.ts +++ b/src/database/block.database.ts @@ -36,3 +36,7 @@ export function formatBlock(block: BlockType): BlockDatabaseType { extended: JSON.stringify(pick(block, blockExtendedFields)), }; } + +export function getExtendedFields(block: BlockType) { + return JSON.stringify(pick(block, blockExtendedFields)); +} diff --git a/src/database/import.database.ts b/src/database/import.database.ts index 82d2910..dd2705a 100644 --- a/src/database/import.database.ts +++ b/src/database/import.database.ts @@ -1,15 +1,28 @@ import {config} from 'dotenv'; -import {indices} from '../utility/order.utility'; -import {connection} from '../database/connection.database'; -import {transactionFields} from '../database/transaction.database'; +import {connection} from './connection.database'; +import {formatBlock} from './block.database'; +import {formatTransaction, DatabaseTag} from './transaction.database'; +import {BlockType} from '../query/block.query'; +import {TransactionType} from '../query/transaction.query'; config(); -export async function importBlocks(path: string) { +export async function importBlock(newBlock: BlockType) { return new Promise(async (resolve, reject) => { try { - const encoding = '(FORMAT CSV, HEADER, 
ESCAPE \'\\\', DELIMITER \'|\', FORCE_NULL("height"))'; - await connection.raw(`COPY blocks ("id", "previous_block", "mined_at", "height", "txs", "extended") FROM '${path}' WITH ${encoding}`); + const block = formatBlock(newBlock); + + await connection.table('blocks') + .insert({ + id: block.id, + height: block.height, + mined_at: block.mined_at, + previous_block: block.previous_block, + txs: block.txs, + extended: block.extended, + }) + .onConflict('id' as never) + .merge(); return resolve(true); } catch (error) { @@ -18,15 +31,15 @@ export async function importBlocks(path: string) { }); } -export async function importTransactions(path: string) { +export async function importTransaction(newTransaction: TransactionType) { return new Promise(async (resolve, reject) => { try { - const fields = transactionFields - .concat(indices) - .map((field) => `"${field}"`); + const transaction = formatTransaction(newTransaction); - const encoding = '(FORMAT CSV, HEADER, ESCAPE \'\\\', DELIMITER \'|\', FORCE_NULL("format", "height", "data_size"))'; - await connection.raw(`COPY transactions (${fields.join(',')}) FROM '${path}' WITH ${encoding}`); + await connection.table('transactions') + .insert(transaction) + .onConflict('id' as never) + .merge(); return resolve(true); } catch (error) { @@ -35,11 +48,18 @@ export async function importTransactions(path: string) { }); } -export async function importTags(path: string) { +export async function importTag(tag: DatabaseTag) { return new Promise(async (resolve, reject) => { try { - const encoding = '(FORMAT CSV, HEADER, ESCAPE \'\\\', DELIMITER \'|\', FORCE_NULL(index))'; - await connection.raw(`COPY tags ("tx_id", "index", "name", "value") FROM '${path}' WITH ${encoding}`); + await connection.table('tags') + .insert({ + tx_id: tag.tx_id, + index: tag.index, + name: tag.name, + value: tag.value, + }) + .onConflict(['tx_id' as never, 'index' as never]) + .merge(); return resolve(true); } catch (error) { diff --git 
a/src/database/insert.database.ts b/src/database/insert.database.ts new file mode 100644 index 0000000..e43db40 --- /dev/null +++ b/src/database/insert.database.ts @@ -0,0 +1,84 @@ +import moment from 'moment'; +import {connection} from './connection.database'; +import {BlockType} from '../query/block.query'; +import {TransactionType, tagValue, Tag} from '../query/transaction.query'; +import {getExtendedFields} from './block.database'; +import {fromB64Url, sha256B64Url} from '../utility/encoding.utility'; + +export async function insertBlock(block: BlockType) { + return await connection + .table('blocks') + .insert({ + id: block.indep_hash, + height: block.height, + mined_at: moment(block.timestamp * 1000).format(), + previous_block: block.previous_block, + txs: JSON.stringify(block.txs), + extended: getExtendedFields(block), + }) + .onConflict('id' as never) + .ignore(); +} + +export async function transactionCached(tx_id: string) { + const result = await connection + .queryBuilder() + .table('transactions') + .select('id') + .where({id: tx_id}) + .limit(1); + + return result.length === 0 ? false : true; +} + +export async function removeStaleTransactions(height: number) { + return await connection + .table('transactions') + .delete() + .where('precache_height', '<=', height - 50) + .whereNotNull('precache_height'); +} + +export async function insertTransaction(tx: TransactionType, height?: number | null, precacheHeight?: number | null) { + return await connection + .table('transactions') + .insert({ + id: tx.id, + owner: tx.owner, + tags: JSON.stringify(tx.tags), + target: tx.target, + quantity: tx.quantity, + reward: tx.reward, + signature: tx.signature, + last_tx: tx.last_tx, + data_size: tx.data_size, + content_type: tagValue(tx.tags, 'content-type'), + format: tx.format, + height, + precache_height: precacheHeight, + owner_address: tx.owner.length > 64 ? 
sha256B64Url(fromB64Url(tx.owner)) : tx.owner, + data_root: tx.data_root, + parent: tx.parent, + }) + .onConflict('id' as never) + .merge(); +} + +export async function insertTag(tx_id: string, tags: Array) { + const preparedTags = []; + + for (let i = 0; i < tags.length; i++) { + preparedTags.push({ + tx_id, + index: i, + name: tags[i].name, + value: tags[i].value, + }); + } + + return await connection + .table('tags') + .insert(preparedTags) + .onConflict(['tx_id' as never, 'index' as never]) + .ignore(); +} diff --git a/src/database/rescan.database.ts b/src/database/rescan.database.ts deleted file mode 100644 index 1f6ad95..0000000 --- a/src/database/rescan.database.ts +++ /dev/null @@ -1,81 +0,0 @@ -import ProgressBar from 'progress'; -import {existsSync, createReadStream, createWriteStream, readFileSync, writeFileSync, unlinkSync} from 'fs'; -import {split, mapSync} from 'event-stream'; -import {config} from 'dotenv'; -import {streams} from '../utility/csv.utility'; -import {log} from '../utility/log.utility'; -import {mkdir} from '../utility/file.utility'; -import {importTransactions, importTags} from '../database/import.database'; -import {storeTransaction, processAns} from '../database/sync.database'; - -config(); -mkdir('snapshot'); -mkdir('cache'); - -export interface TxStream { - tx: string; - height: string; - type: string; -} - -export const rescan = createWriteStream('.rescan.temp'); -export let bar: ProgressBar; - -export async function startRescan(path: string = 'cache/.rescan') { - log.info('[rescan] starting rescan'); - - if (existsSync(path)) { - log.info('[rescan] found existing rescan file. 
Indexing missing transactions.'); - await streamTransactions(path); - } -} - -export async function streamTransactions(path: string) { - const txs: Array = []; - - createReadStream(path) - .pipe(split()) - .pipe(mapSync((line: string) => { - const [tx, height, type] = line.split(','); - txs.push({tx, height, type}); - })) - .on('end', async () => { - txs.pop(); - - for (let i = 0; i < txs.length; i++) { - const {tx, height, type} = txs[i]; - await restoreTransaction(tx, height, type); - } - - const rescan = readFileSync('.rescan.temp'); - writeFileSync(path, rescan); - unlinkSync('.resync.temp'); - - log.info('[rescan] complete, unindexed transaction stored in .rescan'); - - process.exit(); - }); -} - -export async function restoreTransaction(tx: string, height: string, type: string) { - try { - if (type === 'normal') { - await storeTransaction(tx, Number(height)); - } - - if (type === 'ans') { - await processAns(tx, Number(height)); - } - - await importTransactions(`${process.cwd()}/cache/transaction.csv`); - await importTags(`${process.cwd()}/cache/tags.csv`); - - streams.transaction.cache = createWriteStream('cache/transaction.csv'); - streams.tags.cache = createWriteStream('cache/tags.csv'); - - log.info(`[rescan] successfully added missing tx ${tx} at height ${height}`); - } catch (error) { - log.info(`[rescan] failed ${tx} at ${height}, added to the .rescan.temp file. 
It was not added to the database`); - rescan.write(`${tx},${height}\n`); - } -} diff --git a/src/database/sync.app.database.ts b/src/database/sync.app.database.ts new file mode 100644 index 0000000..ae700de --- /dev/null +++ b/src/database/sync.app.database.ts @@ -0,0 +1,90 @@ +import {config} from 'dotenv'; +import {log} from '../utility/log.utility'; +import {getLastBlock} from '../utility/height.utility'; + +import {block} from '../query/block.query'; +import {validateTransaction} from '../utility/filter.utility'; +import {transaction, TransactionType, tagToB64} from '../query/transaction.query'; +import {retrieveTransaction} from '../query/gql.query'; +import {insertBlock, transactionCached, removeStaleTransactions, insertTransaction, insertTag} from './insert.database'; + + +config(); + +export const startHeight = parseInt(process.env.START_HEIGHT || '0'); + +export async function syncAppNode() { + const lastBlock = await getLastBlock(); + const startBlock = lastBlock ? lastBlock - 1 : startHeight; + + log.info(`[database] starting app node sync at ${startBlock}`); + await storeBlock(startBlock); +} + +export async function storeBlock(height: number) { + try { + log.info(`[database] storing block #${height}`); + const currentBlock = await block(height); + await storeTransactions(currentBlock.txs, height); + await insertBlock(currentBlock); + await removeStaleTransactions(height); + await storeBlock(height + 1); + } catch (error) { + console.log(error); + log.info(`[database] block ${height} may have not been mined yet, retrying in 60 seconds`); + setTimeout(async () => { + await storeBlock(height); + }, 60 * 1000); + } +} + +export async function storeTransactions(txs: Array, height: number) { + const batch = []; + + for (let i = 0; i < txs.length; i++) { + const tx = txs[i]; + batch.push(storeTransaction(tx, height)); + } + + await Promise.all(batch); +} + +export async function storeTransaction(tx: string, height: number, retry: boolean = true) { + try { + 
const currentTransaction = await transaction(tx); + if (validateTransaction(currentTransaction.id, currentTransaction.tags) || await transactionCached(tx)) { + log.info(`[database] valid transaction for app node found ${tx}`); + await insertTransaction(currentTransaction, height); + await insertTag(currentTransaction.id, currentTransaction.tags); + } + } catch (error) { + if (retry) { + log.info(`[database] could not retrieve tx ${tx} at height ${height}, retrying`); + const gqlTx = await retrieveTransaction(tx); + log.info(`[database] recovered tx ${tx} at height ${height} from arweave.net`); + + if (validateTransaction(gqlTx.id, gqlTx.tags) || await transactionCached(tx)) { + log.info(`[database] valid transaction for app node found ${tx}`); + const fmtTx: TransactionType = { + format: 2, + id: gqlTx.id, + last_tx: '', + owner: gqlTx.owner.address, + tags: tagToB64(gqlTx.tags), + target: gqlTx.recipient, + quantity: gqlTx.quantity.winston, + data: '', + data_root: '', + data_size: gqlTx.data.size, + data_tree: [], + reward: gqlTx.fee.winston, + signature: gqlTx.signature, + parent: gqlTx.bundledIn.id, + }; + + await insertTransaction(fmtTx, height); + await insertTag(fmtTx.id, fmtTx.tags); + } + } + } +} diff --git a/src/database/sync.database.ts b/src/database/sync.database.ts index f07f42c..7fb4ea4 100644 --- a/src/database/sync.database.ts +++ b/src/database/sync.database.ts @@ -2,27 +2,27 @@ import ProgressBar from 'progress'; import {DataItemJson} from 'arweave-bundles'; import {config} from 'dotenv'; import {getLastBlock} from '../utility/height.utility'; -import {serializeBlock, serializeTransaction, serializeAnsTransaction, serializeTags} from '../utility/serialize.utility'; -import {streams, initStreams, resetCacheStreams} from '../utility/csv.utility'; +import {serializeBlock, serializeTransaction, serializeAnsTransaction} from '../utility/serialize.utility'; +import {streams, initStreams} from '../utility/csv.utility'; import {log} from 
'../utility/log.utility'; import {ansBundles} from '../utility/ans.utility'; -import {mkdir} from '../utility/file.utility'; import {sleep} from '../utility/sleep.utility'; import {TestSuite} from '../utility/mocha.utility'; import {getNodeInfo} from '../query/node.query'; import {block} from '../query/block.query'; import {transaction, tagValue, Tag} from '../query/transaction.query'; import {getDataFromChunks} from '../query/node.query'; -import {importBlocks, importTransactions, importTags} from './import.database'; +import {importBlock, importTransaction, importTag} from './import.database'; import {DatabaseTag} from './transaction.database'; import {cacheANSEntries} from '../caching/ans.entry.caching'; +import {syncAppNode} from './sync.app.database'; config(); -mkdir('snapshot'); -mkdir('cache'); +export const nodeType = process.env.TYPE ?? 'APP'; +export const storeANS102 = process.env.ANS102 === '1' ? true : false; export const storeSnapshot = process.env.SNAPSHOT === '1' ? true : false; -export const parallelization = parseInt(process.env.PARALLEL || '8'); +export const parallelization = parseInt(process.env.PARALLEL || '1'); export let SIGINT: boolean = false; export let SIGKILL: boolean = false; @@ -43,6 +43,11 @@ export function configureSyncBar(start: number, end: number) { } export async function startSync() { + if (nodeType === 'APP') { + await syncAppNode(); + return; + } + const startHeight = await getLastBlock(); currentHeight = startHeight; @@ -105,30 +110,6 @@ export async function parallelize(height: number) { await Promise.all(batch); - try { - await importBlocks(`${process.cwd()}/cache/block.csv`); - } catch (error) { - log.error('[sync] importing new blocks failed most likely due to it already being in the DB'); - log.error(error); - } - - try { - await importTransactions(`${process.cwd()}/cache/transaction.csv`); - } catch (error) { - log.error('[sync] importing new transactions failed most likely due to it already being in the DB'); - 
log.error(error); - } - - try { - await importTags(`${process.cwd()}/cache/tags.csv`); - } catch (error) { - log.error('[sync] importing new tags failed most likely due to it already being in the DB'); - log.error(error); - } - - - resetCacheStreams(); - if (!bar.complete) { bar.tick(batch.length); } @@ -144,13 +125,9 @@ export async function parallelize(height: number) { export async function storeBlock(height: number, retry: number = 0) { try { const currentBlock = await block(height); - const {formattedBlock, input} = serializeBlock(currentBlock, height); + const {formattedBlock} = serializeBlock(currentBlock, height); - streams.block.cache.write(input); - - if (storeSnapshot) { - streams.block.snapshot.write(input); - } + importBlock(currentBlock); if (height > 0) { await storeTransactions(JSON.parse(formattedBlock.txs) as Array, height); @@ -182,20 +159,18 @@ export async function storeTransactions(txs: Array, height: number) { export async function storeTransaction(tx: string, height: number, retry: boolean = true) { try { const currentTransaction = await transaction(tx); - const {formattedTransaction, preservedTags, input} = serializeTransaction(currentTransaction, height); + const {formattedTransaction, preservedTags} = serializeTransaction(currentTransaction, height); - streams.transaction.cache.write(input); - - if (storeSnapshot) { - streams.transaction.snapshot.write(input); - } + importTransaction(currentTransaction); storeTags(formattedTransaction.id, preservedTags); - const ans102 = tagValue(preservedTags, 'Bundle-Type') === 'ANS-102'; + if (storeANS102) { + const ans102 = tagValue(preservedTags, 'Bundle-Type') === 'ANS-102'; - if (ans102) { - await processAns(formattedTransaction.id, height); + if (ans102) { + await processAns(formattedTransaction.id, height); + } } } catch (error) { console.log(''); @@ -266,12 +241,14 @@ export async function processANSTransaction(ansTxs: Array, height: export function storeTags(tx_id: string, tags: Array) { for 
(let i = 0; i < tags.length; i++) { - const tag = tags[i]; - const {input} = serializeTags(tx_id, i, tag); - streams.tags.cache.write(input); - if (storeSnapshot) { - streams.tags.snapshot.write(input); - } + const tag: DatabaseTag = { + tx_id, + index: i, + name: tags[i].name || '', + value: tags[i].value || '', + }; + + importTag(tag); } } diff --git a/src/database/transaction.database.ts b/src/database/transaction.database.ts index 0d185c8..9196f42 100644 --- a/src/database/transaction.database.ts +++ b/src/database/transaction.database.ts @@ -1,7 +1,6 @@ import {config} from 'dotenv'; import {DataItemJson} from 'arweave-bundles'; import {pick} from 'lodash'; -import {indices} from '../utility/order.utility'; import {TransactionType, tagValue} from '../query/transaction.query'; import {fromB64Url, sha256B64Url} from '../utility/encoding.utility'; @@ -40,52 +39,26 @@ export const transactionFields = [ ]; export function formatTransaction(transaction: TransactionType) { - const indexFields: any = {}; - - for (let i = 0; i < indices.length; i++) { - const index = indices[i]; - const value = tagValue(transaction.tags, index); - - if (value) { - indexFields[index] = transaction.tags[i]; - } - } - return pick( { ...transaction, - ...indexFields, content_type: tagValue(transaction.tags, 'content-type'), format: transaction.format || 0, data_size: transaction.data_size || transaction.data ? 
fromB64Url(transaction.data).byteLength : undefined, tags: JSON.stringify(transaction.tags), owner_address: sha256B64Url(fromB64Url(transaction.owner)), }, - transactionFields.concat(indices), ); } export function formatAnsTransaction(ansTransaction: DataItemJson) { - const indexFields: any = {}; - - for (let i = 0; i < indices.length; i++) { - const index = indices[i]; - const value = tagValue(ansTransaction.tags, index); - - if (value) { - indexFields[index] = ansTransaction.tags[i]; - } - } - return pick( { - ...indexFields, id: ansTransaction.id, owner: ansTransaction.owner, content_type: 'ANS-102', target: ansTransaction.target, tags: JSON.stringify(ansTransaction.tags), }, - transactionFields.concat(indices), ); } diff --git a/src/graphql/query.graphql.ts b/src/graphql/query.graphql.ts index 69b4000..0eeaacb 100644 --- a/src/graphql/query.graphql.ts +++ b/src/graphql/query.graphql.ts @@ -1,10 +1,9 @@ import {config} from 'dotenv'; import {QueryBuilder} from 'knex'; import {connection} from '../database/connection.database'; -import {indices} from '../utility/order.utility'; import {ISO8601DateTimeString} from '../utility/encoding.utility'; import {TagFilter} from './types'; -import {tagToB64, toB64url} from '../query/transaction.query'; +import {tagFilterToB64} from '../query/transaction.query'; config(); @@ -39,9 +38,9 @@ export interface QueryParams { } export async function generateQuery(params: QueryParams): Promise { - const {to, from, tags, id, ids, status = 'confirmed', select} = params; + const {to, from, tags, id, ids, select} = params; const {limit = 10, sortOrder = 'HEIGHT_DESC'} = params; - const {since = new Date().toISOString(), offset = 0, minHeight = -1, maxHeight = -1} = params; + const {since = null, offset = 0, minHeight = -1, maxHeight = -1} = params; const query = connection .queryBuilder() @@ -62,10 +61,6 @@ export async function generateQuery(params: QueryParams): Promise query.where('blocks.mined_at', '<', since); } - if (status === 
'confirmed') { - query.whereNotNull('transactions.height'); - } - if (to) { query.whereIn('transactions.target', to); } @@ -75,29 +70,16 @@ export async function generateQuery(params: QueryParams): Promise } if (tags) { - const tagsConverted = tagToB64(tags); + const tagsConverted = tagFilterToB64(tags); tagsConverted.forEach((tag) => { - let indexed = false; - - for (let i = 0; i < indices.length; i++) { - const index = toB64url(indices[i]); - - if (tag.name === index) { - query.whereIn(`transactions.${indices[i]}`, tag.values); - indexed = true; - } - } - - if (indexed === false) { - query.whereIn('transactions.id', (subQuery) => { - return subQuery - .select('tx_id') - .from('tags') - .where('tags.name', tag.name) - .whereIn('tags.value', tag.values); - }); - } + query.whereIn('transactions.id', (subQuery) => { + return subQuery + .select('tx_id') + .from('tags') + .where('tags.name', tag.name) + .whereIn('tags.value', tag.values); + }); }); } diff --git a/src/graphql/resolver.graphql.ts b/src/graphql/resolver.graphql.ts index a7ee24c..b066e55 100644 --- a/src/graphql/resolver.graphql.ts +++ b/src/graphql/resolver.graphql.ts @@ -43,15 +43,14 @@ const blockFieldMap = { export const resolvers: Resolvers = { Query: { transaction: async (parent, queryParams, {req, connection}) => { - req.log.info('[graphql/v2] transaction/request', queryParams); - const params: QueryParams = { id: queryParams.id, blocks: true, select: fieldMap, + limit: 1, }; - const result = (await generateQuery(params)).first(); + const result = (await generateQuery(params))[0]; return await result as TransactionHeader; }, diff --git a/src/query/gql.query.ts b/src/query/gql.query.ts new file mode 100644 index 0000000..81c05fc --- /dev/null +++ b/src/query/gql.query.ts @@ -0,0 +1,74 @@ +import {post} from 'superagent'; +import {Tag} from './transaction.query'; + +export interface GQLTransaction { + id: string; + anchor: string; + signature: string; + recipient: string; + owner: { + address: string; 
+ key: string; + }; + fee: { + winston: string; + ar: string; + }; + quantity: { + winston: string; + ar: string; + }; + data: { + size: string; + type: string; + }; + tags: Array; + bundledIn: { + id: string; + } +} + +export async function retrieveTransaction(id: string): Promise { + const payload = await post('https://arweave.net/graphql') + .send({ + query: `query { + transaction(id: "${id}") { + id + anchor + signature + recipient + owner { + address + key + } + fee { + winston + ar + } + quantity { + winston + ar + } + data { + size + type + } + tags { + name + value + } + block { + id + timestamp + height + previous + } + bundledIn { + id + } + } + }`, + }); + + return payload.body.data.transaction as GQLTransaction; +} diff --git a/src/query/transaction.query.ts b/src/query/transaction.query.ts index d30e879..1982234 100644 --- a/src/query/transaction.query.ts +++ b/src/query/transaction.query.ts @@ -23,6 +23,7 @@ export interface TransactionType { data_root: string; reward: string; signature: string; + parent: string; } export async function transaction(id: string): Promise { @@ -43,6 +44,7 @@ export async function transaction(id: string): Promise { data_root: body.data_root, reward: body.reward, signature: body.signature, + parent: body.parent, }; } @@ -80,7 +82,21 @@ export function tagToUTF8(tags: Array): Array { return conversion; } -export function tagToB64(tags: Array): Array { +export function tagToB64(tags: Array): Array { + const conversion: Array = []; + + for (let i = 0; i < tags.length; i++) { + const tag = tags[i]; + conversion.push({ + name: fromB64Url(tag.name).toString(), + value: fromB64Url(tag.value).toString(), + }); + } + + return conversion; +} + +export function tagFilterToB64(tags: Array): Array { const conversion: Array = []; for (let i = 0; i < tags.length; i++) { diff --git a/src/route/status.route.ts b/src/route/status.route.ts index 673c8e8..b5cd4d3 100644 --- a/src/route/status.route.ts +++ b/src/route/status.route.ts @@ -1,11 
+1,12 @@ import {Request, Response} from 'express'; -import {currentHeight} from '../database/sync.database'; +import {getLastBlock} from '../utility/height.utility'; import {getNodeInfo} from '../query/node.query'; export const start = Number(new Date); export async function statusRoute(req: Request, res: Response) { const info = await getNodeInfo(); + const currentHeight = await getLastBlock(); const delta = info.height - currentHeight; diff --git a/src/route/transaction.route.ts b/src/route/transaction.route.ts index 0815744..ea3668b 100644 --- a/src/route/transaction.route.ts +++ b/src/route/transaction.route.ts @@ -1,9 +1,37 @@ import {Request, Response, NextFunction} from 'express'; import {post} from 'superagent'; +import {getLastBlock} from '../utility/height.utility'; +import {TransactionType} from '../query/transaction.query'; +import {insertTransaction, insertTag} from '../database/insert.database'; + +export async function precacheTransaction(req: Request) { + const precacheHeight = await getLastBlock(); + + const precachedTransaction: TransactionType = { + format: req.body.format, + id: req.body.id, + last_tx: req.body.last_tx, + owner: req.body.owner, + tags: req.body.tags, + target: req.body.target, + quantity: req.body.quantity, + data: req.body.data, + data_size: req.body.data_size, + data_root: req.body.data_root, + data_tree: [], + reward: req.body.reward, + signature: req.body.signature, + parent: '', + }; + + await insertTransaction(precachedTransaction, null, precacheHeight); + await insertTag(precachedTransaction.id, precachedTransaction.tags); +} export async function transactionRoute(req: Request, res: Response, next: NextFunction) { try { const payload = await post(`${req.session.node}/tx`).send(req.body); + await precacheTransaction(req); return res.status(200).send(payload.body); } catch (error) { console.log(error); diff --git a/src/utility/csv.utility.ts b/src/utility/csv.utility.ts index 997a1a1..82e4d6f 100644 --- 
a/src/utility/csv.utility.ts +++ b/src/utility/csv.utility.ts @@ -1,5 +1,6 @@ import {existsSync, WriteStream, createWriteStream} from 'fs'; -import {indices, blockOrder, transactionOrder, tagOrder} from './order.utility'; +import {blockOrder, transactionOrder, tagOrder} from './order.utility'; +import {mkdir} from './file.utility'; export interface CSVStreams { block: { @@ -23,6 +24,9 @@ export interface CSVStreams { }; } +mkdir('snapshot'); +mkdir('cache'); + export const streams: CSVStreams = { block: { snapshot: createWriteStream('snapshot/block.csv', {flags: 'a'}), @@ -88,10 +92,10 @@ export function initStreams() { streams.block.cache.write(blockOrder.join('|') + '\n'); if (appendHeaders.transaction) { - streams.transaction.snapshot.write(transactionOrder.concat(indices).join('|') + '\n'); + streams.transaction.snapshot.write(transactionOrder.join('|') + '\n'); } - streams.transaction.cache.write(transactionOrder.concat(indices).join('|') + '\n'); + streams.transaction.cache.write(transactionOrder.join('|') + '\n'); if (appendHeaders.tags) { streams.tags.snapshot.write(tagOrder.join('|') + '\n'); @@ -106,6 +110,6 @@ export function resetCacheStreams() { streams.tags.cache = createWriteStream('cache/tags.csv'); streams.block.cache.write(blockOrder.join('|') + '\n'); - streams.transaction.snapshot.write(transactionOrder.concat(indices).join('|') + '\n'); + streams.transaction.snapshot.write(transactionOrder.join('|') + '\n'); streams.tags.cache.write(tagOrder.join('|') + '\n'); } diff --git a/src/utility/filter.utility.ts b/src/utility/filter.utility.ts new file mode 100644 index 0000000..c13d101 --- /dev/null +++ b/src/utility/filter.utility.ts @@ -0,0 +1,38 @@ +import {config} from 'dotenv'; +import {read} from 'fs-jetpack'; +import {Tag, tagValue} from '../query/transaction.query'; + +config(); + +export interface FilterValueI { + id?: string; + name?: string; + value?: string; +} + +export interface FilterI { + filter: Array; +} + +export const filterPath = 
process.env.FILTER ?? 'app.filter.json'; +export const filters: Array = JSON.parse(read(filterPath) || '[]') as Array; + +export function validateTransaction(id: string, tags: Array): boolean { + for (let i = 0; i < filters.length; i++) { + const filter = filters[i]; + + for (let ii = 0; ii < filter.filter.length; ii++) { + const filterValue = filter.filter[ii]; + + if (filterValue.id === id) { + return true; + } + + if (tagValue(tags, filterValue.name || '') === filterValue.value) { + return true; + } + } + } + + return false; +} diff --git a/src/utility/height.utility.ts b/src/utility/height.utility.ts index b4d8c7c..0f116a4 100644 --- a/src/utility/height.utility.ts +++ b/src/utility/height.utility.ts @@ -1,6 +1,6 @@ import {connection} from '../database/connection.database'; -export async function getLastBlock() { +export async function getLastBlock(): Promise { const result = await connection .queryBuilder() .select('height') @@ -9,7 +9,7 @@ export async function getLastBlock() { .limit(1); if (result.length > 0) { - return result[0].height; + return result[0].height as number; } else { return 0; } diff --git a/src/utility/order.utility.ts b/src/utility/order.utility.ts index 0c502f5..ec8d8f8 100644 --- a/src/utility/order.utility.ts +++ b/src/utility/order.utility.ts @@ -1,7 +1,6 @@ import {config} from 'dotenv'; config(); -export const indices = JSON.parse(process.env.INDICES || '[]') as Array; export const blockOrder = ['id', 'previous_block', 'mined_at', 'height', 'txs', 'extended']; export const transactionOrder = ['format', 'id', 'signature', 'owner', 'owner_address', 'target', 'reward', 'last_tx', 'height', 'tags', 'quantity', 'content_type', 'data_size', 'data_root']; export const tagOrder = ['tx_id', 'index', 'name', 'value']; diff --git a/src/utility/serialize.utility.ts b/src/utility/serialize.utility.ts index 25959ae..472ada6 100644 --- a/src/utility/serialize.utility.ts +++ b/src/utility/serialize.utility.ts @@ -1,5 +1,4 @@ import {DataItemJson} 
from 'arweave-bundles'; -import {indices} from './order.utility'; import {BlockType} from '../query/block.query'; import {TransactionType, Tag} from '../query/transaction.query'; import {formatBlock} from '../database/block.database'; @@ -20,13 +19,12 @@ export function serializeBlock(block: BlockType, height: number) { } export function serializeTransaction(tx: TransactionType, height: number) { - const formattedTransaction = formatTransaction(tx); + const formattedTransaction: any = formatTransaction(tx); const preservedTags = JSON.parse(formattedTransaction.tags) as Array; formattedTransaction.tags = `${formattedTransaction.tags.replace(/"/g, '\\"')}`; const values = transactionFields - .map((field) => `"${field === 'height' ? height : formattedTransaction[field] ? formattedTransaction[field] : ''}"`) - .concat(indices.map((ifield) => `"${formattedTransaction[ifield] ? formattedTransaction[ifield] : ''}"`)); + .map((field) => `"${field === 'height' ? height : formattedTransaction[field] ? formattedTransaction[field] : ''}"`); const input = `${values.join(delimiter)}\n`; @@ -38,14 +36,13 @@ export function serializeTransaction(tx: TransactionType, height: number) { } export function serializeAnsTransaction(tx: DataItemJson, height: number) { - const formattedAnsTransaction = formatAnsTransaction(tx); + const formattedAnsTransaction: any = formatAnsTransaction(tx); formattedAnsTransaction.tags = `${formattedAnsTransaction.tags.replace(/"/g, '\\"')}`; const ansTags = tx.tags; const values = transactionFields - .map((field) => `"${field === 'height' ? height : formattedAnsTransaction[field] ? formattedAnsTransaction[field] : ''}"`) - .concat(indices.map((ifield) => `"${formattedAnsTransaction[ifield] ? formattedAnsTransaction[ifield] : ''}"`)); + .map((field) => `"${field === 'height' ? height : formattedAnsTransaction[field] ? 
formattedAnsTransaction[field] : ''}"`); const input = `${values.join(delimiter)}\n`; diff --git a/yarn.lock b/yarn.lock index f9aded6..59c5c37 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2881,7 +2881,7 @@ cross-fetch@3.1.4, cross-fetch@^3.0.6: dependencies: node-fetch "2.6.1" -cross-spawn@^7.0.0, cross-spawn@^7.0.2: +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -3569,6 +3569,21 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" +execa@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" @@ -4090,6 +4105,11 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" @@ -4490,6 +4510,11 @@ human-crypto-keys@^0.1.4: node-forge "^0.8.2" pify "^4.0.1" +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + iconv-lite@0.4.24, iconv-lite@^0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" @@ -5566,6 +5591,11 @@ merge-descriptors@1.0.1: resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + merge2@^1.3.0: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" @@ -5946,6 +5976,13 @@ normalize-path@^3.0.0, normalize-path@~3.0.0: resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + nullthrows@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/nullthrows/-/nullthrows-1.1.1.tgz#7818258843856ae971eae4208ad7d7eb19a431b1" @@ -6115,7 +6152,7 @@ onetime@^2.0.0: dependencies: mimic-fn "^1.0.0" -onetime@^5.1.0: +onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== @@ -6305,7 +6342,7 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-key@^3.1.0: +path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== @@ -7107,7 +7144,7 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" -signal-exit@^3.0.2: +signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== @@ -7422,6 +7459,11 @@ strip-bom@^4.0.0: resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version 
"3.1.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"