Commit 694ff8d (0 parents)
Showing 15 changed files with 1,127 additions and 0 deletions.

.gitignore
@@ -0,0 +1,3 @@
node_modules
package-lock.json
.DS_Store

README.md
@@ -0,0 +1,57 @@
# Raresats

## Abstract
We provide a tool to find and extract rare sats in a wallet. We currently support the following rare and exotic types:
- Rodarmor rarity:
  - uncommon
  - rare
  - epic
  - legendary
  - mythic
- Exotic sats:
  - black
  - alpha
  - omega
  - nakamoto
  - palindrome
  - first tx
  - block 9
  - block 78
  - vintage
  - pizza

## Installation
The tool is available as an npm package (soon). You can install it with the following command:

```bash
git clone https://github.com/SATO-Technologies/raresats.git
cd raresats
npm install --global .
```

## Usage
The tool can be used as a CLI or as a library. The CLI is available through the `raresats` command, and the library can be imported from the `raresats` package.

### Find

#### CLI

```bash
raresats find -a <address> -s uncommon rare epic legendary
raresats find -u <utxo> -s black alpha omega
```

Options:
- `--ordurl <url>`: the URL of the ord instance used to fetch sat ranges (default: http://127.0.0.1:4001). This instance MUST run the JSON-RPC API.
- `--mempoolurl <url>`: the URL of the mempool instance used to fetch utxos (default: https://mempool.space).
- `-s <satributes>`: the satributes to search for, separated by spaces (default: all).
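
For example, a hypothetical invocation that points the tool at a local ord instance and restricts the search to a few satributes (placeholder address shown):

```bash
raresats find -a bc1p... --ordurl http://127.0.0.1:4001 --mempoolurl https://mempool.space -s pizza vintage palindrome
```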

Library:
```javascript
import { find } from 'raresats';

const res = await find({
  address: 'bc1p...',
  satributes: ['uncommon', 'rare', 'epic', 'legendary'],
});
console.log(res);
```
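
The resolved value wraps the payload built in `commands/find.js` with `success()` from `utils/outputMessages.js`; that helper is not shown here, so the exact envelope it adds is an assumption. The payload itself has roughly the shape sketched below (illustrative values; sat ranges are written as inclusive `[start, end]` BigInt pairs, though the exact encoding produced by `minRepr` is also not shown here):

```javascript
// Illustrative only: rough shape of the payload that find() passes to success().
const examplePayload = {
  totalCount: { uncommon: 2n, black: 0n },                    // one entry per requested satribute
  utxos: {
    '<txid>:<vout>': {                                        // only outpoints holding rare sats are kept
      utxoValue: 546n,                                        // output value reported by mempool.space
      rareRanges: { uncommon: [[1111111110n, 1111111111n]] }, // made-up sat range
      count: { uncommon: 2n },                                // number of matching sats per satribute
      locations: [{ type: 'uncommon', offset: 0n, size: 2n }] // position inside the utxo
    }
  }
};
```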

cli.js
@@ -0,0 +1,41 @@
#!/usr/bin/env node

import { find } from "./commands/find.js";
import { SATRIBUTES } from "./rareAndExotic/sats.js";
import { ArgumentParser } from "argparse";
import JSONbig from 'json-bigint';

// Parse/stringify JSON with native BigInts so sat numbers keep full precision.
const JSONbigNative = JSONbig({ useNativeBigInt: true, alwaysParseAsBig: true });

const parser = new ArgumentParser({ prog: "raresats", description: "Find and extract rare sats" });
const subparsers = parser.add_subparsers({ dest: "command" });

const findParser = subparsers.add_parser("find", { help: "Find rare sats" });

// Exactly one of --address or --utxo must be provided.
const findGroup = findParser.add_mutually_exclusive_group({ required: true });
findGroup.add_argument("-a", "--address", { help: "The address to find rare sats from", default: null });
findGroup.add_argument("-u", "--utxo", { help: "The utxo to find rare sats from", default: null });

findParser.add_argument("--ordurl", { help: "The ord instance to fetch ranges from", default: "http://127.0.0.1:4001" });
findParser.add_argument("--mempoolurl", { help: "The mempool instance to fetch utxos from", default: "https://mempool.space" });
findParser.add_argument("-s", "--satributes", { help: "The satributes to find", nargs: "*", choices: SATRIBUTES });

const args = parser.parse_args();

if (!args.command) {
  parser.print_help();
  process.exit(0);
}

if (args.command === "find") {
  let res = await find({
    address: args.address,
    utxo: args.utxo,
    ordURL: args.ordurl,
    mempoolURL: args.mempoolurl,
    satributes: args.satributes
  });

  let output = JSONbigNative.stringify(res, null, 2);
  console.log(output);
}

commands/find.js
@@ -0,0 +1,247 @@
import { sortRangesWithIndices, rangeOverlapsRange, cloneRanges, rangesSize, minRepr } from "../utils/ranges.js";
import { bigIntMin } from "../utils/bigints.js";
import { success, failure } from "../utils/outputMessages.js";
import { SATRIBUTES, typeToListingFunc } from "../rareAndExotic/sats.js";

import fetch from 'node-fetch';
import JSONbig from 'json-bigint';
const JSONbigNative = JSONbig({ useNativeBigInt: true, alwaysParseAsBig: true });

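// _getLocations maps each rare-sat range to its position inside the utxo, expressed as
// { offset, size }, where offset counts sats from the start of the output (the utxo's
// sat ranges concatenated in their original order). Worked example (illustrative values):
//   utxoRanges     = [[100n, 104n], [200n, 209n]]   // 5 sats followed by 10 sats
//   rareSatsRanges = [[202n, 203n]]
//   result         = [{ offset: 7n, size: 2n }]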
function _getLocations(rareSatsRanges, utxoRanges) {
  let cumulativeOffsets = [0n];
  for (let i = 1; i < utxoRanges.length; i++) {
    cumulativeOffsets.push(cumulativeOffsets[i - 1] + (utxoRanges[i - 1][1] - utxoRanges[i - 1][0] + 1n));
  }

  let sortedIndicesRareSatsRanges = sortRangesWithIndices(rareSatsRanges);
  let sortedIndicesUtxoRanges = sortRangesWithIndices(utxoRanges);

  let locations = [];

  let i = 0;
  let j = 0;
  let rareRange = rareSatsRanges[sortedIndicesRareSatsRanges[i]];
  let utxoRange = utxoRanges[sortedIndicesUtxoRanges[j]];
  while (i < sortedIndicesRareSatsRanges.length && j < sortedIndicesUtxoRanges.length) {
    if (rangeOverlapsRange(rareRange, utxoRange)) {
      let start = rareRange[0]; // equal to bigIntMax(rareRange[0], utxoRange[0])
      let end = bigIntMin(rareRange[1], utxoRange[1]);
      locations.push({
        offset: cumulativeOffsets[sortedIndicesUtxoRanges[j]] + (start - utxoRange[0]),
        size: end - start + 1n,
      });

      if (end == utxoRange[1]) {
        j++;
        rareRange = [end + 1n, rareRange[1]];
        if (j < sortedIndicesUtxoRanges.length) {
          utxoRange = utxoRanges[sortedIndicesUtxoRanges[j]];
        }
      }

      if (end == rareRange[1]) {
        i++;
        if (i < sortedIndicesRareSatsRanges.length) {
          rareRange = rareSatsRanges[sortedIndicesRareSatsRanges[i]];
        }
      }
    }
    else {
      j++;
      if (j < sortedIndicesUtxoRanges.length) {
        utxoRange = utxoRanges[sortedIndicesUtxoRanges[j]];
      }
    }
  }
  return locations;
}

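// _mergeLocations groups possibly-overlapping per-satribute locations into disjoint
// segments, each labelled with every satribute that covers it (names joined with '_',
// ordered by their priority in SATRIBUTES). Worked example (illustrative values,
// assuming 'uncommon' sorts before 'black' in SATRIBUTES):
//   input:  [{ offset: 0n, size: 10n, type: 'uncommon' },
//            { offset: 5n, size:  5n, type: 'black' }]
//   output: [{ offset: 0n, size: 5n, type: 'uncommon' },
//            { offset: 5n, size: 5n, type: 'uncommon_black' }]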
function _mergeLocations(locations) {
  if (locations.length == 0) return [];

  // Step 1: register boundaries
  // Both start and end boundaries are inclusive
  let boundaries = [];
  for (let loc of locations) {
    boundaries.push({ offset: loc.offset, type: 'start', name: loc.type });
    boundaries.push({ offset: loc.offset + loc.size - 1n, type: 'end', name: loc.type });
  }

  // Step 2: sort boundaries by offset
  boundaries.sort((a, b) => {
    if (a.offset < b.offset) return -1;
    if (a.offset > b.offset) return 1;
    return 0;
  });

  // Step 3: group locations
  // Boundaries are inclusive: a segment starts one past a closing ('end') boundary
  // and stops one before an opening ('start') boundary.
  let mergedLocations = [];
  let active = new Set();
  let last = boundaries[0];
  boundaries.forEach(b => {
    if (active.size > 0 && (last.offset < b.offset || (last.type == 'start' && b.type == 'end' && last.name == b.name))) {
      mergedLocations.push({
        offset: last.offset + (last.type == 'end' ? 1n : 0n),
        size: b.offset - last.offset + 1n - (last.type == 'end' ? 1n : 0n) - (b.type == 'start' ? 1n : 0n),
        type: Array.from(active).sort((a, b) => SATRIBUTES.indexOf(a) - SATRIBUTES.indexOf(b)).join('_'),
      });
    }

    if (b.type === 'start') {
      active.add(b.name);
    } else if (b.type === 'end') {
      active.delete(b.name);
    }

    last = b;
  });

  // Step 4: merge adjacent locations of the same type
  let i = 0;
  while (i < mergedLocations.length - 1) {
    if (mergedLocations[i].type == mergedLocations[i + 1].type) {
      mergedLocations[i].size += mergedLocations[i + 1].size;
      mergedLocations.splice(i + 1, 1);
    }
    else {
      i++;
    }
  }

  return mergedLocations;
}

function _removeTrailingSlash(url) {
  return url.endsWith('/') ? url.slice(0, -1) : url;
}

export async function find({
  address = null,
  utxo = null,
  ordURL = "http://127.0.0.1:4001",
  mempoolURL = "https://mempool.space",
  satributes = null,
}) {
  if (satributes == null) {
    satributes = SATRIBUTES;
  }
  else {
    // Ensures that satributes is ordered by priority (SATRIBUTES is)
    satributes = SATRIBUTES.filter(x => satributes.includes(x));
  }

  let utxos = [];

  if (address == null && utxo == null) {
    return failure("Either address or utxo must be provided");
  }

  if (address != null && utxo != null) {
    return failure("Only one of address or utxo must be provided");
  }

  let utxosValues = {};

  if (address != null) {
    try {
      let req = await fetch(`${_removeTrailingSlash(mempoolURL)}/api/address/${address}/utxo`);
      let res = JSONbigNative.parse(await req.text());
      for (let utxo of res) {
        if (utxo.status.confirmed) {
          utxos.push(`${utxo.txid}:${utxo.vout}`);
          utxosValues[`${utxo.txid}:${utxo.vout}`] = utxo.value;
        }
      }
    }
    catch (e) {
      return failure(`Error fetching utxos: ${e}`);
    }
  }

  if (utxo != null) {
    utxos.push(utxo);
  }

  // `utxos` is now a list of at least one (unspent) outpoint

  let outpointToRanges = {};

  for (let u of utxos) {
    try {
      let req = await fetch(`${_removeTrailingSlash(ordURL)}/output/${u}`, {
        headers: {
          'Accept': 'application/json'
        }
      });
      let rgs = JSONbigNative.parse(await req.text()).sat_ranges;
      if (rgs == null) {
        return failure(`Error fetching sat ranges of ${u}, looks like it is spent or the ord instance is not accessible or not synced`);
      }
      outpointToRanges[u] = rgs;
    }
    catch (e) {
      return failure(`Error fetching sat ranges of ${u}, looks like it doesn't exist`);
    }
  }

  // This code assumes that the right side of a sat-range is inclusive.
  // The API returns ranges with the right side exclusive, so we subtract 1 from the right side.
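  // For example (illustration only): a range returned by ord as [ 1000n, 1005n )
  // (5 sats, end exclusive) is stored here as [ 1000n, 1004n ] (end inclusive).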
  for (let u of utxos) {
    for (let r of outpointToRanges[u]) {
      r[1] = r[1] - 1n;
    }
  }

  let outpointData = {};
  for (let u of utxos) outpointData[u] = { utxoValue: utxosValues[u] };

  for (let u of utxos) {
    outpointData[u].rareRanges = {};
    for (let s of satributes) {
      // TODO: listingFunc shouldn't change the ranges. For now we clone to avoid an issue.
      let rgs = minRepr(typeToListingFunc[s](cloneRanges(outpointToRanges[u])));
      if (rgs.length > 0) {
        outpointData[u].rareRanges[s] = rgs;
      }
    }
  }

  // Count rare and exotic sats
  let totalCount = {};
  for (let s of satributes) totalCount[s] = 0n;
  for (let u of utxos) outpointData[u].count = {};
  for (let u of utxos) {
    for (let s of satributes) {
      if (outpointData[u].rareRanges[s]) {
        let n = rangesSize(outpointData[u].rareRanges[s]);
        outpointData[u].count[s] = n + (outpointData[u].count[s] || 0n);
        totalCount[s] += n;
      }
    }
  }

  // Add locations
  for (let u of utxos) {
    let locations = [];
    for (let s of satributes) {
      if (outpointData[u].rareRanges[s]) {
        let locs = _getLocations(outpointData[u].rareRanges[s], outpointToRanges[u]);
        locations = locations.concat(locs.map(x => ({ type: s, ...x })));
      }
    }
    outpointData[u].locations = _mergeLocations(locations);
  }

  // Remove empty outpoints
  for (let u of utxos) {
    if (Object.keys(outpointData[u].rareRanges).length == 0) {
      delete outpointData[u];
    }
  }

  return success({
    totalCount,
    utxos: { ...outpointData },
  });
}

index.js
@@ -0,0 +1 @@
export { find } from './commands/find.js';

package.json
@@ -0,0 +1,22 @@
{
  "name": "raresats",
  "version": "0.8.1",
  "main": "./index.js",
  "bin": "./cli.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Simon GLATRE (simon@bysato.com)",
  "license": "",
  "description": "Find and extract rare and exotic sats.",
  "repository": {
    "type": "git",
    "url": "https://github.com/SATO-Technologies/raresats"
  },
  "dependencies": {
    "argparse": "^2.0.1",
    "json-bigint": "^1.0.0",
    "node-fetch": "^3.3.2"
  },
  "type": "module"
}