// Big modules
import { BigNumber } from 'ethers'
import { existsSync, mkdirSync } from 'fs'
import { opendir, readFile, rm } from 'fs/promises'
import { createInterface } from 'readline'

// Local logic
import { AsyncUtils, NumberUtils, ErrorUtils } from '@tornado/sdk-utils'

// PouchDB
import PouchDB from 'pouchdb'
import * as PouchDBAdapterMemory from 'pouchdb-adapter-memory'

// @ts-ignore
import { toIndexableString } from 'pouchdb-collate'

// Register plugins
PouchDB.plugin(PouchDBAdapterMemory)

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECLARATIONS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

export interface TornadoInstance {
  network: number
  symbol: string
  decimals: number
  denomination: number
  deployBlock: number
  address: string
}

export interface ClassicInstance extends TornadoInstance {
  anonymityMiningEnabled: boolean
}

export interface TokenData {
  network: number
  decimals: number
  address: string
}

export namespace Keys {
  export interface InstanceLookup {
    network: string
    token: string
    denomination: string
  }
}

export interface RelayerProperties {
  address: string
  version: string
  serviceFeePercent: number
  miningFeePercent: number
  status: string
  chainId: number
  prices: Map<string, BigNumber>
}

export namespace Options {
  export interface Cache {
    adapter?: string
    cachePath?: string
    persistent?: boolean
  }
}

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ REST ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

export namespace Files {
  export type PathGetter = (relative: string) => string

  export const getModulesPath = (relative: string, prefix?: string): string =>
    (prefix ?? __dirname + '/../../node_modules/') + relative

  export const getResourcePath = (relative: string, prefix?: string): string =>
    (prefix ?? __dirname + '/../resources/') + relative

  export const getCachePath = (relative: string, prefix?: string): string =>
    (prefix ?? process.cwd() + '/cache/') + relative
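
  // Example (sketch): with no prefix, getCachePath('Deposits') resolves to
  // process.cwd() + '/cache/Deposits', and getResourcePath('onchain/instances.json')
  // resolves to __dirname + '/../resources/onchain/instances.json'.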

  export const cacheDirExists = (prefix?: string): boolean => existsSync(getCachePath('', prefix))

  export const makeCacheDir = (prefix?: string): void => mkdirSync(getCachePath('', prefix))

  export const loadRaw = (relative: string): Promise<Buffer> => readFile(getResourcePath(relative))

  export async function wipeCache(prompt: boolean = true): Promise<void> {
    const dir = await opendir(getCachePath(''))
    const toRemove: string[] = []
    const userInput = createInterface({ input: process.stdin, output: process.stdout })

    for await (const entry of dir) {
      if (entry.name.match('(Deposit.*)|(Withdrawal.*)|(Note.*)|(Invoice.*)'))
        toRemove.push(getCachePath(entry.name))
    }

    if (toRemove.length != 0) {
      if (prompt) {
        const promptString = `\nCache wipe requested, following would be wiped:\n\n${toRemove.join(
          '\n'
        )}\n\nContinue? (y/n): `

        function wipeCachePrompt(prompt: string, resolve: any): void {
          userInput.question(prompt, (answer) => {
            if (answer == 'y') {
              userInput.close()
              resolve(true)
            } else if (answer == 'n') {
              userInput.close()
              resolve(false)
            } else wipeCachePrompt('', resolve)
          })
        }

        const answer = await new Promise((resolve) => wipeCachePrompt(promptString, resolve))

        if (answer)
          await Promise.all(toRemove.map((entry) => rm(entry, { recursive: true, force: true }))).catch(
            (err) => {
              throw ErrorUtils.ensureError(err)
            }
          )
      } else {
        await Promise.all(toRemove.map((entry) => rm(entry, { recursive: true, force: true }))).catch(
          (err) => {
            throw ErrorUtils.ensureError(err)
          }
        )
      }
    }
  }
}

export namespace Json {
  const cachedJsonData = new Map<string, any>()

  // reading
  export async function load(
    relativePath: string,
    encoding: BufferEncoding = 'utf8',
    pathGetter: Files.PathGetter = Files.getResourcePath
  ): Promise<any> {
    if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
    else {
      const obj = JSON.parse(await readFile(pathGetter(relativePath), encoding))
      cachedJsonData.set(relativePath, obj)
      return obj
    }
  }

  export function toMap<V>(jsonData: any): Map<string, V> {
    return new Map<string, V>(Object.entries(jsonData))
  }

  export async function loadMap<V>(
    relativePath: string,
    encoding: BufferEncoding = 'utf8'
  ): Promise<Map<string, V>> {
    return toMap<V>(await load(relativePath, encoding))
  }

  export function getError(...values: any[]): Error {
    return ErrorUtils.getError(`there is no such entry for the key-value path [${values.join('][')}]`)
  }

  export function throwError(...values: any[]): void {
    throw getError(...values)
  }

  export function getValue(jsonObj: any, keys: any[]): any {
    for (let i = 0; i < keys.length; i++) {
      jsonObj = jsonObj[keys[i]] ?? throwError(...keys.slice(0, i + 1))
    }
    return jsonObj
  }
}
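
// Example (sketch): Json.getValue(await Json.load('onchain/instances.json'), ['1', 'eth'])
// descends the parsed object one key at a time and throws a descriptive error naming the
// first missing key along the path.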

// TODO: Decide whether to also cache the data instead of just loading it for the function call
export namespace Onchain {
  export async function getClassicInstanceData(
    network: string,
    token: string,
    denomination: string
  ): Promise<ClassicInstance> {
    const instanceData = Json.getValue(await Json.load('onchain/instances.json'), [network, token])
    return {
      network: +network,
      symbol: token.toUpperCase(),
      decimals: Json.getValue(instanceData, ['decimals']),
      denomination: +denomination,
      deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
      address: Json.getValue(instanceData, ['instanceAddress', denomination]),
      anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
    }
  }
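
  // Example (sketch, assuming the bundled onchain/instances.json layout): awaiting
  // getClassicInstanceData('1', 'eth', '1') yields the address, deploy block, decimals and
  // mining flag of the 1 ETH classic pool on network 1.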

  export async function getInstanceLookupKeys(instanceAddress: string): Promise<Keys.InstanceLookup> {
    // Find the pathstring key (e.g. '1eth1') whose value is the given instance address
    const lookupObj: { [key: string]: string } = await Json.load('onchain/instanceAddresses.json')

    const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]

    const network = pathstring.match('[0-9]+')![0],
      token = pathstring.substring(network.length).match('[a-z]+')![0],
      denomination = pathstring.substring(network.length + token.length)

    return {
      network: network,
      token: token,
      denomination: denomination
    }
  }

  export async function getPathstringBasedContent<T>(
    filepath: string,
    paths: Array<{
      network?: string
      token?: string
      denomination?: string
    }>
  ): Promise<Array<T>> {
    const obj = await Json.load(filepath)
    return await Promise.all(
      paths.map((path) =>
        Json.getValue(obj, [`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`])
      )
    )
  }
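
  // Example (sketch): the lookup JSONs are keyed by flattened pathstrings, so a path of
  // { network: '1', token: 'eth', denomination: '10' } selects the entry stored under '1eth10'.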

  export async function getNetworkSymbol(networkId: string): Promise<string> {
    return (
      await getPathstringBasedContent<string>('onchain/networkSymbols.json', [{ network: networkId }])
    )[0]
  }

  export function getInstanceAddresses(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Promise<Array<string>> {
    return getPathstringBasedContent<string>('onchain/instanceAddresses.json', paths)
  }

  export async function getInstanceAddress(
    network: string,
    token: string,
    denomination: string
  ): Promise<string> {
    return (await getInstanceAddresses([{ network: network, token: token, denomination: denomination }]))[0]
  }

  export function getInstanceDeployBlockNums(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Promise<Array<number>> {
    return getPathstringBasedContent<number>('onchain/deployedBlockNumbers.json', paths)
  }

  export async function getInstanceDeployBlockNum(
    network: string,
    token: string,
    denomination: string
  ): Promise<number> {
    return (
      await getInstanceDeployBlockNums([{ network: network, token: token, denomination: denomination }])
    )[0]
  }

  export async function getProxyAddress(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'proxy'])
  }

  export async function getMulticallAddress(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall'])
  }

  export async function getMulticall3Address(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall3'])
  }

  export async function getTokenData(network: string, token: string): Promise<TokenData> {
    const data = Json.getValue(await Json.load('onchain/tokens.json'), [network, token])
    return {
      network: +network,
      decimals: +data['decimals'],
      address: data['address']
    }
  }

  export async function getTokenAddress(network: string, token: string): Promise<string> {
    return (
      await getPathstringBasedContent<string>('onchain/tokenAddresses.json', [
        { network: network, token: token }
      ])
    )[0]
  }

  export async function getTokenDecimals(network: string, token: string): Promise<number> {
    return (
      await getPathstringBasedContent<number>('onchain/decimals.json', [{ network: network, token: token }])
    )[0]
  }
}

export namespace Offchain {
  export async function getUncensoredRpcURL(network: string, name: string = ''): Promise<string> {
    const rpcs = Json.toMap<string>(
      Json.getValue(await Json.load('offchain/infrastructure.json'), ['jrpc-uncensored', network])
    )

    if (name.length !== 0) {
      return rpcs.get(name)!
    }

    let keys = rpcs.keys()
    let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)

    for (let i = 0; i < randCount; i++) keys.next()

    return rpcs.get(keys.next().value)!
  }
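
  // Example (sketch): await Offchain.getUncensoredRpcURL('1') returns a randomly chosen
  // uncensored JSON-RPC endpoint for network 1, while passing a provider name returns that
  // specific entry from the bundled offchain/infrastructure.json.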

  export async function getClassicSubgraphURL(network: string): Promise<string> {
    return Json.getValue(await Json.load('offchain/infrastructure.json'), ['subgraph', network])
  }
}

export namespace Constants {
  export const MERKLE_TREE_HEIGHT = 20
}

export namespace Docs {
  // TODO: Probably find some easier way for the end user to look up the docs below...

  export class Base {
    _id: string
    _rev?: string

    constructor(id: string) {
      this._id = id
    }
  }

  export class Deposit extends Base {
    blockNumber: number
    leafIndex: number
    commitment: string
    transactionHash: string
    timestamp: string

    constructor(obj: any) {
      const blockNumber = obj['blockNumber']
      const transactionHash = obj['transactionHash']
      const commitment = obj['args']['commitment']
      const leafIndex = obj['args']['leafIndex']
      const timestamp = obj['args']['timestamp']

      // Build the _id as an indexable string so documents keep their (blockNumber, leafIndex)
      // order, since that ordering will be needed later
      super(toIndexableString([blockNumber, leafIndex, commitment]))

      this.commitment = commitment
      this.blockNumber = blockNumber
      this.leafIndex = leafIndex
      this.transactionHash = transactionHash
      this.timestamp = timestamp
    }
  }

  export class Withdrawal extends Base {
    blockNumber: number
    to: string
    nullifierHash: string
    transactionHash: string
    fee: string

    constructor(obj: any) {
      const blockNumber = obj['blockNumber']
      const transactionHash = obj['transactionHash']
      const to = obj['args']['to']
      const nullifierHash = obj['args']['nullifierHash']
      const fee = (obj['args']['fee'] as BigNumber).toString()

      super(toIndexableString([blockNumber, to, nullifierHash]))

      this.blockNumber = blockNumber
      this.to = to
      this.nullifierHash = nullifierHash
      this.transactionHash = transactionHash
      this.fee = fee
    }
  }

  export class Note extends Base {
    network: string
    token: string
    denomination: string
    note: string

    constructor(index: number, network: string, token: string, denomination: string, note: string) {
      super(toIndexableString([index, network, denomination, token]))
      this.network = network
      this.token = token
      this.denomination = denomination
      this.note = note
    }
  }

  export class Invoice extends Base {
    network: string
    token: string
    denomination: string
    invoice: string

    constructor(index: number, network: string, token: string, denomination: string, invoice: string) {
      super(toIndexableString([index, network, denomination, token]))
      this.network = network
      this.token = token
      this.denomination = denomination
      this.invoice = invoice
    }
  }

  export class Relayer extends Base {
    address: string
    version: string
    serviceFeePercent: number
    miningFeePercent: number
    status: string
    chainId: number
    prices: Map<string, BigNumber>

    constructor(url: string, properties: RelayerProperties) {
      super(toIndexableString([url]))
      this.address = properties.address
      this.version = properties.version
      this.serviceFeePercent = properties.serviceFeePercent
      this.miningFeePercent = properties.miningFeePercent
      this.status = properties.status
      this.chainId = properties.chainId
      this.prices = properties.prices
    }
  }
}

export namespace Cache {
  export class Base<T extends Docs.Base> {
    name: string
    db: PouchDB.Database<T>

    constructor(name: string, options?: Options.Cache) {
      this.name = name

      // A non-persistent cache only makes sense with the (default or explicit) memory adapter.
      if (options?.persistent === false && options?.adapter !== 'memory' && options?.adapter != null)
        throw ErrorUtils.getError('Cache.new: if not persistent, cache must use memory adapter.')

      // Conversely, the memory adapter cannot back a persistent cache.
      if (options?.adapter === 'memory' && options?.persistent === true)
        throw ErrorUtils.getError("Cache.new: can't specify memory adapter if persistent.")

      const dbAdapter = options?.adapter ?? (options?.persistent === false ? 'memory' : 'leveldb')

      // Normalize a custom cache path to end with a trailing slash.
      if (options?.cachePath)
        if (options.cachePath.charAt(options.cachePath.length - 1) != '/') options.cachePath += '/'

      // Ensure the (possibly custom) cache directory exists before opening the database in it.
      if (!Files.cacheDirExists(options?.cachePath)) Files.makeCacheDir(options?.cachePath)

      this.db = new PouchDB<T>(Files.getCachePath(name, options?.cachePath), { adapter: dbAdapter })
    }

    async get(keys: Array<any>): Promise<T> {
      return await this.db.get(toIndexableString(keys)).catch((err) => {
        throw ErrorUtils.ensureError(err)
      })
    }

    async close(): Promise<void> {
      await this.db.close()
    }

    async clear(): Promise<void> {
      await this.db.destroy()
    }
  }
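
  // Example (sketch): a throwaway in-memory cache of Note documents, keyed by the same
  // indexable-string ids that Docs.Note builds from [index, network, denomination, token].
  // const notes = new Base<Docs.Note>('Notes', { adapter: 'memory', persistent: false })
  // const firstNote = await notes.get([0, '1', '1', 'eth'])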

  export abstract class Syncable<T extends Docs.Base> extends Base<T> {
    pooler?: AsyncUtils.PromisePooler

    constructor(name: string, options?: Options.Cache) {
      super(name, options)
    }

    abstract buildDoc(response: any): Docs.Base

    abstract getCallbacks(...args: Array<any>): Array<AsyncUtils.Callback>

    abstract getErrorHandlers(...args: Array<any>): Array<AsyncUtils.ErrorHandler>

    initializePooler(
      callbacks: Array<AsyncUtils.Callback>,
      errorHandlers: Array<AsyncUtils.ErrorHandler>,
      concurrencyLimit: number
    ): void {
      if (this.pooler) this.pooler.reset()
      this.pooler = new AsyncUtils.PromisePooler(callbacks, errorHandlers, concurrencyLimit)
    }

    async close(): Promise<void> {
      if (this.pooler && this.pooler.pending)
        throw ErrorUtils.getError("Syncable.close: can't close while pooler still has pending promises.")
      await super.close()
    }

    async clear(): Promise<void> {
      if (this.pooler && this.pooler.pending)
        throw ErrorUtils.getError("Syncable.clear: can't clear while pooler still has pending promises.")
      await super.clear()
    }
  }

  type DocsArray<T extends Docs.Base> = Array<{
    doc?: T
    id: string
    key: string
    value: {
      rev: string
      deleted?: boolean
    }
  }>

  export async function loadContents<T extends Docs.Base>(
    nameOfContent: string,
    full: boolean = true,
    emptyError: Error = ErrorUtils.getError(
      `Core.loadCacheContents: there is no cache entry for ${nameOfContent}`
    )
  ): Promise<DocsArray<T>> {
    const cache = new Cache.Base<T>(nameOfContent)

    const docs = await cache.db.allDocs({ include_docs: full }).catch((err) => {
      throw ErrorUtils.ensureError(err)
    })

    if (docs.total_rows === 0) {
      await cache.clear()
      throw emptyError
    }

    return docs.rows as DocsArray<T>
  }
}