Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
-- CreateTable
-- One row per input consumed by a blockchain transaction. Each input links
-- its parent `Transaction` to the `Address` whose funds it spends.
CREATE TABLE `TransactionInput` (
-- Surrogate key generated server-side via uuid() (expression default).
`id` VARCHAR(191) NOT NULL DEFAULT (uuid()),
-- References Transaction.id; the FK constraint is added in a later step.
`transactionId` VARCHAR(191) NOT NULL,
-- References Address.id of the input's source address.
`addressId` VARCHAR(191) NOT NULL,
-- Zero-based position of this input within the parent transaction.
`index` INTEGER NOT NULL,
-- Input value with 8 decimal places of precision.
`amount` DECIMAL(24, 8) NOT NULL,

-- Secondary indexes for lookups by parent transaction or by address.
INDEX `TransactionInput_transactionId_idx`(`transactionId`),
INDEX `TransactionInput_addressId_idx`(`addressId`),
PRIMARY KEY (`id`)
) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;

-- CreateTable
-- One row per output produced by a blockchain transaction. Mirrors the
-- structure of `TransactionInput` but records destination addresses.
CREATE TABLE `TransactionOutput` (
-- Surrogate key generated server-side via uuid() (expression default).
`id` VARCHAR(191) NOT NULL DEFAULT (uuid()),
-- References Transaction.id; the FK constraint is added in a later step.
`transactionId` VARCHAR(191) NOT NULL,
-- References Address.id of the output's destination address.
`addressId` VARCHAR(191) NOT NULL,
-- Zero-based position of this output within the parent transaction.
`index` INTEGER NOT NULL,
-- Output value with 8 decimal places of precision.
`amount` DECIMAL(24, 8) NOT NULL,

-- Secondary indexes for lookups by parent transaction or by address.
INDEX `TransactionOutput_transactionId_idx`(`transactionId`),
INDEX `TransactionOutput_addressId_idx`(`addressId`),
PRIMARY KEY (`id`)
) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;

-- AddForeignKey
-- Inputs are owned by their parent transaction: deleting the Transaction
-- cascades to its TransactionInput rows.
ALTER TABLE `TransactionInput` ADD CONSTRAINT `TransactionInput_transactionId_fkey` FOREIGN KEY (`transactionId`) REFERENCES `Transaction`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
-- Deleting an Address also removes input rows that reference it.
ALTER TABLE `TransactionInput` ADD CONSTRAINT `TransactionInput_addressId_fkey` FOREIGN KEY (`addressId`) REFERENCES `Address`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
-- Outputs are owned by their parent transaction: deleting the Transaction
-- cascades to its TransactionOutput rows.
ALTER TABLE `TransactionOutput` ADD CONSTRAINT `TransactionOutput_transactionId_fkey` FOREIGN KEY (`transactionId`) REFERENCES `Transaction`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
-- Deleting an Address also removes output rows that reference it.
ALTER TABLE `TransactionOutput` ADD CONSTRAINT `TransactionOutput_addressId_fkey` FOREIGN KEY (`addressId`) REFERENCES `Address`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;
56 changes: 43 additions & 13 deletions prisma-local/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,20 @@ datasource db {
}

model Address {
id String @id @default(dbgenerated("(uuid())"))
address String @unique @db.VarChar(255)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
networkId Int
network Network @relation(fields: [networkId], references: [id], onUpdate: Restrict)
userProfiles AddressesOnUserProfiles[]
lastSynced DateTime?
syncing Boolean @default(false)
paybuttons AddressesOnButtons[]
transactions Transaction[]
clientPayments ClientPayment[]
id String @id @default(dbgenerated("(uuid())"))
address String @unique @db.VarChar(255)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
networkId Int
network Network @relation(fields: [networkId], references: [id], onUpdate: Restrict)
userProfiles AddressesOnUserProfiles[]
lastSynced DateTime?
syncing Boolean @default(false)
paybuttons AddressesOnButtons[]
transactions Transaction[]
clientPayments ClientPayment[]
transactionInputs TransactionInput[]
transactionOutputs TransactionOutput[]

@@index([networkId], map: "Address_networkId_fkey")
}
Expand Down Expand Up @@ -77,7 +79,9 @@ model Transaction {
opReturn String @db.LongText @default("")
address Address @relation(fields: [addressId], references: [id], onDelete: Cascade, onUpdate: Cascade)
prices PricesOnTransactions[]
invoices Invoice[]
invoices Invoice[]
inputs TransactionInput[]
outputs TransactionOutput[]

createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
Expand All @@ -86,6 +90,32 @@ model Transaction {
@@index([addressId, timestamp], map: "Transaction_addressId_timestamp_idx")
}

// One spent input of a blockchain transaction. Maps to the
// `TransactionInput` table created by the companion SQL migration.
model TransactionInput {
// Surrogate key generated by the database via uuid().
id String @id @default(dbgenerated("(uuid())"))
transactionId String
addressId String
// Zero-based position of this input within the parent transaction.
index Int
// Parent transaction; deleting it cascades to its inputs.
transaction Transaction @relation(fields: [transactionId], references: [id], onDelete: Cascade)
// Source address of the funds; deleting the address cascades here too.
address Address @relation(fields: [addressId], references: [id], onDelete: Cascade)
// Input value stored as DECIMAL(24, 8).
amount Decimal @db.Decimal(24, 8)

@@index([transactionId])
@@index([addressId])
}

// One output of a blockchain transaction. Maps to the
// `TransactionOutput` table created by the companion SQL migration.
model TransactionOutput {
// Surrogate key generated by the database via uuid().
id String @id @default(dbgenerated("(uuid())"))
transactionId String
addressId String
// Zero-based position of this output within the parent transaction.
index Int
// Parent transaction; deleting it cascades to its outputs.
transaction Transaction @relation(fields: [transactionId], references: [id], onDelete: Cascade)
// Destination address; deleting the address cascades here too.
address Address @relation(fields: [addressId], references: [id], onDelete: Cascade)
// Output value stored as DECIMAL(24, 8).
amount Decimal @db.Decimal(24, 8)

@@index([transactionId])
@@index([addressId])
}

model Wallet {
id String @id @default(dbgenerated("(uuid())"))
createdAt DateTime @default(now())
Expand Down
44 changes: 41 additions & 3 deletions services/chronikService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,13 @@ import {
import { Address, Prisma, ClientPaymentStatus } from '@prisma/client'
import xecaddr from 'xecaddrjs'
import { getAddressPrefix, satoshisToUnit } from 'utils/index'
import { fetchAddressesArray, fetchAllAddressesForNetworkId, getEarliestUnconfirmedTxTimestampForAddress, getLatestConfirmedTxTimestampForAddress, setSyncing, setSyncingBatch, updateLastSynced, updateManyLastSynced } from './addressService'
import { fetchAddressesArray, fetchAllAddressesForNetworkId, getEarliestUnconfirmedTxTimestampForAddress, getLatestConfirmedTxTimestampForAddress, setSyncing, setSyncingBatch, updateLastSynced, updateManyLastSynced, upsertAddress } from './addressService'
import * as ws from 'ws'
import { BroadcastTxData } from 'ws-service/types'
import config from 'config'
import io, { Socket } from 'socket.io-client'
import moment from 'moment'
import { OpReturnData, parseError, parseOpReturnData } from 'utils/validators'
import { OpReturnData, parseAddress, parseError, parseOpReturnData } from 'utils/validators'
import { executeAddressTriggers, executeTriggersBatch } from './triggerService'
import { appendTxsToFile } from 'prisma-local/seeds/transactions'
import { PHASE_PRODUCTION_BUILD } from 'next/dist/shared/lib/constants'
Expand Down Expand Up @@ -285,13 +285,51 @@ export class ChronikBlockchainClient {

private async getTransactionFromChronikTransaction (transaction: Tx, address: Address): Promise<Prisma.TransactionUncheckedCreateInput> {
const { amount, opReturn } = await this.getTransactionAmountAndData(transaction, address.address)
const inputAddresses = this.getSortedInputAddresses(transaction)
const outputAddresses = this.getSortedOutputAddresses(transaction)

const uniqueAddressStrings = [...new Set([
...inputAddresses.map(({ address: addr }) => addr),
...outputAddresses.map(({ address: addr }) => addr)
])]
const addressIdMap = new Map<string, string>()
await Promise.all(
uniqueAddressStrings.map(async (addrStr) => {
try {
const parsed = parseAddress(addrStr)
const addr = await upsertAddress(parsed)
addressIdMap.set(parsed, addr.id)
} catch {
// Skip invalid addresses: don't upsert, don't add to map
}
})
)
Comment on lines +291 to +306
Copy link
Contributor

@coderabbitai coderabbitai bot Feb 23, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

rg -n "fetchAllAddressesForNetworkId" --type=ts -A 30

Repository: PayButton/paybutton-server

Length of output: 13076


🏁 Script executed:

# Find the addressService file(s) to examine the implementation
fd -e ts -path "*address*" | head -20

Repository: PayButton/paybutton-server

Length of output: 238


🏁 Script executed:

# Also search for where fetchAllAddressesForNetworkId is defined
rg -n "function fetchAllAddressesForNetworkId\|fetchAllAddressesForNetworkId\s*[:=]" --type=ts -B 2 -A 20

Repository: PayButton/paybutton-server

Length of output: 52


fetchAllAddressesForNetworkId returns ALL addresses — including participant records created by upsertAddress.

The implementation at services/addressService.ts:181–187 filters only by networkId with no user association check:

export async function fetchAllAddressesForNetworkId (networkId: number): Promise<Address[]> {
  return await prisma.address.findMany({
    where: {
      networkId
    }
  })
}

When upsertAddress creates Address records for participant addresses (lines 291–306), they persist in the database without user association. On the next server restart:

  1. subscribeInitialAddresses() (line 978) will subscribe to ALL addresses including these participant records.
  2. syncMissedTransactions() (line 960) will sync transactions for all participant addresses.
  3. Each synced transaction creates more participant addresses from inputs/outputs, and the next restart repeats the process — unbounded growth.

This is a fundamental behavioral change: the Address table now tracks all transaction participants, not just user-registered addresses, and the system automatically enrolls them for syncing at restart.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@services/chronikService.ts` around lines 291 - 306, The code currently calls
upsertAddress for every parsed input/output address (in the uniqueAddressStrings
map) which creates DB Address rows with no user association and causes unbounded
enrollment; instead, stop creating new "participant" addresses: for each parsed
address (from parseAddress) first query for an existing address record (use
fetchAddressByNetworkIdAndAddress or equivalent lookup) and only set
addressIdMap if an existing Address is found; do NOT call upsertAddress for
addresses that would create a record with no user association. Update the
uniqueAddressStrings mapping logic to skip creation and only populate
addressIdMap from existing addresses to prevent persisting participant-only
addresses.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The issue here is not the change but the lack of filtering on the subscription. This should be fixed separately

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Fabcien, understood! You're right that the subscription and sync logic in fetchAllAddressesForNetworkId (and related functions like subscribeInitialAddresses() and syncMissedTransactions()) should filter to only user-registered addresses rather than all participant addresses that get upserted.

Keeping the current change in getTransactionFromChronikTransaction to store inputs/outputs makes sense as a separate concern from how we manage subscriptions on server restart.

Would you like me to create a follow-up issue to track the work needed to add filtering logic to prevent subscribing/syncing participant-only addresses?


✏️ Learnings added
Learnt from: Fabcien
Repo: PayButton/paybutton-server PR: 1110
File: services/chronikService.ts:291-306
Timestamp: 2026-02-23T15:44:26.406Z
Learning: In `services/chronikService.ts`, the `upsertAddress` calls for transaction input/output addresses are intentional to store participant data. The subscription filtering (e.g., in `fetchAllAddressesForNetworkId`) should be addressed separately to distinguish user-registered addresses from participant addresses.

Note: Learnings are effective only in the context of similar code segments. To apply general coding guidelines, please configure review instructions. You can manage existing learnings in the app.


const getAddressId = (addr: string): string | undefined => {
try {
return addressIdMap.get(parseAddress(addr))
} catch {
return undefined
}
}

return {
hash: transaction.txid,
amount,
timestamp: transaction.block !== undefined ? transaction.block.timestamp : transaction.timeFirstSeen,
addressId: address.id,
confirmed: transaction.block !== undefined,
opReturn
opReturn,
inputs: {
create: inputAddresses
.map(({ address: addr, amount: amt }, i) => ({ addressId: getAddressId(addr), index: i, amount: amt }))
.filter((item): item is { addressId: string, index: number, amount: Prisma.Decimal } => item.addressId !== undefined)
},
outputs: {
create: outputAddresses
.map(({ address: addr, amount: amt }, i) => ({ addressId: getAddressId(addr), index: i, amount: amt }))
.filter((item): item is { addressId: string, index: number, amount: Prisma.Decimal } => item.addressId !== undefined)
}
}
}

Expand Down
17 changes: 13 additions & 4 deletions services/transactionService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,11 @@ export function getSimplifiedTrasaction (tx: TransactionWithAddressAndPrices, in

const parsedOpReturn = resolveOpReturn(opReturn)

const dbInputsArr = (tx as { inputs?: Array<{ address: { address: string }, amount: Prisma.Decimal }> }).inputs
const dbOutputsArr = (tx as { outputs?: Array<{ address: { address: string }, amount: Prisma.Decimal }> }).outputs
const resolvedInputAddresses = inputAddresses ?? (Array.isArray(dbInputsArr) ? dbInputsArr.map(i => ({ address: i.address.address, amount: i.amount })) : [])
const resolvedOutputAddresses = outputAddresses ?? (Array.isArray(dbOutputsArr) ? dbOutputsArr.map(o => ({ address: o.address.address, amount: o.amount })) : [])

const simplifiedTransaction: SimplifiedTransaction = {
hash,
amount,
Expand All @@ -63,8 +68,8 @@ export function getSimplifiedTrasaction (tx: TransactionWithAddressAndPrices, in
timestamp,
message: parsedOpReturn?.message ?? '',
rawMessage: parsedOpReturn?.rawMessage ?? '',
inputAddresses: inputAddresses ?? [],
outputAddresses: outputAddresses ?? [],
inputAddresses: resolvedInputAddresses,
outputAddresses: resolvedOutputAddresses,
prices: tx.prices
}

Expand All @@ -90,7 +95,9 @@ const includePrices = {

const includeAddressAndPrices = {
address: true,
...includePrices
...includePrices,
inputs: { include: { address: true }, orderBy: { index: 'asc' as const } },
outputs: { include: { address: true }, orderBy: { index: 'asc' as const } }
}

const transactionWithPrices = Prisma.validator<Prisma.TransactionDefaultArgs>()(
Expand Down Expand Up @@ -129,7 +136,9 @@ const includePaybuttonsAndPrices = {
}
}
},
...includePrices
...includePrices,
inputs: { include: { address: true }, orderBy: { index: 'asc' as const } },
outputs: { include: { address: true }, orderBy: { index: 'asc' as const } }
}
export const includePaybuttonsAndPricesAndInvoices = {
...includePaybuttonsAndPrices,
Expand Down
40 changes: 36 additions & 4 deletions tests/unittests/transactionService.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,9 @@ const includePaybuttonsAndPrices = {
}
}
},
...includePrices
...includePrices,
inputs: { include: { address: true }, orderBy: { index: 'asc' as const } },
outputs: { include: { address: true }, orderBy: { index: 'asc' as const } }
}

describe('Create services', () => {
Expand Down Expand Up @@ -194,6 +196,37 @@ describe('Address object arrays (input/output) integration', () => {
expect(simplified.inputAddresses).toEqual(inputs)
expect(simplified.outputAddresses).toEqual(outputs)
})

it('getSimplifiedTrasaction uses inputs/outputs from tx when not provided explicitly', () => {
const inputsFromDb = [
{ address: { address: 'ecash:qqinput1' }, amount: new Prisma.Decimal(1.23) },
{ address: { address: 'ecash:qqinput2' }, amount: new Prisma.Decimal(4.56) }
]
const outputsFromDb = [
{ address: { address: 'ecash:qqout1' }, amount: new Prisma.Decimal(7.89) },
{ address: { address: 'ecash:qqout2' }, amount: new Prisma.Decimal(0.12) }
]
const tx: any = {
hash: 'hash1',
amount: new Prisma.Decimal(5),
confirmed: true,
opReturn: '',
address: { address: 'ecash:qqprimaryaddressxxxxxxxxxxxxxxxxxxxxx' },
timestamp: 1700000000,
prices: mockedTransaction.prices,
inputs: inputsFromDb,
outputs: outputsFromDb
}
const simplified = transactionService.getSimplifiedTrasaction(tx)
expect(simplified.inputAddresses).toEqual([
{ address: 'ecash:qqinput1', amount: new Prisma.Decimal(1.23) },
{ address: 'ecash:qqinput2', amount: new Prisma.Decimal(4.56) }
])
expect(simplified.outputAddresses).toEqual([
{ address: 'ecash:qqout1', amount: new Prisma.Decimal(7.89) },
{ address: 'ecash:qqout2', amount: new Prisma.Decimal(0.12) }
])
})
})

describe('Date and timezone filters for transactions', () => {
Expand All @@ -206,7 +239,7 @@ describe('Date and timezone filters for transactions', () => {
{ label: 'negative offset (Canada)', timezone: 'America/Toronto' }
]

const computeExpectedRange = (tz: string) => {
const computeExpectedRange = (tz: string): { gte: number, lte: number } => {
const start = new Date(startDate)
const end = new Date(endDate)

Expand Down Expand Up @@ -234,7 +267,7 @@ describe('Date and timezone filters for transactions', () => {
}
}

const computeYearFilter = (year: number, tz: string) => {
const computeYearFilter = (year: number, tz: string): { timestamp: { gte: number, lte: number } } => {
const startDateObj = new Date(year, 0, 1, 0, 0, 0)
const endDateObj = new Date(year, 11, 31, 23, 59, 59)

Expand Down Expand Up @@ -424,4 +457,3 @@ describe('Date and timezone filters for transactions', () => {
expect(callArgs.where.OR).toBeUndefined()
})
})