Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion db/scripts/4_create_views.sql
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ ORDER BY l1.batch_index ASC
CREATE OR REPLACE VIEW batchable_l2_only_tx_states
AS

SELECT tx.*, row_number() over (ORDER BY tx.id ASC) -1 AS row_number
SELECT tx.*, row_number() over (ORDER BY tx.block_number ASC, tx.tx_index ASC) -1 AS row_number
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ah, so if you have multiple transactions in the same block then you order by the tx index — is that right? This keeps everything globally ordered.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm trying to replace all ordering on ID since that appears to be at least somewhat nondeterministic under load. If somehow we end up with 2 L2 txs in the same block (we shouldn't — that'd be an issue), we'll order those by the tx index within the block.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ok great, that makes sense

FROM l2_tx_output tx
INNER JOIN canonical_chain_batch cc
ON tx.canonical_chain_batch_number = cc.batch_number
Expand Down
36 changes: 17 additions & 19 deletions packages/rollup-core/src/app/data/data-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -422,7 +422,7 @@ export class DefaultDataService implements DataService {

const batchTimestamp = parseInt(txRes[0]['block_timestamp'], 10)

const maxTxId = await this.getMaxL2TxOutputIdForCanonicalChainBatch(
const maxBlockNumber = await this.getMaxL2TxOutputBlockNumberForCanonicalChainBatch(
batchTimestamp,
maxBatchCalldataBytes
)
Expand All @@ -434,20 +434,19 @@ export class DefaultDataService implements DataService {
`UPDATE l2_tx_output tx
SET
canonical_chain_batch_number = ${batchNumber},
canonical_chain_batch_index = t.row_number
canonical_chain_batch_index = t.batch_index
FROM
(
SELECT id, row_number() over (ORDER BY id) -1 as row_number
SELECT id, row_number() over (ORDER BY block_number ASC, tx_index ASC) -1 as batch_index
FROM l2_tx_output
WHERE
canonical_chain_batch_number IS NULL
AND l1_rollup_tx_id IS NULL
AND block_timestamp = ${batchTimestamp}
AND id <= ${maxTxId}
AND block_number <= ${maxBlockNumber}
ORDER BY block_number ASC, tx_index ASC
) t
WHERE tx.id = t.id
`,
WHERE tx.id = t.id`,
txContext
)

Expand All @@ -460,19 +459,19 @@ export class DefaultDataService implements DataService {
}
}

public async getMaxL2TxOutputIdForCanonicalChainBatch(
public async getMaxL2TxOutputBlockNumberForCanonicalChainBatch(
batchTimestamp: number,
maxBatchCalldataBytes: number
): Promise<number> {
const res: Row[] = await this.rdb.select(
`SELECT
id,
block_number,
GREATEST(LENGTH(calldata)-2, 0) / 2 + ${ROLLUP_TX_SIZE_IN_BYTES_MINUS_CALLDATA} as calldata_bytes
FROM l2_tx_output
WHERE
block_timestamp = ${batchTimestamp}
AND canonical_chain_batch_number IS NULL
ORDER BY id ASC
ORDER BY block_number ASC, tx_index ASC
`
)

Expand All @@ -483,30 +482,30 @@ export class DefaultDataService implements DataService {
}

let totalCalldataBytes: number = 0
let lastId = -1
let lastBlockNumber = -1
for (const row of res) {
const rowBytes: number = parseInt(row['calldata_bytes'], 10)
totalCalldataBytes += rowBytes
if (totalCalldataBytes > maxBatchCalldataBytes) {
if (lastId === -1) {
const msg: string = `L2 Tx with ID ${row['id']} has ${totalCalldataBytes} bytes of calldata, which is bigger than the limit of ${maxBatchCalldataBytes}! Cannot roll up this transaction!`
if (lastBlockNumber === -1) {
const msg: string = `L2 Tx with block number ${row['block_number']} has ${totalCalldataBytes} bytes of calldata, which is bigger than the limit of ${maxBatchCalldataBytes}! Cannot roll up this transaction!`
log.error(msg)
throw Error(msg)
}
log.debug(
`Building Canonical Chain Batch with ${totalCalldataBytes -
rowBytes} bytes of rollup tx calldata and timestamp ${batchTimestamp}. Largest tx output ID: ${lastId}`
rowBytes} bytes of rollup tx calldata and timestamp ${batchTimestamp}. Largest tx output ID: ${lastBlockNumber}`
)
return lastId
return lastBlockNumber
}
lastId = parseInt(row['id'], 10)
lastBlockNumber = parseInt(row['block_number'], 10)
}

log.debug(
`Building Canonical Chain Batch with ${totalCalldataBytes} bytes of rollup tx calldata and timestamp ${batchTimestamp}. Largest tx output ID: ${lastId}`
`Building Canonical Chain Batch with ${totalCalldataBytes} bytes of rollup tx calldata and timestamp ${batchTimestamp}. Largest tx output ID: ${lastBlockNumber}`
)

return lastId
return lastBlockNumber
}

/**
Expand Down Expand Up @@ -605,7 +604,7 @@ export class DefaultDataService implements DataService {
state_commitment_chain_batch_number = ${batchNumber},
state_commitment_chain_batch_index = t.row_number
FROM (
SELECT id, row_number() over (ORDER BY id) -1 as row_number
SELECT id, row_number() over (ORDER BY block_number ASC, tx_index ASC) -1 as row_number
FROM l2_tx_output
WHERE state_commitment_chain_batch_number IS NULL
ORDER BY block_number ASC, tx_index ASC
Expand Down Expand Up @@ -670,7 +669,6 @@ export class DefaultDataService implements DataService {
FROM (
SELECT id, row_number
FROM batchable_l2_only_tx_states
ORDER BY block_number ASC, tx_index ASC
LIMIT ${maxBatchSize}
) t
WHERE tx.id = t.id`,
Expand Down
6 changes: 3 additions & 3 deletions packages/rollup-core/src/types/data/l2-data-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,15 @@ export interface L2DataService {
): Promise<number>

/**
* Gets the largest L2 Tx Output ID that should be included in the batch built with batchTimestamp.
* Gets the largest L2 Tx Output block number that should be included in the batch built with batchTimestamp.
* This is mainly useful as a filter when there is an available batch that has enough rollup transactions'
* bytes to exceed the maxBatchCalldataBytes value.
*
* @param batchTimestamp The block timestamp of the L2 Tx Outputs to be used for the Rollup Batch.
* @param maxBatchCalldataBytes The max amount of rolled up tx bytes to include in the batch.
* @returns The ID of the last (biggest ID) L2 Tx Output to be included in the batch.
* @returns The L2 block number of the last (latest) L2 Tx Output to be included in the batch.
*/
getMaxL2TxOutputIdForCanonicalChainBatch(
getMaxL2TxOutputBlockNumberForCanonicalChainBatch(
batchTimestamp: number,
maxBatchCalldataBytes: number
): Promise<number>
Expand Down
14 changes: 13 additions & 1 deletion packages/rollup-core/test/db/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {
ZERO,
ZERO_ADDRESS,
} from '@eth-optimism/core-utils'
import { PostgresDB, Row } from '@eth-optimism/core-db'
import { PostgresDB, RDB, Row } from '@eth-optimism/core-db'
import { BigNumber as BigNum } from 'ethers/utils'
import { Block, TransactionResponse } from 'ethers/providers'

Expand Down Expand Up @@ -370,3 +370,15 @@ export const insertTxOutput = async (
}
}
}

export const selectStateRootBatchRes = async (
rdb: RDB,
batchNum: number
): Promise<Row[]> => {
return rdb.select(
`SELECT *
FROM l2_tx_output
WHERE state_commitment_chain_batch_number = ${batchNum}
ORDER BY state_commitment_chain_batch_index ASC`
)
}
Loading