Skip to content
Permalink

Comparing changes

This is a direct comparison between two commits made in this repository or its related repositories. View the default comparison for this range or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: Blobscan/optimism-batch-decoder
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: 032393851b3203db2be14d0e9e22ece8d424dbad
Choose a base ref
..
head repository: Blobscan/optimism-batch-decoder
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: 6d4f748b75884ee993d3950bae4b26f0588242d1
Choose a head ref
Showing with 10 additions and 23 deletions.
  1. +10 −23 src/index2.ts
33 changes: 10 additions & 23 deletions src/index2.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import fs from 'fs'
import rlp from 'rlp'
import { decompressBatches } from './batches/batch'
import type { FramesWithCompressedData } from './frames/frame'

/**
* Read the binary file and split it into chunks of the specified size.
@@ -31,8 +30,8 @@ function bytesToNumber(bytes: Uint8Array): number {
* @param datas - Array of Uint8Array data chunks to process.
* @returns An array of frames with compressed data.
*/
function processChannelData(datas: Uint8Array[]): FramesWithCompressedData {
const frames: FramesWithCompressedData = []
function processChannelData(datas: Uint8Array[]): string {
const frames: string[] = []

for (let data of datas) {
if (data[0] !== 0) throw new Error('Assertion failed: data[0] must be 0 (derivation version)')
@@ -64,18 +63,18 @@ function processChannelData(datas: Uint8Array[]): FramesWithCompressedData {
.map((byte) => byte.toString(16).padStart(2, '0'))
.join('')

frames.push({
channelId,
frameNumber: frameNum,
data: frameData,
isLast
})
frames.push(frameData)

data = data.slice(end) // Move to the next chunk of data
}
}

return frames
const channel = Buffer.from(frames.join(''), 'hex')
console.log('full channel', channel.length, 'bytes')
//console.log(channel.slice(0, 100).toString())
console.log(channel.toString('hex').slice(0, 100))

return frames.join('')
}

/**
@@ -196,19 +195,7 @@ async function processFile(filename: string): Promise<void> {
datas.push(blobData.slice(4, declaredLength + 4))
}

const channel_parts: string[] = []
const rawFrames = processChannelData(datas)

for (const rawFrame of rawFrames) {
channel_parts.push(rawFrame.data)
}
const channel = Buffer.from(channel_parts.join(''), 'hex')

console.log('full channel', channel.length, 'bytes')
//console.log(channel.slice(0, 100).toString())
console.log(channel.toString('hex').slice(0, 100))

const fullChannel = channel_parts.join('')
const fullChannel = processChannelData(datas)

const decompressed = await decompressBatches(fullChannel)
const dataToDecode: Uint8Array = decompressed