A fast, efficient list of Buffer objects optimized for reading and writing across multiple binary chunks without unnecessary copying.
npm install dynbuffer

- Zero-copy operations: Read and write across buffer boundaries without copying data
- Buffer-compatible API: Familiar methods that mirror Node.js Buffer API
- Variable-length integers: Built-in support for varint encoding with ZigZag for signed values
- Memory efficient: Only concatenates buffers when explicitly requested
- TypeScript ready: Written in JavaScript with clear type patterns
const { DynamicBuffer } = require('dynbuffer')
// Create from existing buffers
const db = new DynamicBuffer([
Buffer.from([1, 2]),
Buffer.from([3, 4, 5])
])
// Read across buffer boundaries seamlessly
console.log(db.readUInt16BE(1)) // Reads bytes 1-2: [2, 3]
// Append more data
db.append(Buffer.from([6, 7, 8]))
// Access the full buffer when needed
console.log(db.buffer) // <Buffer 01 02 03 04 05 06 07 08>

Creates a new DynamicBuffer instance.
buffers(Buffer | Buffer[]): Optional initial buffer(s)
const db1 = new DynamicBuffer() // Empty
const db2 = new DynamicBuffer(Buffer.from([1, 2, 3])) // Single buffer
const db3 = new DynamicBuffer([buf1, buf2, buf3]) // Multiple buffers

Returns the total length of all buffers combined.
Returns a concatenated Buffer of all internal buffers. Only performs concatenation when accessed.
Direct access to the internal buffer array (use with caution).
Checks if an object is a DynamicBuffer instance.
DynamicBuffer.isDynamicBuffer(new DynamicBuffer()) // true
DynamicBuffer.isDynamicBuffer(Buffer.from([1, 2])) // false

Appends a buffer to the end. Returns this for chaining.
db.append(Buffer.from([1, 2, 3]))

Prepends a buffer to the beginning. Returns this for chaining.
db.prepend(Buffer.from([1, 2, 3]))

Appends all buffers from another DynamicBuffer. Returns this for chaining.
const other = new DynamicBuffer([buf1, buf2])
db.appendFrom(other)

Prepends all buffers from another DynamicBuffer. Returns this for chaining.
db.prependFrom(other)

Returns the byte at the specified offset.
const byte = db.get(5) // Returns byte at position 5

Returns a new Buffer containing the specified slice.
const slice = db.slice(2, 8) // Buffer from positions 2-7

Returns a new DynamicBuffer containing the specified range.
const sub = db.subarray(2, 8) // DynamicBuffer from positions 2-7

Converts to string using the specified encoding.
const str = db.toString('utf8', 0, 10)

Creates a copy of the DynamicBuffer.
deep(boolean): If true, creates copies of internal buffers
const shallow = db.clone()
const deep = db.clone(true)

Removes bytes from the beginning up to the specified offset. Returns this for chaining.
db.consume(4) // Remove first 4 bytes

All read methods support an optional offset parameter (defaults to 0).
db.readUInt8(offset)
db.readInt8(offset)
db.readUInt16BE(offset)
db.readUInt16LE(offset)
db.readInt16BE(offset)
db.readInt16LE(offset)
db.readUInt32BE(offset)
db.readUInt32LE(offset)
db.readInt32BE(offset)
db.readInt32LE(offset)
db.readBigUInt64BE(offset)
db.readBigUInt64LE(offset)
db.readBigInt64BE(offset)
db.readBigInt64LE(offset)

db.readFloatBE(offset)
db.readFloatLE(offset)
db.readDoubleBE(offset)
db.readDoubleLE(offset)

Returns a [value, bytesRead] tuple.
const [value, bytesRead] = db.readUnsignedVarInt(offset)
const [value, bytesRead] = db.readUnsignedVarInt64(offset)
const [value, bytesRead] = db.readVarInt(offset) // ZigZag decoded
const [value, bytesRead] = db.readVarInt64(offset) // ZigZag decoded

All write methods support an optional append parameter (defaults to true). When append is false, data is prepended.
db.writeUInt8(value, append)
db.writeInt8(value, append)
db.writeUInt16BE(value, append)
db.writeUInt16LE(value, append)
db.writeInt16BE(value, append)
db.writeInt16LE(value, append)
db.writeUInt32BE(value, append)
db.writeUInt32LE(value, append)
db.writeInt32BE(value, append)
db.writeInt32LE(value, append)
db.writeBigUInt64BE(value, append)
db.writeBigUInt64LE(value, append)
db.writeBigInt64BE(value, append)
db.writeBigInt64LE(value, append)

db.writeFloatBE(value, append)
db.writeFloatLE(value, append)
db.writeDoubleBE(value, append)
db.writeDoubleLE(value, append)

db.writeUnsignedVarInt(value, append)
db.writeUnsignedVarInt64(value, append)
db.writeVarInt(value, append) // ZigZag encoded
db.writeVarInt64(value, append) // ZigZag encoded

const { DynamicBuffer } = require('dynbuffer')
const message = new DynamicBuffer()
// Write header
message.writeUInt32BE(0x12345678) // Magic number
message.writeUInt16BE(1) // Version
message.writeVarInt(payload.length) // Payload length
// Append payload
message.append(payload)
// Send the complete message
socket.write(message.buffer)

const parser = new DynamicBuffer()
socket.on('data', (chunk) => {
parser.append(chunk)
while (parser.length >= 4) {
const messageLength = parser.readUInt32BE(0)
if (parser.length >= 4 + messageLength) {
// Extract complete message
const message = parser.slice(4, 4 + messageLength)
processMessage(message)
// Remove processed data
parser.consume(4 + messageLength)
} else {
break // Wait for more data
}
}
})

const db = new DynamicBuffer()
// Write variable-length integers
db.writeVarInt(42) // Positive number
db.writeVarInt(-42) // Negative number (ZigZag encoded)
db.writeVarInt64(123456789012345n) // Large number
// Read them back
let offset = 0
const [val1, bytes1] = db.readVarInt(offset)
offset += bytes1
const [val2, bytes2] = db.readVarInt(offset)
offset += bytes2
const [val3, bytes3] = db.readVarInt64(offset)

DynamicBuffer is designed for scenarios where you need to:
- Build up binary data incrementally without knowing the final size
- Read structured data from multiple buffer chunks
- Avoid expensive buffer concatenation operations
- Work with streaming data where messages may span multiple chunks
The library only allocates new memory when absolutely necessary (like calling .buffer or .slice()).
Apache-2.0
This project follows standard Node.js conventions. Run npm test to execute the test suite.