author     Jacob Evans <dekz@dekz.net>              2018-06-18 19:50:35 +0800
committer  GitHub <noreply@github.com>               2018-06-18 19:50:35 +0800
commit     190eafc30e2e444ed15b76217a6162ec04b33f73 (patch)
tree       b20cbad73ff7a069dc0f0ef43ebc0373c714ad02 /packages/contracts/test/libraries
parent     d4ee0e862297c16f8ee62efccd31f1193052c64e (diff)
parent     0c238448fda99c4d7997901d0fe4d72cb06b79cc (diff)
Merge branch 'v2-prototype' into bug/contracts/eip712-191-prefix
Diffstat (limited to 'packages/contracts/test/libraries')
-rw-r--r--  packages/contracts/test/libraries/lib_bytes.ts  | 491
-rw-r--r--  packages/contracts/test/libraries/lib_mem.ts    | 190
2 files changed, 598 insertions, 83 deletions
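
Note on the recurring change in this diff: bare `rejectedWith(constants.REVERT)` assertions are replaced with `expectRevertOrOtherErrorAsync`, imported from `src/utils/assertions` (the helper itself is not part of this diff). The sketch below is only an inference from the call sites — a promise plus an expected require() reason — and is not the actual implementation:

// Hedged sketch, inferred from how the tests call it; the real helper in
// packages/contracts/src/utils/assertions.ts may differ.
import * as chai from 'chai';
const expect = chai.expect;

// Resolves if `p` rejects with either a plain revert or the given reason string
// (different nodes surface different amounts of the require() message).
async function expectRevertOrOtherErrorAsync<T>(p: Promise<T>, expectedError: string): Promise<void> {
    try {
        await p;
    } catch (err) {
        const message = (err as Error).message;
        expect(message.includes('revert') || message.includes(expectedError)).to.be.true;
        return;
    }
    expect.fail('Expected promise to reject');
}
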
diff --git a/packages/contracts/test/libraries/lib_bytes.ts b/packages/contracts/test/libraries/lib_bytes.ts
index 26cfa8291..a31a4789c 100644
--- a/packages/contracts/test/libraries/lib_bytes.ts
+++ b/packages/contracts/test/libraries/lib_bytes.ts
@@ -1,15 +1,14 @@
-import { BlockchainLifecycle, devConstants, web3Factory } from '@0xproject/dev-utils';
-import { AssetProxyId } from '@0xproject/types';
+import { BlockchainLifecycle } from '@0xproject/dev-utils';
+import { assetProxyUtils, generatePseudoRandomSalt } from '@0xproject/order-utils';
import { BigNumber } from '@0xproject/utils';
-import { Web3Wrapper } from '@0xproject/web3-wrapper';
import BN = require('bn.js');
import * as chai from 'chai';
-import { LogWithDecodedArgs, TransactionReceiptWithDecodedLogs } from 'ethereum-types';
import ethUtil = require('ethereumjs-util');
-import * as Web3 from 'web3';
+import * as _ from 'lodash';
-import { TestLibBytesContract } from '../../src/contract_wrappers/generated/test_lib_bytes';
+import { TestLibBytesContract } from '../../src/generated_contract_wrappers/test_lib_bytes';
import { artifacts } from '../../src/utils/artifacts';
+import { expectRevertOrOtherErrorAsync } from '../../src/utils/assertions';
import { chaiSetup } from '../../src/utils/chai_setup';
import { constants } from '../../src/utils/constants';
import { provider, txDefaults, web3Wrapper } from '../../src/utils/web3_wrapper';
@@ -19,7 +18,6 @@ const expect = chai.expect;
const blockchainLifecycle = new BlockchainLifecycle(web3Wrapper);
describe('LibBytes', () => {
- let owner: string;
let libBytes: TestLibBytesContract;
const byteArrayShorterThan32Bytes = '0x012345';
const byteArrayShorterThan20Bytes = byteArrayShorterThan32Bytes;
@@ -30,8 +28,20 @@ describe('LibBytes', () => {
const byteArrayLongerThan32BytesLastBytesSwapped =
'0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abefcd';
let testAddress: string;
+ let testAddressB: string;
const testBytes32 = '0x102030405060708090a0b0c0d0e0f0102030405060708090a0b0c0d0e0f01020';
+ const testBytes32B = '0x534877abd8443578526845cdfef020047528759477fedef87346527659aced32';
const testUint256 = new BigNumber(testBytes32, 16);
+ const testUint256B = new BigNumber(testBytes32B, 16);
+ let shortData: string;
+ let shortTestBytes: string;
+ let shortTestBytesAsBuffer: Buffer;
+ let wordOfData: string;
+ let wordOfTestBytes: string;
+ let wordOfTestBytesAsBuffer: Buffer;
+ let longData: string;
+ let longTestBytes: string;
+ let longTestBytesAsBuffer: Buffer;
before(async () => {
await blockchainLifecycle.startAsync();
@@ -42,8 +52,8 @@ describe('LibBytes', () => {
before(async () => {
// Setup accounts & addresses
const accounts = await web3Wrapper.getAvailableAddressesAsync();
- owner = accounts[0];
testAddress = accounts[1];
+ testAddressB = accounts[2];
// Deploy LibBytes
libBytes = await TestLibBytesContract.deployFrom0xArtifactAsync(artifacts.TestLibBytes, provider, txDefaults);
// Verify lengths of test data
@@ -53,6 +63,26 @@ describe('LibBytes', () => {
expect(byteArrayLongerThan32BytesLength).to.be.greaterThan(32);
const testBytes32Length = ethUtil.toBuffer(testBytes32).byteLength;
expect(testBytes32Length).to.be.equal(32);
+ // Create short test bytes
+ shortData = '0xffffaa';
+ const encodedShortData = ethUtil.toBuffer(shortData);
+ const shortDataLength = new BigNumber(encodedShortData.byteLength);
+ const encodedShortDataLength = assetProxyUtils.encodeUint256(shortDataLength);
+ shortTestBytesAsBuffer = Buffer.concat([encodedShortDataLength, encodedShortData]);
+ shortTestBytes = ethUtil.bufferToHex(shortTestBytesAsBuffer);
+ // Create test bytes one word in length
+ wordOfData = ethUtil.bufferToHex(assetProxyUtils.encodeUint256(generatePseudoRandomSalt()));
+ const encodedWordOfData = ethUtil.toBuffer(wordOfData);
+ const wordOfDataLength = new BigNumber(encodedWordOfData.byteLength);
+ const encodedWordOfDataLength = assetProxyUtils.encodeUint256(wordOfDataLength);
+ wordOfTestBytesAsBuffer = Buffer.concat([encodedWordOfDataLength, encodedWordOfData]);
+ wordOfTestBytes = ethUtil.bufferToHex(wordOfTestBytesAsBuffer);
+ // Create long test bytes (combines short test bytes with word of test bytes)
+ longData = ethUtil.bufferToHex(Buffer.concat([encodedShortData, encodedWordOfData]));
+ const longDataLength = new BigNumber(encodedShortData.byteLength + encodedWordOfData.byteLength);
+ const encodedLongDataLength = assetProxyUtils.encodeUint256(longDataLength);
+ longTestBytesAsBuffer = Buffer.concat([encodedLongDataLength, encodedShortData, encodedWordOfData]);
+ longTestBytes = ethUtil.bufferToHex(longTestBytesAsBuffer);
});
beforeEach(async () => {
await blockchainLifecycle.startAsync();
@@ -61,13 +91,15 @@ describe('LibBytes', () => {
await blockchainLifecycle.revertAsync();
});
- describe('popByte', () => {
+ describe('popLastByte', () => {
it('should revert if length is 0', async () => {
- return expect(libBytes.publicPopByte.callAsync(constants.NULL_BYTES)).to.be.rejectedWith(constants.REVERT);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicPopLastByte.callAsync(constants.NULL_BYTES),
+ constants.LIB_BYTES_GREATER_THAN_ZERO_LENGTH_REQUIRED,
+ );
});
-
it('should pop the last byte from the input and return it', async () => {
- const [newBytes, poppedByte] = await libBytes.publicPopByte.callAsync(byteArrayLongerThan32Bytes);
+ const [newBytes, poppedByte] = await libBytes.publicPopLastByte.callAsync(byteArrayLongerThan32Bytes);
const expectedNewBytes = byteArrayLongerThan32Bytes.slice(0, -2);
const expectedPoppedByte = `0x${byteArrayLongerThan32Bytes.slice(-2)}`;
expect(newBytes).to.equal(expectedNewBytes);
@@ -75,15 +107,15 @@ describe('LibBytes', () => {
});
});
- describe('popAddress', () => {
+ describe('popLast20Bytes', () => {
it('should revert if length is less than 20', async () => {
- return expect(libBytes.publicPopAddress.callAsync(byteArrayShorterThan20Bytes)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicPopLast20Bytes.callAsync(byteArrayShorterThan20Bytes),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
);
});
-
it('should pop the last 20 bytes from the input and return it', async () => {
- const [newBytes, poppedAddress] = await libBytes.publicPopAddress.callAsync(byteArrayLongerThan32Bytes);
+ const [newBytes, poppedAddress] = await libBytes.publicPopLast20Bytes.callAsync(byteArrayLongerThan32Bytes);
const expectedNewBytes = byteArrayLongerThan32Bytes.slice(0, -40);
const expectedPoppedAddress = `0x${byteArrayLongerThan32Bytes.slice(-40)}`;
expect(newBytes).to.equal(expectedNewBytes);
@@ -99,7 +131,6 @@ describe('LibBytes', () => {
);
return expect(areBytesEqual).to.be.true();
});
-
it('should return true if byte arrays are equal (both arrays > 32 bytes)', async () => {
const areBytesEqual = await libBytes.publicAreBytesEqual.callAsync(
byteArrayLongerThan32Bytes,
@@ -107,7 +138,6 @@ describe('LibBytes', () => {
);
return expect(areBytesEqual).to.be.true();
});
-
it('should return false if byte arrays are not equal (first array < 32 bytes, second array > 32 bytes)', async () => {
const areBytesEqual = await libBytes.publicAreBytesEqual.callAsync(
byteArrayShorterThan32Bytes,
@@ -115,7 +145,6 @@ describe('LibBytes', () => {
);
return expect(areBytesEqual).to.be.false();
});
-
it('should return false if byte arrays are not equal (first array > 32 bytes, second array < 32 bytes)', async () => {
const areBytesEqual = await libBytes.publicAreBytesEqual.callAsync(
byteArrayLongerThan32Bytes,
@@ -123,7 +152,6 @@ describe('LibBytes', () => {
);
return expect(areBytesEqual).to.be.false();
});
-
it('should return false if byte arrays are not equal (same length, but a byte in first word differs)', async () => {
const areBytesEqual = await libBytes.publicAreBytesEqual.callAsync(
byteArrayLongerThan32BytesFirstBytesSwapped,
@@ -131,7 +159,6 @@ describe('LibBytes', () => {
);
return expect(areBytesEqual).to.be.false();
});
-
it('should return false if byte arrays are not equal (same length, but a byte in last word differs)', async () => {
const areBytesEqual = await libBytes.publicAreBytesEqual.callAsync(
byteArrayLongerThan32BytesLastBytesSwapped,
@@ -141,15 +168,50 @@ describe('LibBytes', () => {
});
});
+ describe('deepCopyBytes', () => {
+ it('should revert if dest is shorter than source', async () => {
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicDeepCopyBytes.callAsync(byteArrayShorterThan32Bytes, byteArrayLongerThan32Bytes),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_SOURCE_BYTES_LENGTH_REQUIRED,
+ );
+ });
+ it('should overwrite dest with source if source and dest have equal length', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length - 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(zeroedBytesAfterCopy).to.be.equal(byteArrayLongerThan32Bytes);
+ });
+ it('should overwrite the leftmost len(source) bytes of dest if dest is larger than source', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length * 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ const copiedBytes = zeroedBytesAfterCopy.slice(0, byteArrayLongerThan32Bytes.length);
+ return expect(copiedBytes).to.be.equal(byteArrayLongerThan32Bytes);
+ });
+ it('should not overwrite the rightmost bytes of dest if dest is larger than source', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length * 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ const expectedNotCopiedBytes = zeroedByteArrayLongerThan32Bytes.slice(byteArrayLongerThan32Bytes.length);
+ const notCopiedBytes = zeroedBytesAfterCopy.slice(byteArrayLongerThan32Bytes.length);
+ return expect(notCopiedBytes).to.be.equal(expectedNotCopiedBytes);
+ });
+ });
+
describe('readAddress', () => {
- it('should successfully read address when the address takes up the whole array)', async () => {
+ it('should successfully read address when the address takes up the whole array', async () => {
const byteArray = ethUtil.addHexPrefix(testAddress);
const testAddressOffset = new BigNumber(0);
const address = await libBytes.publicReadAddress.callAsync(byteArray, testAddressOffset);
return expect(address).to.be.equal(testAddress);
});
-
- it('should successfully read address when it is offset in the array)', async () => {
+ it('should successfully read address when it is offset in the array', async () => {
const addressByteArrayBuffer = ethUtil.toBuffer(testAddress);
const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, addressByteArrayBuffer]);
@@ -158,80 +220,145 @@ describe('LibBytes', () => {
const address = await libBytes.publicReadAddress.callAsync(combinedByteArray, testAddressOffset);
return expect(address).to.be.equal(testAddress);
});
-
- it('should fail if the byte array is too short to hold an address)', async () => {
+ it('should fail if the byte array is too short to hold an address', async () => {
const shortByteArray = '0xabcdef';
const offset = new BigNumber(0);
- return expect(libBytes.publicReadAddress.callAsync(shortByteArray, offset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadAddress.callAsync(shortByteArray, offset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
);
});
-
- it('should fail if the length between the offset and end of the byte array is too short to hold an address)', async () => {
- const byteArray = ethUtil.addHexPrefix(testAddress);
+ it('should fail if the length between the offset and end of the byte array is too short to hold an address', async () => {
+ const byteArray = testAddress;
const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
- return expect(libBytes.publicReadAddress.callAsync(byteArray, badOffset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadAddress.callAsync(byteArray, badOffset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
);
});
});
- /// @TODO Implement test cases for writeAddress. Test template below.
- /// Currently, the generated contract wrappers do not support this library's write methods.
- /*
describe('writeAddress', () => {
- it('should successfully write address when the address takes up the whole array)', async () => {});
- it('should successfully write address when it is offset in the array)', async () => {});
- it('should fail if the byte array is too short to hold an address)', async () => {});
- it('should fail if the length between the offset and end of the byte array is too short to hold an address)', async () => {});
+ it('should successfully write address when the address takes up the whole array', async () => {
+ const byteArray = testAddress;
+ const testAddressOffset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteAddress.callAsync(
+ byteArray,
+ testAddressOffset,
+ testAddressB,
+ );
+ return expect(newByteArray).to.be.equal(testAddressB);
+ });
+ it('should successfully write address when it is offset in the array', async () => {
+ const addressByteArrayBuffer = ethUtil.toBuffer(testAddress);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, addressByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testAddressOffset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteAddress.callAsync(
+ combinedByteArray,
+ testAddressOffset,
+ testAddressB,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const addressFromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const addressFromOffset = ethUtil.addHexPrefix(ethUtil.bufferToHex(addressFromOffsetBuffer));
+ return expect(addressFromOffset).to.be.equal(testAddressB);
+ });
+ it('should fail if the byte array is too short to hold an address', async () => {
+ const offset = new BigNumber(0);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteAddress.callAsync(byteArrayShorterThan20Bytes, offset, testAddress),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold an address', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteAddress.callAsync(byteArray, badOffset, testAddress),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
+ );
+ });
});
- */
describe('readBytes32', () => {
- it('should successfully read bytes32 when the bytes32 takes up the whole array)', async () => {
+ it('should successfully read bytes32 when the bytes32 takes up the whole array', async () => {
const testBytes32Offset = new BigNumber(0);
const bytes32 = await libBytes.publicReadBytes32.callAsync(testBytes32, testBytes32Offset);
return expect(bytes32).to.be.equal(testBytes32);
});
-
it('should successfully read bytes32 when it is offset in the array)', async () => {
const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
- const testAddressOffset = new BigNumber(prefixByteArrayBuffer.byteLength);
- const bytes32 = await libBytes.publicReadBytes32.callAsync(combinedByteArray, testAddressOffset);
+ const testBytes32Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes32 = await libBytes.publicReadBytes32.callAsync(combinedByteArray, testBytes32Offset);
return expect(bytes32).to.be.equal(testBytes32);
});
-
- it('should fail if the byte array is too short to hold a bytes32)', async () => {
+ it('should fail if the byte array is too short to hold a bytes32', async () => {
const offset = new BigNumber(0);
- return expect(libBytes.publicReadBytes32.callAsync(byteArrayShorterThan32Bytes, offset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes32.callAsync(byteArrayShorterThan32Bytes, offset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
);
});
-
- it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32)', async () => {
+ it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32', async () => {
const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
- return expect(libBytes.publicReadBytes32.callAsync(testBytes32, badOffset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes32.callAsync(testBytes32, badOffset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
);
});
});
- /// @TODO Implement test cases for writeBytes32. Test template below.
- /// Currently, the generated contract wrappers do not support this library's write methods.
- /*
describe('writeBytes32', () => {
- it('should successfully write bytes32 when the address takes up the whole array)', async () => {});
- it('should successfully write bytes32 when it is offset in the array)', async () => {});
- it('should fail if the byte array is too short to hold a bytes32)', async () => {});
- it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32)', async () => {});
+ it('should successfully write bytes32 when the address takes up the whole array', async () => {
+ const byteArray = testBytes32;
+ const testBytes32Offset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteBytes32.callAsync(
+ byteArray,
+ testBytes32Offset,
+ testBytes32B,
+ );
+ return expect(newByteArray).to.be.equal(testBytes32B);
+ });
+ it('should successfully write bytes32 when it is offset in the array', async () => {
+ const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testBytes32Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteBytes32.callAsync(
+ combinedByteArray,
+ testBytes32Offset,
+ testBytes32B,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const bytes32FromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const bytes32FromOffset = ethUtil.addHexPrefix(ethUtil.bufferToHex(bytes32FromOffsetBuffer));
+ return expect(bytes32FromOffset).to.be.equal(testBytes32B);
+ });
+ it('should fail if the byte array is too short to hold a bytes32', async () => {
+ const offset = new BigNumber(0);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteBytes32.callAsync(byteArrayShorterThan32Bytes, offset, testBytes32),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteBytes32.callAsync(byteArray, badOffset, testBytes32),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
});
- */
describe('readUint256', () => {
- it('should successfully read uint256 when the uint256 takes up the whole array)', async () => {
+ it('should successfully read uint256 when the uint256 takes up the whole array', async () => {
const formattedTestUint256 = new BN(testUint256.toString(10));
const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
const byteArray = ethUtil.bufferToHex(testUint256AsBuffer);
@@ -239,8 +366,7 @@ describe('LibBytes', () => {
const uint256 = await libBytes.publicReadUint256.callAsync(byteArray, testUint256Offset);
return expect(uint256).to.bignumber.equal(testUint256);
});
-
- it('should successfully read uint256 when it is offset in the array)', async () => {
+ it('should successfully read uint256 when it is offset in the array', async () => {
const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
const formattedTestUint256 = new BN(testUint256.toString(10));
const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
@@ -250,41 +376,80 @@ describe('LibBytes', () => {
const uint256 = await libBytes.publicReadUint256.callAsync(combinedByteArray, testUint256Offset);
return expect(uint256).to.bignumber.equal(testUint256);
});
-
- it('should fail if the byte array is too short to hold a uint256)', async () => {
+ it('should fail if the byte array is too short to hold a uint256', async () => {
const offset = new BigNumber(0);
- return expect(libBytes.publicReadUint256.callAsync(byteArrayShorterThan32Bytes, offset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadUint256.callAsync(byteArrayShorterThan32Bytes, offset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
);
});
-
- it('should fail if the length between the offset and end of the byte array is too short to hold a uint256)', async () => {
+ it('should fail if the length between the offset and end of the byte array is too short to hold a uint256', async () => {
const formattedTestUint256 = new BN(testUint256.toString(10));
const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
const byteArray = ethUtil.bufferToHex(testUint256AsBuffer);
const badOffset = new BigNumber(testUint256AsBuffer.byteLength);
- return expect(libBytes.publicReadUint256.callAsync(byteArray, badOffset)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadUint256.callAsync(byteArray, badOffset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
);
});
});
- /// @TODO Implement test cases for writeUint256. Test template below.
- /// Currently, the generated contract wrappers do not support this library's write methods.
- /*
describe('writeUint256', () => {
- it('should successfully write uint256 when the address takes up the whole array)', async () => {});
- it('should successfully write uint256 when it is offset in the array)', async () => {});
- it('should fail if the byte array is too short to hold a uint256)', async () => {});
- it('should fail if the length between the offset and end of the byte array is too short to hold a uint256)', async () => {});
+ it('should successfully write uint256 when the address takes up the whole array', async () => {
+ const byteArray = testBytes32;
+ const testUint256Offset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteUint256.callAsync(
+ byteArray,
+ testUint256Offset,
+ testUint256B,
+ );
+ const newByteArrayAsUint256 = new BigNumber(newByteArray, 16);
+ return expect(newByteArrayAsUint256).to.be.bignumber.equal(testUint256B);
+ });
+ it('should successfully write uint256 when it is offset in the array', async () => {
+ const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteUint256.callAsync(
+ combinedByteArray,
+ testUint256Offset,
+ testUint256B,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const uint256FromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const uint256FromOffset = new BigNumber(
+ ethUtil.addHexPrefix(ethUtil.bufferToHex(uint256FromOffsetBuffer)),
+ 16,
+ );
+ return expect(uint256FromOffset).to.be.bignumber.equal(testUint256B);
+ });
+ it('should fail if the byte array is too short to hold a uint256', async () => {
+ const offset = new BigNumber(0);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteUint256.callAsync(byteArrayShorterThan32Bytes, offset, testUint256),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a uint256', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteUint256.callAsync(byteArray, badOffset, testUint256),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
});
- */
describe('readFirst4', () => {
+ // AssertionError: expected promise to be rejected with an error including 'revert' but it was fulfilled with '0x08c379a0'
it('should revert if byte array has a length < 4', async () => {
const byteArrayLessThan4Bytes = '0x010101';
- return expect(libBytes.publicReadFirst4.callAsync(byteArrayLessThan4Bytes)).to.be.rejectedWith(
- constants.REVERT,
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadFirst4.callAsync(byteArrayLessThan4Bytes),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_4_LENGTH_REQUIRED,
);
});
it('should return the first 4 bytes of a byte array of arbitrary length', async () => {
@@ -293,4 +458,164 @@ describe('LibBytes', () => {
expect(first4Bytes).to.equal(expectedFirst4Bytes);
});
});
+
+ describe('readBytes', () => {
+ it('should successfully read short, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytes.callAsync(shortTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully read short, nested array of bytes when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, shortTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully read a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytes.callAsync(wordOfTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+ it('should successfully read a nested array of bytes - one word in length - when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, wordOfTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+ it('should successfully read long, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytes.callAsync(longTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should successfully read long, nested array of bytes when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, longTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+ // The length of the nested array is 32 bytes. By storing less than 32 bytes, a length cannot be read.
+ const offset = new BigNumber(0);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes.callAsync(byteArrayShorterThan32Bytes, offset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if we store a nested byte array length, without a nested byte array', async () => {
+ const offset = new BigNumber(0);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes.callAsync(testBytes32, offset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArrayShorterThan32Bytes).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes.callAsync(byteArrayShorterThan32Bytes, badOffset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the nested byte array', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicReadBytes.callAsync(testBytes32, badOffset),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+ );
+ });
+ });
+
+ describe('writeBytes', () => {
+ it('should successfully write short, nested array of bytes when it takes up the whole array)', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, shortData);
+ const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(shortData);
+ });
+ it('should successfully write short, nested array of bytes when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+ new Buffer(prefixDataAsBuffer.byteLength + shortTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, shortData);
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully write a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(new Buffer(wordOfTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, wordOfData);
+ const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(wordOfData);
+ });
+ it('should successfully write a nested array of bytes - one word in length - when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+ new Buffer(prefixDataAsBuffer.byteLength + wordOfTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, wordOfData);
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+ it('should successfully write a long, nested bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(new Buffer(longTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, longData);
+ const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(longData);
+ });
+ it('should successfully write long, nested array of bytes when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+ new Buffer(prefixDataAsBuffer.byteLength + longTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, longData);
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+ const offset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(new Buffer(1));
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteBytes.callAsync(emptyByteArray, offset, longData),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array)', async () => {
+ const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+ const badOffset = new BigNumber(ethUtil.toBuffer(shortTestBytesAsBuffer).byteLength);
+ return expectRevertOrOtherErrorAsync(
+ libBytes.publicWriteBytes.callAsync(emptyByteArray, badOffset, shortData),
+ constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+ );
+ });
+ });
});
+// tslint:disable:max-file-line-count
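
The new `before` block in lib_bytes.ts builds `shortTestBytes`, `wordOfTestBytes`, and `longTestBytes` as a 32-byte big-endian length word followed by the raw data, matching the in-memory `bytes` layout that `readBytes`/`writeBytes` operate on. A minimal standalone sketch of that layout, using a plain Buffer in place of `assetProxyUtils.encodeUint256` (illustrative only, not part of the diff):

// Hedged sketch: reproduces the fixture layout from the `before` block above
// with Node's Buffer instead of assetProxyUtils.encodeUint256.
import ethUtil = require('ethereumjs-util');

const encodeLength = (len: number): Buffer => {
    const word = Buffer.alloc(32);   // 32-byte big-endian length word
    word.writeUInt32BE(len, 28);     // these fixtures fit in the low 4 bytes
    return word;
};

const shortData = Buffer.from('ffffaa', 'hex');
const shortTestBytes = ethUtil.bufferToHex(Buffer.concat([encodeLength(shortData.byteLength), shortData]));
// => 0x0000...0003ffffaa  (length word = 3, then the 3 data bytes)

The word-length and long fixtures follow the same pattern: `wordOfData` is a random 32-byte salt, and `longData` is `shortData` concatenated with `wordOfData`, each prefixed by its own length word.
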
diff --git a/packages/contracts/test/libraries/lib_mem.ts b/packages/contracts/test/libraries/lib_mem.ts
new file mode 100644
index 000000000..00f7c4d8b
--- /dev/null
+++ b/packages/contracts/test/libraries/lib_mem.ts
@@ -0,0 +1,190 @@
+import { BigNumber } from '@0xproject/utils';
+import * as chai from 'chai';
+
+import { TestLibMemContract } from '../../src/generated_contract_wrappers/test_lib_mem';
+import { artifacts } from '../../src/utils/artifacts';
+import { chaiSetup } from '../../src/utils/chai_setup';
+import { provider, txDefaults } from '../../src/utils/web3_wrapper';
+
+chaiSetup.configure();
+const expect = chai.expect;
+
+// BUG: Ideally we would use Buffer.from(memory).toString('hex')
+// https://github.com/Microsoft/TypeScript/issues/23155
+const toHex = (buf: Uint8Array): string => buf.reduce((a, v) => a + ('00' + v.toString(16)).slice(-2), '0x');
+
+const fromHex = (str: string): Uint8Array => Uint8Array.from(Buffer.from(str.slice(2), 'hex'));
+
+describe('LibMem', () => {
+ let testLibMem: TestLibMemContract;
+
+ before(async () => {
+ // Deploy TestLibMem
+ testLibMem = await TestLibMemContract.deployFrom0xArtifactAsync(artifacts.TestLibMem, provider, txDefaults);
+ });
+
+ describe('memCopy', () => {
+ // Create memory 0x000102...FF
+ const memSize = 256;
+ const memory = new Uint8Array(memSize).map((_, i) => i);
+ const memHex = toHex(memory);
+
+ // Reference implementation to test against
+ const refMemcpy = (_mem: Uint8Array, dest: number, source: number, length: number): Uint8Array =>
+ Uint8Array.from(memory).copyWithin(dest, source, source + length);
+
+ // Test vectors: destination, source, length, job description
+ type Tests = Array<[number, number, number, string]>;
+
+ const test = (tests: Tests) =>
+ tests.forEach(([dest, source, length, job]) =>
+ it(job, async () => {
+ const expected = refMemcpy(memory, dest, source, length);
+ const resultStr = await testLibMem.testMemcpy.callAsync(
+ memHex,
+ new BigNumber(dest),
+ new BigNumber(source),
+ new BigNumber(length),
+ );
+ const result = fromHex(resultStr);
+ expect(result).to.deep.equal(expected);
+ }),
+ );
+
+ test([[0, 0, 0, 'copies zero bytes with overlap']]);
+
+ describe('copies forward', () =>
+ test([
+ [128, 0, 0, 'zero bytes'],
+ [128, 0, 1, 'one byte'],
+ [128, 0, 11, 'eleven bytes'],
+ [128, 0, 31, 'thirty-one bytes'],
+ [128, 0, 32, 'one word'],
+ [128, 0, 64, 'two words'],
+ [128, 0, 96, 'three words'],
+ [128, 0, 33, 'one word and one byte'],
+ [128, 0, 72, 'two words and eight bytes'],
+ [128, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward within one word', () =>
+ test([
+ [16, 0, 0, 'zero bytes'],
+ [16, 0, 1, 'one byte'],
+ [16, 0, 11, 'eleven bytes'],
+ [16, 0, 16, 'sixteen bytes'],
+ ]));
+
+ describe('copies forward with one byte overlap', () =>
+ test([
+ [0, 0, 1, 'one byte'],
+ [10, 0, 11, 'eleven bytes'],
+ [30, 0, 31, 'thirty-one bytes'],
+ [31, 0, 32, 'one word'],
+ [32, 0, 33, 'one word and one byte'],
+ [71, 0, 72, 'two words and eight bytes'],
+ [99, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with thirty-one bytes overlap', () =>
+ test([
+ [0, 0, 31, 'thirty-one bytes'],
+ [1, 0, 32, 'one word'],
+ [2, 0, 33, 'one word and one byte'],
+ [41, 0, 72, 'two words and eight bytes'],
+ [69, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with one word overlap', () =>
+ test([
+ [0, 0, 32, 'one word'],
+ [1, 0, 33, 'one word and one byte'],
+ [41, 0, 72, 'two words and eight bytes'],
+ [69, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with one word and one byte overlap', () =>
+ test([
+ [0, 0, 33, 'one word and one byte'],
+ [40, 0, 72, 'two words and eight bytes'],
+ [68, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with two words overlap', () =>
+ test([
+ [0, 0, 64, 'two words'],
+ [8, 0, 72, 'two words and eight bytes'],
+ [36, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward within one word and one byte overlap', () =>
+ test([[0, 0, 1, 'one byte'], [10, 0, 11, 'eleven bytes'], [15, 0, 16, 'sixteen bytes']]));
+
+ describe('copies backward', () =>
+ test([
+ [0, 128, 0, 'zero bytes'],
+ [0, 128, 1, 'one byte'],
+ [0, 128, 11, 'eleven bytes'],
+ [0, 128, 31, 'thirty-one bytes'],
+ [0, 128, 32, 'one word'],
+ [0, 128, 64, 'two words'],
+ [0, 128, 96, 'three words'],
+ [0, 128, 33, 'one word and one byte'],
+ [0, 128, 72, 'two words and eight bytes'],
+ [0, 128, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward within one word', () =>
+ test([
+ [0, 16, 0, 'zero bytes'],
+ [0, 16, 1, 'one byte'],
+ [0, 16, 11, 'eleven bytes'],
+ [0, 16, 16, 'sixteen bytes'],
+ ]));
+
+ describe('copies backward with one byte overlap', () =>
+ test([
+ [0, 0, 1, 'one byte'],
+ [0, 10, 11, 'eleven bytes'],
+ [0, 30, 31, 'thirty-one bytes'],
+ [0, 31, 32, 'one word'],
+ [0, 32, 33, 'one word and one byte'],
+ [0, 71, 72, 'two words and eight bytes'],
+ [0, 99, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with thirty-one bytes overlap', () =>
+ test([
+ [0, 0, 31, 'thirty-one bytes'],
+ [0, 1, 32, 'one word'],
+ [0, 2, 33, 'one word and one byte'],
+ [0, 41, 72, 'two words and eight bytes'],
+ [0, 69, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with one word overlap', () =>
+ test([
+ [0, 0, 32, 'one word'],
+ [0, 1, 33, 'one word and one byte'],
+ [0, 41, 72, 'two words and eight bytes'],
+ [0, 69, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with one word and one byte overlap', () =>
+ test([
+ [0, 0, 33, 'one word and one byte'],
+ [0, 40, 72, 'two words and eight bytes'],
+ [0, 68, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with two words overlap', () =>
+ test([
+ [0, 0, 64, 'two words'],
+ [0, 8, 72, 'two words and eight bytes'],
+ [0, 36, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward within one word and one byte overlap', () =>
+ test([[0, 0, 1, 'one byte'], [0, 10, 11, 'eleven bytes'], [0, 15, 16, 'sixteen bytes']]));
+ });
+});
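
Each row in the vector tables above is `[dest, source, length, description]`; the expected output comes from `refMemcpy`, which is just `copyWithin` on a copy of the 256-byte pattern `0x00..0xff`, compared against the bytes returned by `testMemcpy` on-chain. An off-chain illustration of one vector (no contract call involved):

// Hedged illustration of how one test vector is evaluated off-chain;
// this mirrors refMemcpy in the new test file, not contract code.
const memory = new Uint8Array(256).map((_, i) => i); // 0x00, 0x01, ..., 0xff

const refMemcpy = (dest: number, source: number, length: number): Uint8Array =>
    Uint8Array.from(memory).copyWithin(dest, source, source + length);

// Vector [128, 0, 32, 'one word']: bytes 0..31 overwrite bytes 128..159.
const expected = refMemcpy(128, 0, 32);
console.log(expected[128]); // 0   — first copied byte
console.log(expected[160]); // 160 — first byte after the copied word, untouched
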