| author | Fabio Berger <me@fabioberger.com> | 2018-06-12 06:23:48 +0800 |
| --- | --- | --- |
| committer | Fabio Berger <me@fabioberger.com> | 2018-06-12 06:23:48 +0800 |
| commit | 7e78f5941ad9cb2fd5225899f79823c7376894c0 (patch) | |
| tree | 13fadf90f821e9fead1373a2e567e6d86fe27e0f /packages/contracts/test/libraries | |
| parent | 20f93185975d16c77492f05eb86dd89695539cd9 (diff) | |
| parent | bc0ae6be318a15bf8670a6da9a59d9bdb12cadae (diff) | |
Merge branch 'v2-prototype' into feature/combinatorial-testing
* v2-prototype: (68 commits)
Stop exporting ArtifactWriter
Fix no-unused-variable tslint rule to include parameters and fix issues
Fix linter exclude rule
Validate all signature types rather than only ECSignatures
Store the instantiated OrderValidationUtils
Remove global hooks from tests and deploy contracts from within the specific tests
Add EmitStatement to ASTVisitor
Fix tslint issues
Add back artifacts file
Fix a bug in SolCompilerArtifacts adapter config overriding
Move OrderValidationUtils (+ tests) and ExchangeTransferSimulator to order-utils
export parseECSignature method
Export ArtifactWriter from migrations package
Remove unused artifact file
Pass in generated contract wrapper to orderValidationUtils at instantiation
Refactor orderValidationUtils to use the generated contract wrapper instead of the higher-level one
Refactor ExchangeTransferSimulator public interface to accept an AbstractBalanceAndProxyAllowanceLazyStore so that this module can be re-used in different contexts.
Increase timeout for contract migrations
Remove some copy-paste code
Await transactions in migrations
...
Diffstat (limited to 'packages/contracts/test/libraries')
| mode | path | lines changed |
| --- | --- | --- |
| -rw-r--r-- | packages/contracts/test/libraries/lib_bytes.ts | 224 |
| -rw-r--r-- | packages/contracts/test/libraries/lib_mem.ts | 190 |
2 files changed, 405 insertions, 9 deletions
diff --git a/packages/contracts/test/libraries/lib_bytes.ts b/packages/contracts/test/libraries/lib_bytes.ts
index 0996cdc84..2fefb7aeb 100644
--- a/packages/contracts/test/libraries/lib_bytes.ts
+++ b/packages/contracts/test/libraries/lib_bytes.ts
@@ -1,4 +1,5 @@
 import { BlockchainLifecycle } from '@0xproject/dev-utils';
+import { assetProxyUtils, generatePseudoRandomSalt } from '@0xproject/order-utils';
 import { BigNumber } from '@0xproject/utils';
 import BN = require('bn.js');
 import * as chai from 'chai';
@@ -28,6 +29,15 @@ describe('LibBytes', () => {
     let testAddress: string;
     const testBytes32 = '0x102030405060708090a0b0c0d0e0f0102030405060708090a0b0c0d0e0f01020';
     const testUint256 = new BigNumber(testBytes32, 16);
+    let shortData: string;
+    let shortTestBytes: string;
+    let shortTestBytesAsBuffer: Buffer;
+    let wordOfData: string;
+    let wordOfTestBytes: string;
+    let wordOfTestBytesAsBuffer: Buffer;
+    let longData: string;
+    let longTestBytes: string;
+    let longTestBytesAsBuffer: Buffer;

     before(async () => {
         await blockchainLifecycle.startAsync();
@@ -48,6 +58,26 @@ describe('LibBytes', () => {
         expect(byteArrayLongerThan32BytesLength).to.be.greaterThan(32);
         const testBytes32Length = ethUtil.toBuffer(testBytes32).byteLength;
         expect(testBytes32Length).to.be.equal(32);
+        // Create short test bytes
+        shortData = '0xffffaa';
+        const encodedShortData = ethUtil.toBuffer(shortData);
+        const shortDataLength = new BigNumber(encodedShortData.byteLength);
+        const encodedShortDataLength = assetProxyUtils.encodeUint256(shortDataLength);
+        shortTestBytesAsBuffer = Buffer.concat([encodedShortDataLength, encodedShortData]);
+        shortTestBytes = ethUtil.bufferToHex(shortTestBytesAsBuffer);
+        // Create test bytes one word in length
+        wordOfData = ethUtil.bufferToHex(assetProxyUtils.encodeUint256(generatePseudoRandomSalt()));
+        const encodedWordOfData = ethUtil.toBuffer(wordOfData);
+        const wordOfDataLength = new BigNumber(encodedWordOfData.byteLength);
+        const encodedWordOfDataLength = assetProxyUtils.encodeUint256(wordOfDataLength);
+        wordOfTestBytesAsBuffer = Buffer.concat([encodedWordOfDataLength, encodedWordOfData]);
+        wordOfTestBytes = ethUtil.bufferToHex(wordOfTestBytesAsBuffer);
+        // Create long test bytes (combines short test bytes with word of test bytes)
+        longData = ethUtil.bufferToHex(Buffer.concat([encodedShortData, encodedWordOfData]));
+        const longDataLength = new BigNumber(encodedShortData.byteLength + encodedWordOfData.byteLength);
+        const encodedLongDataLength = assetProxyUtils.encodeUint256(longDataLength);
+        longTestBytesAsBuffer = Buffer.concat([encodedLongDataLength, encodedShortData, encodedWordOfData]);
+        longTestBytes = ethUtil.bufferToHex(longTestBytesAsBuffer);
     });
     beforeEach(async () => {
         await blockchainLifecycle.startAsync();
@@ -60,7 +90,7 @@ describe('LibBytes', () => {
         it('should revert if length is 0', async () => {
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicPopByte.callAsync(constants.NULL_BYTES),
-                constants.LIB_BYTES_GT_ZERO_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_THAN_ZERO_LENGTH_REQUIRED,
             );
         });

@@ -77,7 +107,7 @@ describe('LibBytes', () => {
         it('should revert if length is less than 20', async () => {
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicPopAddress.callAsync(byteArrayShorterThan20Bytes),
-                constants.LIB_BYTES_GTE_20_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
             );
         });

@@ -163,7 +193,7 @@ describe('LibBytes', () => {
             const offset = new BigNumber(0);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadAddress.callAsync(shortByteArray, offset),
-                constants.LIB_BYTES_GTE_20_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
             );
         });

@@ -172,7 +202,7 @@ describe('LibBytes', () => {
             const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadAddress.callAsync(byteArray, badOffset),
-                constants.LIB_BYTES_GTE_20_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED,
             );
         });
     });
@@ -209,7 +239,7 @@ describe('LibBytes', () => {
             const offset = new BigNumber(0);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadBytes32.callAsync(byteArrayShorterThan32Bytes, offset),
-                constants.LIB_BYTES_GTE_32_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
             );
         });

@@ -217,7 +247,7 @@ describe('LibBytes', () => {
             const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadBytes32.callAsync(testBytes32, badOffset),
-                constants.LIB_BYTES_GTE_32_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
             );
         });
     });
@@ -258,7 +288,7 @@ describe('LibBytes', () => {
             const offset = new BigNumber(0);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadUint256.callAsync(byteArrayShorterThan32Bytes, offset),
-                constants.LIB_BYTES_GTE_32_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
             );
         });

@@ -269,7 +299,7 @@ describe('LibBytes', () => {
             const badOffset = new BigNumber(testUint256AsBuffer.byteLength);
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadUint256.callAsync(byteArray, badOffset),
-                constants.LIB_BYTES_GTE_32_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
             );
         });
     });
@@ -291,7 +321,7 @@ describe('LibBytes', () => {
             const byteArrayLessThan4Bytes = '0x010101';
             return expectRevertOrOtherErrorAsync(
                 libBytes.publicReadFirst4.callAsync(byteArrayLessThan4Bytes),
-                constants.LIB_BYTES_GTE_4_LENGTH_REQUIRED,
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_4_LENGTH_REQUIRED,
             );
         });
         it('should return the first 4 bytes of a byte array of arbitrary length', async () => {
@@ -300,4 +330,180 @@
             expect(first4Bytes).to.equal(expectedFirst4Bytes);
         });
     });
+
+    describe('readBytes', () => {
+        it('should successfully read short, nested array of bytes when it takes up the whole array', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const bytes = await libBytes.publicReadBytes.callAsync(shortTestBytes, testBytesOffset);
+            return expect(bytes).to.be.equal(shortData);
+        });
+
+        it('should successfully read short, nested array of bytes when it is offset in the array', async () => {
+            const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+            const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, shortTestBytesAsBuffer]);
+            const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+            const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+            const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+            return expect(bytes).to.be.equal(shortData);
+        });
+
+        it('should successfully read a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const bytes = await libBytes.publicReadBytes.callAsync(wordOfTestBytes, testBytesOffset);
+            return expect(bytes).to.be.equal(wordOfData);
+        });
+
+        it('should successfully read a nested array of bytes - one word in length - when it is offset in the array', async () => {
+            const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+            const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, wordOfTestBytesAsBuffer]);
+            const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+            const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+            const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+            return expect(bytes).to.be.equal(wordOfData);
+        });
+
+        it('should successfully read long, nested array of bytes when it takes up the whole array', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const bytes = await libBytes.publicReadBytes.callAsync(longTestBytes, testBytesOffset);
+            return expect(bytes).to.be.equal(longData);
+        });
+
+        it('should successfully read long, nested array of bytes when it is offset in the array', async () => {
+            const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+            const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, longTestBytesAsBuffer]);
+            const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+            const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+            const bytes = await libBytes.publicReadBytes.callAsync(combinedByteArray, testUint256Offset);
+            return expect(bytes).to.be.equal(longData);
+        });
+
+        it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+            // The length of the nested array is 32 bytes. By storing less than 32 bytes, a length cannot be read.
+            const offset = new BigNumber(0);
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicReadBytes.callAsync(byteArrayShorterThan32Bytes, offset),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+            );
+        });
+
+        it('should fail if we store a nested byte array length, without a nested byte array', async () => {
+            const offset = new BigNumber(0);
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicReadBytes.callAsync(testBytes32, offset),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+            );
+        });
+
+        it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array', async () => {
+            const badOffset = new BigNumber(ethUtil.toBuffer(byteArrayShorterThan32Bytes).byteLength);
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicReadBytes.callAsync(byteArrayShorterThan32Bytes, badOffset),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+            );
+        });
+
+        it('should fail if the length between the offset and end of the byte array is too short to hold the nested byte array', async () => {
+            const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicReadBytes.callAsync(testBytes32, badOffset),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED,
+            );
+        });
+    });
+
+    describe('writeBytes', () => {
+        it('should successfully write short, nested array of bytes when it takes up the whole array)', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+            const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, shortData);
+            const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytesRead).to.be.equal(shortData);
+        });
+
+        it('should successfully write short, nested array of bytes when it is offset in the array', async () => {
+            // Write a prefix to the array
+            const prefixData = '0xabcdef';
+            const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+            const prefixOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(
+                new Buffer(prefixDataAsBuffer.byteLength + shortTestBytesAsBuffer.byteLength),
+            );
+            let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+            // Write data after prefix
+            const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+            bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, shortData);
+            // Read data after prefix and validate
+            const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytes).to.be.equal(shortData);
+        });
+
+        it('should successfully write a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(new Buffer(wordOfTestBytesAsBuffer.byteLength));
+            const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, wordOfData);
+            const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytesRead).to.be.equal(wordOfData);
+        });
+
+        it('should successfully write a nested array of bytes - one word in length - when it is offset in the array', async () => {
+            // Write a prefix to the array
+            const prefixData = '0xabcdef';
+            const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+            const prefixOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(
+                new Buffer(prefixDataAsBuffer.byteLength + wordOfTestBytesAsBuffer.byteLength),
+            );
+            let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+            // Write data after prefix
+            const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+            bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, wordOfData);
+            // Read data after prefix and validate
+            const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytes).to.be.equal(wordOfData);
+        });
+
+        it('should successfully write a long, nested bytes when it takes up the whole array', async () => {
+            const testBytesOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(new Buffer(longTestBytesAsBuffer.byteLength));
+            const bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, testBytesOffset, longData);
+            const bytesRead = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytesRead).to.be.equal(longData);
+        });
+
+        it('should successfully write long, nested array of bytes when it is offset in the array', async () => {
+            // Write a prefix to the array
+            const prefixData = '0xabcdef';
+            const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+            const prefixOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(
+                new Buffer(prefixDataAsBuffer.byteLength + longTestBytesAsBuffer.byteLength),
+            );
+            let bytesWritten = await libBytes.publicWriteBytes.callAsync(emptyByteArray, prefixOffset, prefixData);
+            // Write data after prefix
+            const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+            bytesWritten = await libBytes.publicWriteBytes.callAsync(bytesWritten, testBytesOffset, longData);
+            // Read data after prefix and validate
+            const bytes = await libBytes.publicReadBytes.callAsync(bytesWritten, testBytesOffset);
+            return expect(bytes).to.be.equal(longData);
+        });
+
+        it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+            const offset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(new Buffer(1));
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicWriteBytes.callAsync(emptyByteArray, offset, longData),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+            );
+        });
+
+        it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array)', async () => {
+            const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+            const badOffset = new BigNumber(ethUtil.toBuffer(shortTestBytesAsBuffer).byteLength);
+            return expectRevertOrOtherErrorAsync(
+                libBytes.publicWriteBytes.callAsync(emptyByteArray, badOffset, shortData),
+                constants.LIB_BYTES_GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED,
+            );
+        });
+    });
 });
+// tslint:disable:max-file-line-count
diff --git a/packages/contracts/test/libraries/lib_mem.ts b/packages/contracts/test/libraries/lib_mem.ts
new file mode 100644
index 000000000..00f7c4d8b
--- /dev/null
+++ b/packages/contracts/test/libraries/lib_mem.ts
@@ -0,0 +1,190 @@
+import { BigNumber } from '@0xproject/utils';
+import * as chai from 'chai';
+
+import { TestLibMemContract } from '../../src/generated_contract_wrappers/test_lib_mem';
+import { artifacts } from '../../src/utils/artifacts';
+import { chaiSetup } from '../../src/utils/chai_setup';
+import { provider, txDefaults } from '../../src/utils/web3_wrapper';
+
+chaiSetup.configure();
+const expect = chai.expect;
+
+// BUG: Ideally we would use Buffer.from(memory).toString('hex')
+// https://github.com/Microsoft/TypeScript/issues/23155
+const toHex = (buf: Uint8Array): string => buf.reduce((a, v) => a + ('00' + v.toString(16)).slice(-2), '0x');
+
+const fromHex = (str: string): Uint8Array => Uint8Array.from(Buffer.from(str.slice(2), 'hex'));
+
+describe('LibMem', () => {
+    let testLibMem: TestLibMemContract;
+
+    before(async () => {
+        // Deploy TestLibMem
+        testLibMem = await TestLibMemContract.deployFrom0xArtifactAsync(artifacts.TestLibMem, provider, txDefaults);
+    });
+
+    describe('memCopy', () => {
+        // Create memory 0x000102...FF
+        const memSize = 256;
+        const memory = new Uint8Array(memSize).map((_, i) => i);
+        const memHex = toHex(memory);
+
+        // Reference implementation to test against
+        const refMemcpy = (_mem: Uint8Array, dest: number, source: number, length: number): Uint8Array =>
+            Uint8Array.from(memory).copyWithin(dest, source, source + length);
+
+        // Test vectors: destination, source, length, job description
+        type Tests = Array<[number, number, number, string]>;
+
+        const test = (tests: Tests) =>
+            tests.forEach(([dest, source, length, job]) =>
+                it(job, async () => {
+                    const expected = refMemcpy(memory, dest, source, length);
+                    const resultStr = await testLibMem.testMemcpy.callAsync(
+                        memHex,
+                        new BigNumber(dest),
+                        new BigNumber(source),
+                        new BigNumber(length),
+                    );
+                    const result = fromHex(resultStr);
+                    expect(result).to.deep.equal(expected);
+                }),
+            );
+
+        test([[0, 0, 0, 'copies zero bytes with overlap']]);
+
+        describe('copies forward', () =>
+            test([
+                [128, 0, 0, 'zero bytes'],
+                [128, 0, 1, 'one byte'],
+                [128, 0, 11, 'eleven bytes'],
+                [128, 0, 31, 'thirty-one bytes'],
+                [128, 0, 32, 'one word'],
+                [128, 0, 64, 'two words'],
+                [128, 0, 96, 'three words'],
+                [128, 0, 33, 'one word and one byte'],
+                [128, 0, 72, 'two words and eight bytes'],
+                [128, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward within one word', () =>
+            test([
+                [16, 0, 0, 'zero bytes'],
+                [16, 0, 1, 'one byte'],
+                [16, 0, 11, 'eleven bytes'],
+                [16, 0, 16, 'sixteen bytes'],
+            ]));
+
+        describe('copies forward with one byte overlap', () =>
+            test([
+                [0, 0, 1, 'one byte'],
+                [10, 0, 11, 'eleven bytes'],
+                [30, 0, 31, 'thirty-one bytes'],
+                [31, 0, 32, 'one word'],
+                [32, 0, 33, 'one word and one byte'],
+                [71, 0, 72, 'two words and eight bytes'],
+                [99, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward with thirty-one bytes overlap', () =>
+            test([
+                [0, 0, 31, 'thirty-one bytes'],
+                [1, 0, 32, 'one word'],
+                [2, 0, 33, 'one word and one byte'],
+                [41, 0, 72, 'two words and eight bytes'],
+                [69, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward with one word overlap', () =>
+            test([
+                [0, 0, 32, 'one word'],
+                [1, 0, 33, 'one word and one byte'],
+                [41, 0, 72, 'two words and eight bytes'],
+                [69, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward with one word and one byte overlap', () =>
+            test([
+                [0, 0, 33, 'one word and one byte'],
+                [40, 0, 72, 'two words and eight bytes'],
+                [68, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward with two words overlap', () =>
+            test([
+                [0, 0, 64, 'two words'],
+                [8, 0, 72, 'two words and eight bytes'],
+                [36, 0, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward within one word and one byte overlap', () =>
+            test([[0, 0, 1, 'one byte'], [10, 0, 11, 'eleven bytes'], [15, 0, 16, 'sixteen bytes']]));
+
+        describe('copies backward', () =>
+            test([
+                [0, 128, 0, 'zero bytes'],
+                [0, 128, 1, 'one byte'],
+                [0, 128, 11, 'eleven bytes'],
+                [0, 128, 31, 'thirty-one bytes'],
+                [0, 128, 32, 'one word'],
+                [0, 128, 64, 'two words'],
+                [0, 128, 96, 'three words'],
+                [0, 128, 33, 'one word and one byte'],
+                [0, 128, 72, 'two words and eight bytes'],
+                [0, 128, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies backward within one word', () =>
+            test([
+                [0, 16, 0, 'zero bytes'],
+                [0, 16, 1, 'one byte'],
+                [0, 16, 11, 'eleven bytes'],
+                [0, 16, 16, 'sixteen bytes'],
+            ]));
+
+        describe('copies backward with one byte overlap', () =>
+            test([
+                [0, 0, 1, 'one byte'],
+                [0, 10, 11, 'eleven bytes'],
+                [0, 30, 31, 'thirty-one bytes'],
+                [0, 31, 32, 'one word'],
+                [0, 32, 33, 'one word and one byte'],
+                [0, 71, 72, 'two words and eight bytes'],
+                [0, 99, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies backward with thirty-one bytes overlap', () =>
+            test([
+                [0, 0, 31, 'thirty-one bytes'],
+                [0, 1, 32, 'one word'],
+                [0, 2, 33, 'one word and one byte'],
+                [0, 41, 72, 'two words and eight bytes'],
+                [0, 69, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies backward with one word overlap', () =>
+            test([
+                [0, 0, 32, 'one word'],
+                [0, 1, 33, 'one word and one byte'],
+                [0, 41, 72, 'two words and eight bytes'],
+                [0, 69, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies backward with one word and one byte overlap', () =>
+            test([
+                [0, 0, 33, 'one word and one byte'],
+                [0, 40, 72, 'two words and eight bytes'],
+                [0, 68, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies backward with two words overlap', () =>
+            test([
+                [0, 0, 64, 'two words'],
+                [0, 8, 72, 'two words and eight bytes'],
+                [0, 36, 100, 'three words and four bytes'],
+            ]));
+
+        describe('copies forward within one word and one byte overlap', () =>
+            test([[0, 0, 1, 'one byte'], [0, 10, 11, 'eleven bytes'], [0, 15, 16, 'sixteen bytes']]));
+    });
+});