JS: Change hash function to sha512

For browser support we use npm asmcrypto.js-sha512. For node we use its
builtin crypto module.
This commit is contained in:
Erik Arvidsson
2016-07-12 11:07:38 -07:00
parent 454a7d5b48
commit f2a83346ca
29 changed files with 430 additions and 136 deletions

View File

@@ -65,8 +65,8 @@ func (suite *ChunkStoreTestSuite) TestChunkStoreRoot() {
oldRoot := suite.Store.Root()
suite.True(oldRoot.IsEmpty())
bogusRoot := hash.Parse("8888899999aaaaabbbbbcccccdddddee")
newRoot := hash.Parse("11111222223333344444555556666677")
bogusRoot := hash.Parse("8habda5skfek1265pc5d5l1orptn5dr0")
newRoot := hash.Parse("8la6qjbh81v85r6q67lqbfrkmpds14lg")
// Try to update root with bogus oldRoot
result := suite.Store.UpdateRoot(newRoot, bogusRoot)

View File

@@ -1,7 +1,7 @@
{
"name": "@attic/noms",
"license": "Apache-2.0",
"version": "50.0.1",
"version": "51.0.0",
"description": "Noms JS SDK",
"repository": "https://github.com/attic-labs/noms",
"main": "dist/commonjs/noms.js",
@@ -10,9 +10,9 @@
"npm": ">=3"
},
"dependencies": {
"asmcrypto.js-sha512": "^0.0.1",
"babel-regenerator-runtime": "^6.5.0",
"babel-runtime": "^6.9.2",
"rusha": "^0.8.3",
"text-encoding-utf-8": "^1.0.1",
"tingodb": "^0.4.2",
"signed-varint": "2.0.0"

67
js/rebase-shas.js Normal file
View File

@@ -0,0 +1,67 @@
// Reads a unified diff on stdin, collects every "-sha1-<hex>" / "+sha1-<hex>"
// pair into a replacement map, and once stdin closes swaps those hashes in
// the JS and Go test files (see fixTests below).
'use strict';
const readline = require('readline');
const glob = require('glob');
const fs = require('fs');

// Line-by-line reader over the diff piped in on stdin.
const rl = readline.createInterface({
  input: process.stdin,
});

// Maps one hash string to its counterpart; swap() applies it in either
// direction, so entries are effectively unordered pairs.
const replacements = new Map();
// Most recently seen removed ("-") and added ("+") hash, awaiting pairing.
let minus, plus;
rl.on('line', line => {
  {
    // A removed diff line carries the old hash.
    const m = line.match(/\-(sha1\-[a-f0-9]{40})/);
    if (m) {
      minus = m[1];
    }
  }
  {
    // An added diff line carries the new hash.
    const m = line.match(/\+(sha1\-[a-f0-9]{40})/);
    if (m) {
      plus = m[1];
    }
  }
  // Once both halves of a pair have been seen, record them and reset.
  if (minus && plus) {
    // console.log(minus, ' -> ', plus);
    replacements.set(minus, plus);
    minus = plus = undefined;
  }
});
// The whole diff has been consumed: rewrite the test files.
rl.on('close', fixTests);
/**
 * glob() callback: applies the collected hash replacements to every matched
 * file.
 *
 * The original ignored `err`, so a glob failure left `files` undefined and
 * crashed in the for..of; now a failure is reported and the pass is skipped.
 */
function fixFiles(err, files) {
  if (err) {
    console.error(err);
    return;
  }
  for (const path of files) {
    fixFile(path);
  }
}
/**
 * Rewrites a single file in place, running every known replacement pair
 * through swap() on each line.
 */
function fixFile(path) {
  const original = fs.readFileSync(path, 'utf8');
  const fixedLines = original.split('\n').map(line => {
    for (const [from, to] of replacements) {
      line = swap(line, from, to);
    }
    return line;
  });
  fs.writeFileSync(path, fixedLines.join('\n'));
}
/**
 * Globs the JS test files here and the Go test files one directory up, and
 * hands each match list to fixFiles.
 */
function fixTests() {
  const patterns = ['**/*-test.js', '../**/*_test.go'];
  for (const pattern of patterns) {
    glob(pattern, fixFiles);
  }
}
/**
 * Replaces the first occurrence of s1 with s2 on the line; if s1 is absent,
 * replaces the first occurrence of s2 with s1 instead. Lines containing
 * neither string are returned unchanged.
 */
function swap(line, s1, s2) {
  if (line.includes(s1)) {
    return line.replace(s1, s2);
  }
  return line.includes(s2) ? line.replace(s2, s1) : line;
}

105
js/src/base32.js Normal file
View File

@@ -0,0 +1,105 @@
// @flow
// Copyright 2016 The Noms Authors. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
// This is based on https://github.com/chrisumbel/thirty-two which is
/*
Copyright (c) 2011, Chris Umbel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
// The following changes have been done:
// 1. Change the alphabet to 0-9a-v
// 2. No padding. This is only meant to be used with Noms Hash.
// 3. Use Uin8Array to work in a browser
// 4. Flow/EsLint
import {alloc} from './bytes.js';
const charTable = '0123456789abcdefghijklmnopqrstuv';
export function encode(plain: Uint8Array): string {
let i = 0;
let shiftIndex = 0;
let digit = 0;
let encoded = '';
const len = plain.length;
// byte by byte isn't as pretty as quintet by quintet but tests a bit
// faster. will have to revisit.
while (i < len) {
const current = plain[i];
if (shiftIndex > 3) {
digit = current & (0xff >> shiftIndex);
shiftIndex = (shiftIndex + 5) % 8;
digit = (digit << shiftIndex) | ((i + 1 < len) ? plain[i + 1] : 0) >> (8 - shiftIndex);
i++;
} else {
digit = (current >> (8 - (shiftIndex + 5))) & 0x1f;
shiftIndex = (shiftIndex + 5) % 8;
if (shiftIndex === 0) {
i++;
}
}
encoded += charTable[digit];
}
// No padding!
return encoded;
}
export function decode(encoded: string): Uint8Array {
let shiftIndex = 0;
let plainChar = 0;
let plainPos = 0;
const decoded = alloc(32);
// byte by byte isn't as pretty as octet by octet but tests a bit faster. will have to revisit.
for (let i = 0; i < encoded.length; i++) {
const plainDigit = charCodeToNum(encoded.charCodeAt(i));
if (shiftIndex <= 3) {
shiftIndex = (shiftIndex + 5) % 8;
if (shiftIndex === 0) {
decoded[plainPos++] = plainChar | plainDigit;
plainChar = 0;
} else {
plainChar |= 0xff & (plainDigit << (8 - shiftIndex));
}
} else {
shiftIndex = (shiftIndex + 5) % 8;
decoded[plainPos++] = plainChar | 0xff & (plainDigit >>> shiftIndex);
plainChar = 0xff & (plainDigit << (8 - shiftIndex));
}
}
return decoded;
}
function charCodeToNum(cc: number): number {
// This only accepts the char code for '0' - '9', 'a' - 'v'
return cc - (cc <= 57 ? 48 : 87); // '9', '0', 'a' - 10
}

View File

@@ -158,19 +158,19 @@ suite('Blob', () => {
}
test('Blob 1K', () =>
blobTestSuite(10, 'sha1-225cb62f282db9950802a8a0dce55b577af16e86', 3, 2, 2));
blobTestSuite(10, 'drbq98mhjfhgsvb7kl2ijg71eoiqnbqf', 3, 2, 2));
test('LONG: Blob 4K', () =>
blobTestSuite(12, 'sha1-5171d9ff4c8b7420a22cdec5c1282b6fbcafa0d5', 9, 2, 2));
blobTestSuite(12, '7hlg5p73koq5o2r67rmaqockgoths01c', 9, 2, 2));
test('LONG: Blob 16K', () =>
blobTestSuite(14, 'sha1-8741539c258f9c464b08d099cb2521f19138eae7', 2, 2, 2));
blobTestSuite(14, '32tqt716moooai9i7fhb2louq7ik49du', 2, 2, 2));
test('LONG: Blob 64K', () =>
blobTestSuite(16, 'sha1-f2563df4e20835fb3402837272a24f58e9e48bd8', 3, 2, 2));
blobTestSuite(16, '5d355t85ru7vidao5r26rv8ba5r2ftb0', 4, 2, 2));
test('LONG: Blob 256K', () =>
blobTestSuite(18, 'sha1-f97d8d77fb1e3ef21f2ccccbde810151b4e8c4e9', 8, 2, 2));
blobTestSuite(18, 'g491n4skkncon9o951shu67u92iajjkj', 10, 2, 2));
suite('BlobWriter', () => {
let db;

View File

@@ -5,11 +5,10 @@
// http://www.apache.org/licenses/LICENSE-2.0
import {TextEncoder, TextDecoder} from './text-encoding.js';
import Rusha from 'rusha';
import {SHA512} from 'asmcrypto.js-sha512';
const decoder = new TextDecoder();
const encoder = new TextEncoder();
const r = new Rusha();
const littleEndian = true;
export function alloc(size: number): Uint8Array {
@@ -106,9 +105,13 @@ export function compare(b1: Uint8Array, b2: Uint8Array): number {
return 0;
}
export function sha1(data: Uint8Array): Uint8Array {
const ta = r.rawDigest(data);
return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
// This should be imported but this prevents the cyclic dependency.
const byteLength = 20;
export function sha512(data: Uint8Array): Uint8Array {
const full: Uint8Array = SHA512.bytes(data);
// Safari does not have slice on Uint8Array yet.
return new Uint8Array(full.buffer, full.byteOffset, byteLength);
}
function asciiToBinary(cc: number): number {

View File

@@ -168,11 +168,11 @@ suite('Bytes', () => {
test(BrowserBytes, -1, [0, 2, 3], [1, 2, 3]);
});
test('sha1', () => {
test('sha512', () => {
function test(arr: number[]) {
// Node uses a Buffer, browser uses a Uint8Array
const n = NodeBytes.sha1(NodeBytes.fromValues(arr));
const b = BrowserBytes.sha1(BrowserBytes.fromValues(arr));
const n = NodeBytes.sha512(NodeBytes.fromValues(arr));
const b = BrowserBytes.sha512(BrowserBytes.fromValues(arr));
assertUint8Equal(n, b);
}

View File

@@ -75,7 +75,7 @@ export function readUtf8(buff: Uint8Array, start: number, end: number): string {
export function encodeUtf8(str: string, buff: Uint8Array, dv: DataView, offset: number): number {
const size = Buffer.byteLength(str);
// $FlowIssue
buff.writeUInt32LE(size, offset);
buff.writeUInt32BE(size, offset);
offset += 4;
// $FlowIssue
@@ -89,8 +89,11 @@ export function compare(b1: Uint8Array, b2: Uint8Array): number {
return b1.compare(b2);
}
export function sha1(data: Uint8Array): Uint8Array {
const hash = crypto.createHash('sha1');
/**
* Returns the first 20 bytes of the sha512 of data.
*/
export function sha512(data: Uint8Array): Uint8Array {
const hash = crypto.createHash('sha512');
hash.update(data);
return hash.digest();
return hash.digest().slice(0, 20);
}

View File

@@ -5,15 +5,14 @@
// http://www.apache.org/licenses/LICENSE-2.0
import Chunk from './chunk.js';
import Hash from './hash.js';
import Hash, {byteLength as hashByteLength} from './hash.js';
import {invariant} from './assert.js';
import * as Bytes from './bytes.js';
const headerSize = 4; // uint32
const bigEndian = false; // Passing false to DataView methods makes them use big-endian byte order.
const sha1Size = 20;
const chunkLengthSize = 4; // uint32
const chunkHeaderSize = sha1Size + chunkLengthSize;
const chunkHeaderSize = hashByteLength + chunkLengthSize;
export type ChunkStream = (cb: (chunk: Chunk) => void) => Promise<void>
@@ -50,7 +49,7 @@ function serializeChunk(chunk: Chunk, buffer: Uint8Array, dv: DataView, offset:
'Invalid chunk buffer');
Bytes.copy(chunk.hash.digest, buffer, offset);
offset += sha1Size;
offset += hashByteLength;
const chunkLength = chunk.data.length;
dv.setUint32(offset, chunkLength, bigEndian);
@@ -68,14 +67,14 @@ function serializeHints(hints: Set<Hash>, buff: Uint8Array, dv: DataView): numbe
hints.forEach(hash => {
Bytes.copy(hash.digest, buff, offset);
offset += sha1Size;
offset += hashByteLength;
});
return offset;
}
function serializedHintsLength(hints: Set<Hash>): number {
return headerSize + sha1Size * hints.size;
return headerSize + hashByteLength * hints.size;
}
function serializedChunkLength(chunk: Chunk): number {
@@ -95,10 +94,10 @@ function deserializeHints(buff: Uint8Array, dv: DataView): {hints: Array<Hash>,
const numHints = dv.getUint32(offset, bigEndian);
offset += headerSize;
invariant(buff.byteLength - offset >= sha1Size * numHints, 'Invalid hint buffer');
invariant(buff.byteLength - offset >= hashByteLength * numHints, 'Invalid hint buffer');
for (let i = 0; i < numHints; i++) {
const hash = new Hash(Bytes.slice(buff, offset, offset + sha1Size)); // copy
offset += sha1Size;
const hash = new Hash(Bytes.slice(buff, offset, offset + hashByteLength)); // copy
offset += hashByteLength;
hints.push(hash);
}
@@ -114,8 +113,8 @@ export function deserializeChunks(buff: Uint8Array, dv: DataView, offset: number
invariant(buff.byteLength - offset >= chunkHeaderSize, 'Invalid chunk buffer');
// No need to copy the data out since we are not holding on to the hash object.
const hash = new Hash(Bytes.subarray(buff, offset, offset + sha1Size));
offset += sha1Size;
const hash = new Hash(Bytes.subarray(buff, offset, offset + hashByteLength));
offset += hashByteLength;
const chunkLength = dv.getUint32(offset, bigEndian);
offset += chunkLengthSize;

View File

@@ -15,15 +15,15 @@ suite('Chunk', () => {
test('construct', () => {
const c = Chunk.fromString('abc');
assert.isTrue(c.hash.equals(
notNull(Hash.parse('sha1-a9993e364706816aba3e25717850c26c9cd0d89d'))));
notNull(Hash.parse('rmnjb8cjc5tblj21ed4qs821649eduie'))));
assert.isFalse(c.isEmpty());
});
test('construct with hash', () => {
const hash = notNull(Hash.parse('sha1-0000000000000000000000000000000000000001'));
const hash = notNull(Hash.parse('00000000000000000000000000000001'));
const c = Chunk.fromString('abc', hash);
assert.isTrue(c.hash.equals(
notNull(Hash.parse('sha1-0000000000000000000000000000000000000001'))));
notNull(Hash.parse('00000000000000000000000000000001'))));
assert.isFalse(c.isEmpty());
});

View File

@@ -5,7 +5,7 @@
// http://www.apache.org/licenses/LICENSE-2.0
import Chunk from './chunk.js';
import Hash, {sha1Size} from './hash.js';
import Hash, {byteLength as hashByteLength} from './hash.js';
import ValueDecoder from './value-decoder.js';
import ValueEncoder from './value-encoder.js';
import {invariant} from './assert.js';
@@ -45,7 +45,7 @@ export function decodeValue(chunk: Chunk, vr: ValueReader): Value {
const maxUInt32 = Math.pow(2, 32);
const littleEndian = true;
const bigEndian = false;
export interface NomsReader {
readBytes(): Uint8Array;
@@ -95,14 +95,15 @@ export class BinaryNomsReader {
}
readUint32(): number {
const v = this.dv.getUint32(this.offset, littleEndian);
const v = this.dv.getUint32(this.offset, bigEndian);
this.offset += 4;
return v;
}
readUint64(): number {
const lsi = this.readUint32();
// Big endian
const msi = this.readUint32();
const lsi = this.readUint32();
const v = msi * maxUInt32 + lsi;
invariant(v <= Number.MAX_SAFE_INTEGER);
return v;
@@ -131,8 +132,8 @@ export class BinaryNomsReader {
readHash(): Hash {
// Make a copy of the data.
const digest = Bytes.slice(this.buff, this.offset, this.offset + sha1Size);
this.offset += sha1Size;
const digest = Bytes.slice(this.buff, this.offset, this.offset + hashByteLength);
this.offset += hashByteLength;
return new Hash(digest);
}
}
@@ -186,16 +187,17 @@ export class BinaryNomsWriter {
writeUint32(v: number): void {
this.ensureCapacity(4);
this.dv.setUint32(this.offset, v, littleEndian);
this.dv.setUint32(this.offset, v, bigEndian);
this.offset += 4;
}
writeUint64(v: number): void {
invariant(v <= Number.MAX_SAFE_INTEGER);
const v2 = (v / maxUInt32) | 0;
const v1 = v % maxUInt32;
this.writeUint32(v1);
this.writeUint32(v2);
const msi = (v / maxUInt32) | 0;
const lsi = v % maxUInt32;
// Big endian
this.writeUint32(msi);
this.writeUint32(lsi);
}
writeNumber(v: number): void {
@@ -221,8 +223,8 @@ export class BinaryNomsWriter {
}
writeHash(h: Hash): void {
this.ensureCapacity(sha1Size);
this.ensureCapacity(hashByteLength);
Bytes.copy(h.digest, this.buff, this.offset);
this.offset += sha1Size;
this.offset += hashByteLength;
}
}

View File

@@ -64,8 +64,8 @@ suite('compare.js', () => {
'a', 'b', 'c',
// The order of these are done by the hash.
boolType,
new Set([0, 1, 2, 3]),
boolType,
// Value - values cannot be value
// Cycle - values cannot be cycle

View File

@@ -49,10 +49,10 @@ suite('Encode human readable types', () => {
assertWriteType('Ref<Number>', makeRefType(numberType));
assertWriteType('Map<Number, String>', makeMapType(numberType, stringType));
assertWriteType('String | Number', makeUnionType([numberType, stringType]));
assertWriteType('Number | String', makeUnionType([numberType, stringType]));
assertWriteType('Bool', makeUnionType([boolType]));
assertWriteType('', makeUnionType([]));
assertWriteType('List<String | Number>', makeListType(makeUnionType([numberType, stringType])));
assertWriteType('List<Number | String>', makeListType(makeUnionType([numberType, stringType])));
assertWriteType('List<>', makeListType(makeUnionType([])));
});

View File

@@ -303,8 +303,8 @@ suite('Encoding', () => {
assertEncoding([
uint8(SetKind), uint8(SetKind), uint8(NumberKind), false,
uint32(2), // len
uint8(SetKind), uint8(NumberKind), false, uint32(1) /* len */, uint8(NumberKind), float64(0),
uint8(SetKind), uint8(NumberKind), false, uint32(3) /* len */, uint8(NumberKind), float64(1), uint8(NumberKind), float64(2), uint8(NumberKind), float64(3),
uint8(SetKind), uint8(NumberKind), false, uint32(1) /* len */, uint8(NumberKind), float64(0),
],
new Set([new Set([0]), new Set([1, 2, 3])]));
});
@@ -327,9 +327,9 @@ suite('Encoding', () => {
});
test('compound blob', () => {
const r1 = Hash.parse('sha1-0000000000000000000000000000000000000001');
const r2 = Hash.parse('sha1-0000000000000000000000000000000000000002');
const r3 = Hash.parse('sha1-0000000000000000000000000000000000000003');
const r1 = Hash.parse('00000000000000000000000000000001');
const r2 = Hash.parse('00000000000000000000000000000002');
const r3 = Hash.parse('00000000000000000000000000000003');
assertEncoding(
[
@@ -453,7 +453,7 @@ suite('Encoding', () => {
test('list of union', () => {
assertEncoding([
uint8(ListKind), uint8(UnionKind), uint32(3) /* len */, uint8(BoolKind), uint8(StringKind), uint8(NumberKind), false,
uint8(ListKind), uint8(UnionKind), uint32(3) /* len */, uint8(BoolKind), uint8(NumberKind), uint8(StringKind), false,
uint32(4) /* len */, uint8(StringKind), '0', uint8(NumberKind), float64(1), uint8(StringKind), '2', uint8(BoolKind), true,
],
new List(['0', 1, '2', true]));
@@ -471,7 +471,7 @@ suite('Encoding', () => {
const structType = makeStructType('S', ['x'], [numberType]);
assertEncoding([
uint8(ListKind), uint8(UnionKind), uint32(2) /* len */, uint8(BoolKind), uint8(TypeKind), false,
uint8(ListKind), uint8(UnionKind), uint32(2) /* len */, uint8(TypeKind), uint8(BoolKind), false,
uint32(4) /* len */, uint8(BoolKind), true, uint8(TypeKind), uint8(NumberKind), uint8(TypeKind), uint8(TypeKind), uint8(TypeKind), uint8(StructKind), 'S', uint32(1) /* len */, 'x', uint8(NumberKind),
],
new List([true, numberType, typeType, structType]));
@@ -479,7 +479,7 @@ suite('Encoding', () => {
test('ref', () => {
const type = makeRefType(numberType);
const r = Hash.parse('sha1-0123456789abcdef0123456789abcdef01234567');
const r = Hash.parse('0123456789abcdefghijklmnopqrstuv');
assertEncoding([
uint8(RefKind), uint8(NumberKind), r.toString(), uint64(4),
@@ -516,7 +516,7 @@ suite('Encoding', () => {
test('union list', () => {
assertEncoding([
uint8(ListKind), uint8(UnionKind), uint32(2) /* len */, uint8(StringKind), uint8(NumberKind),
uint8(ListKind), uint8(UnionKind), uint32(2) /* len */, uint8(NumberKind), uint8(StringKind),
false, uint32(2) /* len */, uint8(StringKind), 'hi', uint8(NumberKind), float64(42),
],
new List(['hi', 42]));

View File

@@ -17,26 +17,28 @@ suite('Hash', () => {
}
assertParseError('foo');
assertParseError('sha1');
assertParseError('sha1-0');
// too few digits
assertParseError('0000000000000000000000000000000');
// too many digits
assertParseError('sha1-00000000000000000000000000000000000000000');
assertParseError('000000000000000000000000000000000');
// 'g' not valid hex
assertParseError('sha1- 000000000000000000000000000000000000000g');
// 'w' not valid base32
assertParseError('00000000000000000000000000000000w');
// sha2 not supported
assertParseError('sha2-0000000000000000000000000000000000000000');
// no prefix
assertParseError('sha1-00000000000000000000000000000000');
assertParseError('sha2-00000000000000000000000000000000');
const valid = 'sha1-0000000000000000000000000000000000000000';
const valid = '00000000000000000000000000000000';
assert.isNotNull(Hash.parse(valid));
});
test('equals', () => {
const r0 = notNull(Hash.parse('sha1-0000000000000000000000000000000000000000'));
const r01 = notNull(Hash.parse('sha1-0000000000000000000000000000000000000000'));
const r1 = notNull(Hash.parse('sha1-0000000000000000000000000000000000000001'));
const r0 = notNull(Hash.parse('00000000000000000000000000000000'));
const r01 = notNull(Hash.parse('00000000000000000000000000000000'));
const r1 = notNull(Hash.parse('00000000000000000000000000000001'));
assert.isTrue(r0.equals(r01));
assert.isTrue(r01.equals(r0));
@@ -45,15 +47,14 @@ suite('Hash', () => {
});
test('toString', () => {
const s = 'sha1-0123456789abcdef0123456789abcdef01234567';
const s = '0123456789abcdefghijklmnopqrstuv';
const r = notNull(Hash.parse(s));
assert.strictEqual(s, r.toString());
});
test('fromData', () => {
const r = Hash.fromData(Bytes.fromString('abc'));
assert.strictEqual('sha1-a9993e364706816aba3e25717850c26c9cd0d89d', r.toString());
assert.strictEqual('rmnjb8cjc5tblj21ed4qs821649eduie', r.toString());
});
test('isEmpty', () => {

View File

@@ -4,10 +4,13 @@
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
import * as Bytes from './bytes.js';
import {alloc, compare, sha512} from './bytes.js';
import {encode, decode} from './base32';
export const sha1Size = 20;
const pattern = /^sha1-[0-9a-f]{40}$/;
export const byteLength = 20;
export const stringLength = 32
;
const pattern = /^[0-9a-v]{32}$/;
export default class Hash {
_digest: Uint8Array;
@@ -33,23 +36,23 @@ export default class Hash {
}
compare(other: Hash): number {
return Bytes.compare(this._digest, other._digest);
return compare(this._digest, other._digest);
}
toString(): string {
return 'sha1-' + Bytes.toHexString(this._digest);
return encode(this._digest);
}
static parse(s: string): ?Hash {
if (pattern.test(s)) {
return new Hash(Bytes.fromHexString(s.substring(5)));
return new Hash(decode(s));
}
return null;
}
static fromData(data: Uint8Array): Hash {
return new Hash(Bytes.sha1(data));
return new Hash(sha512(data));
}
}
export const emptyHash = new Hash(Bytes.alloc(sha1Size));
export const emptyHash = new Hash(alloc(byteLength));

View File

@@ -36,7 +36,7 @@ import {TestDatabase} from './test-util.js';
import {IndexedMetaSequence} from './meta-sequence.js';
const testListSize = 5000;
const listOfNRef = 'sha1-cb53c5de1ccef77930f19fce6c425998a763b231';
const listOfNRef = 'pe9nceojcmtq2972kqhkmhqu40ckhvbh';
async function assertToJS(list: List, nums: Array<any>, start: number = 0,
end: number = nums.length): Promise<void> {
@@ -148,18 +148,18 @@ suite('List', () => {
}
test('List 1K', async () => {
await listTestSuite(10, 'sha1-d797568943812c45ec530c80d3a2654a77649890', 17, 5, 1);
await listTestSuite(10, '6a50jldrfobup4j0d0a55hk7i86iu3re', 15, 17, 2);
});
test('LONG: List 4K', async () => {
await listTestSuite(12, 'sha1-be2dbb48eaee147211a3f57da879feefd3e44269', 2, 2, 2);
await listTestSuite(12, 'g77lk69og8i9gmu6l211rtia4dhkrmge', 2, 3, 2);
});
test('LONG: list of ref, set of n numbers, length', async () => {
const nums = intSequence(testListSize);
const refs = nums.map(n => new Ref(newStruct('num', {n})));
const s = new List(refs);
assert.strictEqual('sha1-6a02619eb8074f89ee2f0453837140f6e796609f', s.hash.toString());
assert.strictEqual('l185tn53r279itlmhfud2f56jopivtnj', s.hash.toString());
assert.strictEqual(testListSize, s.length);
const height = deriveCollectionHeight(s);
@@ -623,7 +623,7 @@ suite('ListWriter', () => {
});
test('ListWriter with ValueReadWriter', async () => {
const values = intSequence(75);
const values = intSequence(150);
const l = new List(values);
// The number of writes depends on how many chunks we've encountered.
@@ -669,7 +669,7 @@ suite('ListWriter', () => {
assert.isTrue(equals(l.type, makeListType(numberType)));
}
await t(10, ListLeafSequence);
await t(100, IndexedMetaSequence);
await t(15, ListLeafSequence);
await t(150, IndexedMetaSequence);
});
});

View File

@@ -22,7 +22,7 @@ import {
newIndexedMetaSequenceBoundaryChecker,
newIndexedMetaSequenceChunkFn,
} from './meta-sequence.js';
import {sha1Size} from './hash.js';
import {byteLength} from './hash.js';
import Ref from './ref.js';
import {getValueChunks} from './sequence.js';
import {makeListType, makeUnionType, getTypeOfValue} from './type.js';
@@ -50,7 +50,7 @@ function newListLeafChunkFn<T: Value>(vr: ?ValueReader, vw: ?ValueWriter): makeC
}
function newListLeafBoundaryChecker<T: Value>(): BoundaryChecker<T> {
return new BuzHashBoundaryChecker(listWindowSize, sha1Size, listPattern,
return new BuzHashBoundaryChecker(listWindowSize, byteLength, listPattern,
(v: T) => getHashOfValue(v).digest
);
}

View File

@@ -40,7 +40,7 @@ import {
} from './type.js';
const testMapSize = 1000;
const mapOfNRef = 'sha1-0ce27caa55f6fec82da76e1bc84fe459b7387791';
const mapOfNRef = '7t3bkoj5tnc3vlv03fgupmvtgtc4i1na';
const smallRandomMapSize = 50;
const randomMapSize = 500;
@@ -85,21 +85,21 @@ suite('BuildMap', () => {
}
test('Map 1K', async () => {
await mapTestSuite(10, 'sha1-ccda04ba3961a70124e029c2e9af7b0537e726db', 16, i => i);
await mapTestSuite(10, 'trvmij0jsl2o647qko06r6furo0lnkj0', 2, i => i);
});
test('LONG: Map 4K', async () => {
await mapTestSuite(12, 'sha1-80e91e9538aeaabe75793c6c29d03954ac81d221', 2, i => i);
await mapTestSuite(12, 'vnff1ufe5isqsam9vof6vmfgf37jdk3l', 2, i => i);
});
const newNumberStruct = i => newStruct('', {n: i});
test('Map 1K structs', async () => {
await mapTestSuite(10, 'sha1-17a96ed265da91aa992be70dba34cd9c3b9000df', 2, newNumberStruct);
await mapTestSuite(10, '0afm8gkghskk18hhbcuiljfdemqskoj5', 21, newNumberStruct);
});
test('LONG: Map 4K structs', async () => {
await mapTestSuite(12, 'sha1-ed658ef24dbc4fa2fecefa1e215bc06887199935', 2, newNumberStruct);
await mapTestSuite(12, 't2t6vjr9nodu9k024ufnugj5qeahmgol', 2, newNumberStruct);
});
test('unique keys - strings', async () => {
@@ -154,7 +154,7 @@ suite('BuildMap', () => {
const kvRefs = kvs.map(entry => entry.map(n => new Ref(newStruct('num', {n}))));
const m = new Map(kvRefs);
assert.strictEqual(m.hash.toString(), 'sha1-5c36c25f8d62e72b3d02089febab440049236631');
assert.strictEqual(m.hash.toString(), 'ck8vuj44jp52kllgo39s30u49i1s8bl8');
const height = deriveCollectionHeight(m);
assert.isTrue(height > 0);
// height + 1 because the leaves are Ref values (with height 1).
@@ -268,7 +268,7 @@ suite('BuildMap', () => {
const sortedKeys = numbers.concat(strings, structs);
const m = new Map(kvs);
assert.strictEqual(m.hash.toString(), 'sha1-4e3eb68ff102c74aa7305753ee2ebcc4ebdebf62');
assert.strictEqual(m.hash.toString(), 'a75m96ff3ta1poibue3uri7veluap4g4');
const height = deriveCollectionHeight(m);
assert.isTrue(height > 0);
assert.strictEqual(height, m.sequence.items[0].ref.height);
@@ -423,10 +423,11 @@ suite('MapLeaf', () => {
});
test('chunks', () => {
const r1 = db.writeValue('x');
const r2 = db.writeValue(true);
const r3 = db.writeValue('b');
const r4 = db.writeValue(false);
const r1 = db.writeValue('b');
const r2 = db.writeValue(false);
const r3 = db.writeValue('x');
const r4 = db.writeValue(true);
const m = new Map([[r1, r2], [r3, r4]]);
assert.strictEqual(4, m.chunks.length);
assert.isTrue(equals(r1, m.chunks[0]));
@@ -434,7 +435,6 @@ suite('MapLeaf', () => {
assert.isTrue(equals(r3, m.chunks[2]));
assert.isTrue(equals(r4, m.chunks[3]));
});
});
suite('CompoundMap', () => {

View File

@@ -14,7 +14,7 @@ import type {AsyncIterator} from './async-iterator.js';
import {chunkSequence, chunkSequenceSync} from './sequence-chunker.js';
import Collection from './collection.js';
import {compare, equals} from './compare.js';
import {sha1Size} from './hash.js';
import {byteLength} from './hash.js';
import {getHashOfValue} from './get-hash.js';
import {getTypeOfValue, makeMapType, makeUnionType} from './type.js';
import {
@@ -54,7 +54,7 @@ function newMapLeafChunkFn<K: Value, V: Value>(vr: ?ValueReader):
function newMapLeafBoundaryChecker<K: Value, V: Value>():
BoundaryChecker<MapEntry<K, V>> {
return new BuzHashBoundaryChecker(mapWindowSize, sha1Size, mapPattern,
return new BuzHashBoundaryChecker(mapWindowSize, byteLength, mapPattern,
(entry: MapEntry<K, V>) => getHashOfValue(entry[KEY]).digest);
}

View File

@@ -26,7 +26,7 @@ suite('MemoryStore', () => {
ms.put(c);
// See http://www.di-mgt.com.au/sha_testvectors.html
assert.strictEqual('sha1-a9993e364706816aba3e25717850c26c9cd0d89d', c.hash.toString());
assert.strictEqual('rmnjb8cjc5tblj21ed4qs821649eduie', c.hash.toString());
const oldRoot = await ms.getRoot();
await ms.updateRoot(c.hash, oldRoot);
@@ -43,10 +43,10 @@ suite('MemoryStore', () => {
const oldRoot = await ms.getRoot();
assert.isTrue(oldRoot.isEmpty());
// sha1('Bogus, Dude')
const bogusRoot = notNull(Hash.parse('sha1-81c870618113ba29b6f2b396ea3a69c6f1d626c5'));
// sha1('Hello, World')
const newRoot = notNull(Hash.parse('sha1-907d14fb3af2b0d4f18c2d46abe8aedce17367bd'));
// sha512 'Bogus, Dude'
const bogusRoot = notNull(Hash.parse('8habda5skfek1265pc5d5l1orptn5dr0'));
// sha512 'Hello, World'
const newRoot = notNull(Hash.parse('8la6qjbh81v85r6q67lqbfrkmpds14lg'));
// Try to update root with bogus oldRoot
let result = await ms.updateRoot(newRoot, bogusRoot);
@@ -59,7 +59,7 @@ suite('MemoryStore', () => {
test('get non-existing', async () => {
const ms = new MemoryStore();
const hash = notNull(Hash.parse('sha1-1111111111111111111111111111111111111111'));
const hash = notNull(Hash.parse('11111111111111111111111111111111'));
const c = await ms.get(hash);
assert.isTrue(c.isEmpty());
});

View File

@@ -6,7 +6,7 @@
import BuzHashBoundaryChecker from './buzhash-boundary-checker.js';
import {compare} from './compare.js';
import {default as Hash, sha1Size} from './hash.js';
import Hash, {byteLength} from './hash.js';
import type {BoundaryChecker, makeChunkFn} from './sequence-chunker.js';
import type {ValueReader, ValueWriter} from './value-store.js';
import type Value from './value.js'; // eslint-disable-line no-unused-vars
@@ -299,7 +299,7 @@ const orderedSequenceWindowSize = 1;
const objectPattern = ((1 << 6) | 0) - 1;
export function newOrderedMetaSequenceBoundaryChecker(): BoundaryChecker<MetaTuple> {
return new BuzHashBoundaryChecker(orderedSequenceWindowSize, sha1Size, objectPattern,
return new BuzHashBoundaryChecker(orderedSequenceWindowSize, byteLength, objectPattern,
(mt: MetaTuple) => mt.ref.targetHash.digest
);
}
@@ -334,7 +334,7 @@ export function newIndexedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader,
}
export function newIndexedMetaSequenceBoundaryChecker(): BoundaryChecker<MetaTuple> {
return new BuzHashBoundaryChecker(objectWindowSize, sha1Size, objectPattern,
return new BuzHashBoundaryChecker(objectWindowSize, byteLength, objectPattern,
(mt: MetaTuple) => mt.ref.targetHash.digest
);
}

View File

@@ -37,7 +37,7 @@ import {
} from './type.js';
const testSetSize = 5000;
const setOfNRef = 'sha1-ae7716c21164c7095686610371fd8e4af7b4e7c2';
const setOfNRef = 'hem0jotqomqkd1ngaffgtceo84utldic';
const smallRandomSetSize = 200;
const randomSetSize = 2000;
@@ -72,21 +72,21 @@ suite('BuildSet', () => {
}
test('Set 1K', async () => {
await setTestSuite(10, 'sha1-1520836622fd7cd2964c3d49c3076a270422e255', 16, i => i);
await setTestSuite(10, 'bcoils8qvfk5d0cfodutr0pck7h05vib', 18, i => i);
});
test('LONG: Set 4K', async () => {
await setTestSuite(12, 'sha1-874d250b19dab05ddc63feb301ba95bdafcf8a7d', 2, i => i);
await setTestSuite(12, '6f0tmpn92p9ti9c9rogeflag1v3bimeg', 4, i => i);
});
const newNumberStruct = i => newStruct('', {n: i});
test('Set 1K structs', async () => {
await setTestSuite(10, 'sha1-217eba8e53962c0efea24f4c22e6a525bb1663dd', 14, newNumberStruct);
await setTestSuite(10, 'fop5t31l0vvfsjojd1drumggb5309lqi', 21, newNumberStruct);
});
test('LONG: Set 4K structs', async () => {
await setTestSuite(12, 'sha1-3ac7ebc9123028d1ade619f539ad4d488a3ab6ea', 2, newNumberStruct);
await setTestSuite(12, '9qi585g38ro42lj143iqm1gdsostrlnn', 70, newNumberStruct);
});
test('unique keys - strings', async () => {
@@ -124,7 +124,7 @@ suite('BuildSet', () => {
const nums = intSequence(testSetSize);
const structs = nums.map(n => newStruct('num', {n}));
const s = new Set(structs);
assert.strictEqual('sha1-dd51e00ce152fbcab72d625ec2c2895f9264ec8f', s.hash.toString());
assert.strictEqual('jqvqpuj3glvltvo9q2eepq90n0g6btkb', s.hash.toString());
const height = deriveCollectionHeight(s);
assert.isTrue(height > 0);
assert.strictEqual(height, s.sequence.items[0].ref.height);
@@ -139,7 +139,7 @@ suite('BuildSet', () => {
const nums = intSequence(testSetSize);
const refs = nums.map(n => new Ref(newStruct('num', {n})));
const s = new Set(refs);
assert.strictEqual('sha1-73ceda53a24ffc2d76d17a34b772468cfe84576f', s.hash.toString());
assert.strictEqual('57cs3p6o8dpibm38fh9j0krfo4pr8108', s.hash.toString());
const height = deriveCollectionHeight(s);
assert.isTrue(height > 0);
// height + 1 because the leaves are Ref values (with height 1).
@@ -231,7 +231,7 @@ suite('BuildSet', () => {
vals.sort(compare);
const s = new Set(vals);
assert.strictEqual('sha1-7b6b734e9cb67af9a93dd82ae82a60a2d4ae8ad5', s.hash.toString());
assert.strictEqual('87ic99f79hemako95rddc524nvke4t4q', s.hash.toString());
const height = deriveCollectionHeight(s);
assert.isTrue(height > 0);
assert.strictEqual(height, s.sequence.items[0].ref.height);

View File

@@ -25,7 +25,7 @@ import {OrderedSequence, OrderedSequenceCursor, OrderedSequenceIterator} from
'./ordered-sequence.js';
import diff from './ordered-sequence-diff.js';
import {makeSetType, makeUnionType, getTypeOfValue} from './type.js';
import {sha1Size} from './hash.js';
import {byteLength} from './hash.js';
import {removeDuplicateFromOrdered} from './map.js';
import {getValueChunks} from './sequence.js';
import {Kind} from './noms-kind.js';
@@ -45,7 +45,7 @@ function newSetLeafChunkFn<T:Value>(vr: ?ValueReader): makeChunkFn {
}
function newSetLeafBoundaryChecker<T:Value>(): BoundaryChecker<T> {
return new BuzHashBoundaryChecker(setWindowSize, sha1Size, setPattern, (v: T) => {
return new BuzHashBoundaryChecker(setWindowSize, byteLength, setPattern, (v: T) => {
const hash = getHashOfValue(v);
return hash.digest;
});

View File

@@ -71,12 +71,12 @@ suite('Specs', () => {
});
test('HashSpec', async () => {
const testHash = Hash.parse('sha1-0000000000000000000000000000000000000000');
const testHash = Hash.parse('00000000000000000000000000000000');
invariant(testHash);
const invalid = [
'mem', 'mem:', 'http', 'http:', 'http://foo', 'monkey', 'monkey:balls',
'mem:not-hash', 'mem:sha1-', 'mem:sha2-0000', `mem:::${testHash}`,
'http://foo:blah', 'https://foo:sha1',
'mem:not-hash', 'mem:0000', `mem:::${testHash}`,
'http://foo:blah',
];
invalid.forEach(s => assert.isNull(HashSpec.parse(s)));
@@ -103,7 +103,7 @@ suite('Specs', () => {
assert.equal(spec.database.scheme, 'http');
assert.equal(spec.database.path, '//foo:8000/test');
const testHash = Hash.parse('sha1-0000000000000000000000000000000000000000');
const testHash = Hash.parse('00000000000000000000000000000000');
invariant(testHash);
spec = parseObjectSpec(`http://foo:8000/test::${testHash}`);
invariant(spec);

View File

@@ -4,4 +4,4 @@
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
export default '3';
export default '4';

View File

@@ -44,14 +44,14 @@ suite('cross platform test', () => {
async function testSuite(): Promise<void> {
// please update Go and JS to keep them in sync - see types/xp_test.go
const testValues = [
new TestValue(true, 'sha1-3f29546453678b855931c174a97d6c0894b8f546', 'bool - true'),
new TestValue(false, 'sha1-1489f923c4dca729178b3e3233458550d8dddf29', 'bool - false'),
new TestValue(-1, 'sha1-47ec8d98366433dc002e7721c9e37d5067547937', 'num - -1'),
new TestValue(0, 'sha1-9508e90548b0440a4a61e5743b76c1e309b23b7f', 'num - 0'),
new TestValue(1, 'sha1-9f36f27018671b24dcdf70c9eb857d5ea2a064c8', 'num - 1'),
new TestValue('', 'sha1-e1bc1dae59f116abb43f9dafbb2acc9b141aa6b0', 'str - empty'),
new TestValue('0', 'sha1-a1c90c71d1ffdb51138677c578e6f2e8a011070d', 'str - 0'),
new TestValue('false', 'sha1-e15d53dc6c9d3aa6eca4eea28382c9c45ba8fd9e', 'str - false'),
new TestValue(true, 'g19moobgrm32dn083bokhksuobulq28c', 'bool - true'),
new TestValue(false, 'bqjhrhmgmjqnnssqln87o84c6no6pklq', 'bool - false'),
new TestValue(-1, 'hq0jvv1enraehfggfk8s27ll1rmirt96', 'num - -1'),
new TestValue(0, 'elie88b5iouak7onvi2mpkcgoqqr771l', 'num - 0'),
new TestValue(1, '6h9ldndhjoq0r5sbn1955gaearq5dovc', 'num - 1'),
new TestValue('', 'ssfs0o2eq3kg50p37q2crhhqhjcs2391', 'str - empty'),
new TestValue('0', 'jngc7d11d2h0c6s2f15l10rckvu753rb', 'str - 0'),
new TestValue('false', '1v3a1t4to25kkohm1bhh2thebmls0lp0', 'str - false'),
];
await testTypes(testValues);

33
js/test-perf.js Normal file
View File

@@ -0,0 +1,33 @@
'use strict';
const noms = require('.');
// Micro-benchmark: build `n` simple structs, then time how long it takes to
// construct a noms List from them and to write that List into an in-memory
// database. Results are printed to the console.
function test(n) {
  const structs = [];
  for (let i = 0; i < n; i++) {
    structs.push(noms.newStruct('', {
      b: i % 2 === 0,
      n: i,
      s: String(i),
    }));
  }

  const db = noms.DatabaseSpec.parse('mem').database();

  // Time List construction.
  const listStart = Date.now();
  const list = new noms.List(structs);
  console.log('Create list', Date.now() - listStart);

  // Time writing the List to the database.
  const writeStart = Date.now();
  const ref = db.writeValue(list);
  console.log('Write list', Date.now() - writeStart);

  //
  // Disabled: round-trip read timing (re-enable to measure reads).
  // const readStart = Date.now();
  // return db.readValue(ref.hash).then(l => {
  //   console.log(l);
  //   console.log('Read list', Date.now() - readStart);
  // });
}
// Run the benchmark with 50k structs. The `.catch` is kept commented out for
// the case where test() is changed to return a promise (see disabled read path).
test(50000);//.catch(err => console.error(err));

78
js/test-type.js Normal file
View File

@@ -0,0 +1,78 @@
'use strict';
const noms = require('.');
// Exercise List type widening/narrowing: build a List of n numbers plus one
// trailing string, then remove the last element twice, logging the List's
// length and described type after each step.
function testList(n) {
  const items = [];
  for (let i = 0; i < n; i++) {
    items.push(i);
  }
  items.push('a');

  let list = new noms.List(items);
  const report = () => console.log(list.length, list.type.describe());

  report();
  list.remove(list.length - 1, list.length)
    .then(next => {
      list = next;
      report();
      return list.remove(list.length - 1, list.length);
    })
    .then(next => {
      list = next;
      report();
    });
}
// Exercise Set type widening/narrowing: build a Set of n numbers plus the
// string 'a', then remove 'a' and then 0, logging the Set's size and
// described type after each step.
function testSet(n) {
  const items = [];
  for (let i = 0; i < n; i++) {
    items.push(i);
  }
  items.push('a');

  let set = new noms.Set(items);
  const report = () => console.log(set.size, set.type.describe());

  report();
  set.remove('a')
    .then(next => {
      set = next;
      report();
      return set.remove(0);
    })
    .then(next => {
      set = next;
      report();
    });
}
// Exercise Map type widening/narrowing: build a Map of n number->number
// entries plus one 'a'->'a' entry, then remove key 'a' and then key 0,
// logging the Map's size and described type after each step.
function testMap(n) {
  const entries = [];
  for (let i = 0; i < n; i++) {
    entries.push([i, i]);
  }
  entries.push(['a', 'a']);

  let map = new noms.Map(entries);
  // console.log(map.sequence);  // disabled: dump internal sequence for debugging
  const report = () => console.log(map.size, map.type.describe());

  report();
  map.remove('a')
    .then(next => {
      map = next;
      report();
      return map.remove(0);
    })
    .then(next => {
      map = next;
      report();
    });
}
// Ad-hoc drivers: only the Map cases are currently enabled; uncomment the
// List/Set lines (and larger sizes) to exercise the other collections.
// testList(10);
// testSet(10);
testMap(10);
// testList(10000);
// testSet(10000);
testMap(100);