Update to Flow 0.30.0 (#2337)

Flow 0.30.0 requires all parameterized types to be written with explicit type parameters. Fortunately, one can use `T<any>`, which has the same behavior as the old bare `T` syntax.

We should tighten the types further after this, but this unblocks us.
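
As a minimal sketch of what the new requirement means in practice (using a hypothetical `Box` class, not a type from this repo): Flow 0.30.0 rejects a bare parameterized type in an annotation, and writing `<any>` restores the old, unchecked behavior until the parameter can be tightened to a real type.

```js
// @flow
// Hypothetical example; `Box` is not a type from this repo.
class Box<T> {
  value: T;
  constructor(value: T) {
    this.value = value;
  }
}

// Flow 0.27 accepted the bare annotation `Box`:
//   function unwrap(b: Box) { return b.value; }
// Flow 0.30 requires the type argument to be spelled out;
// `Box<any>` behaves the same as the old bare `Box`.
function unwrap(b: Box<any>) {
  return b.value;
}
```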

Fixes #2301
Erik Arvidsson authored 2016-08-11 11:27:41 -07:00 · committed by GitHub
parent 6697c2e6fc · commit 6178251012
53 changed files with 337 additions and 319 deletions

View File

@@ -34,7 +34,7 @@
"chai": "^3.5.0",
"chokidar": "^1.6.0",
"commander": "^2.9.0",
"flow-bin": "0.27.0",
"flow-bin": "0.30.0",
"fs-extra": "^0.30.0",
"mocha": "^2.5.3"
},

View File

@@ -33,7 +33,7 @@ import Ref from './ref.js';
suite('validate type', () => {
function assertInvalid(t: Type, v) {
function assertInvalid(t: Type<any>, v) {
assert.throws(() => { assertSubtype(t, v); });
}
@@ -46,7 +46,7 @@ suite('validate type', () => {
valueType,
];
function assertAll(t: Type, v) {
function assertAll(t: Type<any>, v) {
for (const at of allTypes) {
if (at === valueType || equals(t, at)) {
assertSubtype(at, v);

View File

@@ -35,16 +35,16 @@ import type Value from './value.js';
* newStruct("S", {x: 42, y: true}) < struct S {x: Number}, extra fields OK.
* ```
*/
export default function assertSubtype(requiredType: Type, v: Value): void {
export default function assertSubtype(requiredType: Type<any>, v: Value): void {
assert(isSubtype(requiredType, getTypeOfValue(v)), v, requiredType);
}
export function isSubtype(requiredType: Type, concreteType: Type): boolean {
export function isSubtype(requiredType: Type<any>, concreteType: Type<any>): boolean {
return isSubtypeInternal(requiredType, concreteType, []);
}
export function isSubtypeInternal(requiredType: Type, concreteType: Type,
parentStructTypes: Type[]): boolean {
export function isSubtypeInternal(requiredType: Type<any>, concreteType: Type<any>,
parentStructTypes: Type<any>[]): boolean {
if (equals(requiredType, concreteType)) {
return true;
}
@@ -111,8 +111,8 @@ export function isSubtypeInternal(requiredType: Type, concreteType: Type,
invariant(false);
}
function compoundSubtype(requiredType: Type, concreteType: Type,
parentStructTypes: Type[]): boolean {
function compoundSubtype(requiredType: Type<any>, concreteType: Type<any>,
parentStructTypes: Type<any>[]): boolean {
// In a compound type it is OK to have an empty union.
if (concreteType.kind === Kind.Union && concreteType.desc.elemTypes.length === 0) {
return true;
@@ -122,6 +122,6 @@ function compoundSubtype(requiredType: Type, concreteType: Type,
function assert(b, v, t) {
if (!b) {
throw new TypeError(`${v} is not a valid ${kindToString(t.kind)}`);
throw new TypeError(`${String(v)} is not a valid ${kindToString(t.kind)}`);
}
}

View File

@@ -19,7 +19,7 @@ import {blobType} from './type.js';
import {invariant} from './assert.js';
import {hashValueByte} from './rolling-value-hasher.js';
export default class Blob extends Collection<IndexedSequence> {
export default class Blob extends Collection<IndexedSequence<any>> {
constructor(bytes: Uint8Array) {
const chunker = new SequenceChunker(null, null, null, newBlobLeafChunkFn(null),
newIndexedMetaSequenceChunkFn(Kind.Blob, null), blobHashValueBytes);
@@ -51,12 +51,12 @@ export default class Blob extends Collection<IndexedSequence> {
}
export class BlobReader {
_sequence: IndexedSequence;
_sequence: IndexedSequence<any>;
_cursor: Promise<SequenceCursor<number, IndexedSequence<number>>>;
_pos: number;
_lock: string;
constructor(sequence: IndexedSequence) {
constructor(sequence: IndexedSequence<any>) {
this._sequence = sequence;
this._cursor = sequence.newCursorAt(0);
this._pos = 0;
@@ -83,7 +83,7 @@ export class BlobReader {
});
}
_readCur(cur: SequenceCursor): Promise<Uint8Array> {
_readCur(cur: SequenceCursor<any, any>): Promise<Uint8Array> {
let arr = cur.sequence.items;
invariant(arr instanceof Uint8Array);
@@ -150,13 +150,13 @@ export class BlobLeafSequence extends IndexedSequence<number> {
return idx + 1;
}
getCompareFn(other: IndexedSequence): EqualsFn {
getCompareFn(other: IndexedSequence<any>): EqualsFn {
return (idx: number, otherIdx: number) =>
this.items[idx] === other.items[otherIdx];
}
}
function newBlobLeafChunkFn(vr: ?ValueReader): makeChunkFn {
function newBlobLeafChunkFn(vr: ?ValueReader): makeChunkFn<any, any> {
return (items: Array<number>) => {
const blobLeaf = new BlobLeafSequence(vr, Bytes.fromValues(items));
const blob = Blob.fromSequence(blobLeaf);
@@ -174,7 +174,7 @@ type BlobWriterState = 'writable' | 'closed';
export class BlobWriter {
_state: BlobWriterState;
_blob: ?Promise<Blob>;
_chunker: SequenceChunker;
_chunker: SequenceChunker<any, any>;
_vrw: ?ValueReadWriter;
constructor(vrw: ?ValueReadWriter) {

View File

@@ -97,6 +97,8 @@ export function compare(b1: Uint8Array, b2: Uint8Array): number {
*/
export function sha512(data: Uint8Array): Uint8Array {
const hash = crypto.createHash('sha512');
// $FlowIssue
hash.update(data);
// $FlowIssue
return hash.digest().slice(0, 20);
}

View File

@@ -47,7 +47,7 @@ export function decodeValue(chunk: Chunk, vr: ValueReader): Value {
return v;
}
function ensureTypeSerialization(t: Type) {
function ensureTypeSerialization(t: Type<any>) {
if (!t.serialization) {
const w = new BinaryNomsWriter();
const enc = new ValueEncoder(w, null);
@@ -79,7 +79,7 @@ export interface NomsWriter {
writeBool(v:boolean): void;
writeString(v: string): void;
writeHash(h: Hash): void;
appendType(t: Type): void;
appendType(t: Type<any>): void;
}
export class BinaryNomsReader extends BinaryReader {
@@ -116,7 +116,7 @@ export class BinaryNomsWriter extends BinaryWriter {
this.offset += hashByteLength;
}
appendType(t: Type): void {
appendType(t: Type<any>): void {
// Note: The JS & Go impls differ here. The Go impl eagerly serializes types as they are
// constructed. The JS does it lazily so as to avoid cyclic package dependencies.
ensureTypeSerialization(t);

View File

@@ -11,7 +11,7 @@ import {ValueBase} from './value.js';
import {invariant} from './assert.js';
import {init as initValueBase} from './value.js';
export default class Collection<S: Sequence> extends ValueBase {
export default class Collection<S: Sequence<any>> extends ValueBase {
sequence: S;
constructor(sequence: S) {
@@ -19,7 +19,7 @@ export default class Collection<S: Sequence> extends ValueBase {
this.sequence = sequence;
}
get type(): Type {
get type(): Type<any> {
return this.sequence.type;
}
@@ -27,14 +27,14 @@ export default class Collection<S: Sequence> extends ValueBase {
return !this.sequence.isMeta && this.sequence.items.length === 0;
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return this.sequence.chunks;
}
/**
* Creates a new Collection from a sequence.
*/
static fromSequence<T: Collection, S: Sequence>(s: S): T {
static fromSequence<T: Collection<any>, S: Sequence<any>>(s: S): T {
const col = Object.create(this.prototype);
invariant(col instanceof this);
initValueBase(col);

View File

@@ -32,7 +32,8 @@ const parentsIndex = 1;
const valueIndex = 2;
export default class Commit<T: Value> extends Struct {
constructor(value: T, parents: Set<Ref<Commit>> = new Set(), meta: Struct = getEmptyStruct()) {
constructor(value: T, parents: Set<Ref<Commit<any>>> = new Set(),
meta: Struct = getEmptyStruct()) {
const t = makeCommitType(getTypeOfValue(value), valueTypesFromParents(parents, 'value'),
getTypeOfValue(meta), valueTypesFromParents(parents, 'meta'));
super(t, [meta, parents, value]);
@@ -49,7 +50,7 @@ export default class Commit<T: Value> extends Struct {
return new Commit(value, this.parents);
}
get parents(): Set<Ref<Commit<*>>> {
get parents(): Set<Ref<Commit<any>>> {
invariant(this.type.desc.fields[parentsIndex].name === 'parents');
// $FlowIssue: _values is private.
const parents: Set<Ref<Commit>> = this._values[parentsIndex];
@@ -57,7 +58,7 @@ export default class Commit<T: Value> extends Struct {
return parents;
}
setParents(parents: Set<Ref<Commit<*>>>): Commit<T> {
setParents(parents: Set<Ref<Commit<any>>>): Commit<T> {
return new Commit(this.value, parents);
}
@@ -75,8 +76,8 @@ export default class Commit<T: Value> extends Struct {
}
// ../../go/datas/commit.go for the motivation for how this is computed.
function makeCommitType(valueType: Type<*>, parentsValueTypes: Type<*>[],
metaType: Type<*>, parentsMetaTypes: Type<*>[]): Type<StructDesc> {
function makeCommitType(valueType: Type<any>, parentsValueTypes: Type<any>[],
metaType: Type<any>, parentsMetaTypes: Type<any>[]): Type<StructDesc> {
const fieldNames = ['meta', 'parents', 'value'];
const parentsValueUnionType = makeUnionType(parentsValueTypes.concat(valueType));
const parentsMetaUnionType = makeUnionType(parentsMetaTypes.concat(metaType));
@@ -97,7 +98,7 @@ function makeCommitType(valueType: Type<*>, parentsValueTypes: Type<*>[],
]);
}
function valueTypesFromParents(parents: Set, fieldName: string): Type<*>[] {
function valueTypesFromParents(parents: Set<any>, fieldName: string): Type<any>[] {
const elemType = getSetElementType(parents.type);
switch (elemType.kind) {
case Kind.Union:
@@ -107,21 +108,21 @@ function valueTypesFromParents(parents: Set, fieldName: string): Type<*>[] {
}
}
function getSetElementType(t: Type<CompoundDesc>): Type<*> {
function getSetElementType(t: Type<CompoundDesc>): Type<any> {
invariant(t.kind === Kind.Set);
return t.desc.elemTypes[0];
}
function fieldTypeFromRefOfCommit(t: Type<CompoundDesc>, fieldName: string): Type<*> {
function fieldTypeFromRefOfCommit(t: Type<CompoundDesc>, fieldName: string): Type<any> {
return fieldTypeFromCommit(getRefElementType(t), fieldName);
}
function getRefElementType(t: Type<CompoundDesc>): Type<*> {
function getRefElementType(t: Type<CompoundDesc>): Type<any> {
invariant(t.kind === Kind.Ref);
return t.desc.elemTypes[0];
}
function fieldTypeFromCommit(t: Type<StructDesc>, fieldName: string): Type<*> {
function fieldTypeFromCommit(t: Type<StructDesc>, fieldName: string): Type<any> {
invariant(t.desc.name === 'Commit');
return notNull(t.desc.getField(fieldName));
}

View File

@@ -18,7 +18,7 @@ import {equals} from './compare.js';
export default class Database {
_vs: ValueStore;
_rt: RootTracker;
_datasets: Promise<Map<string, Ref<Commit>>>;
_datasets: Promise<Map<string, Ref<Commit<any>>>>;
constructor(bs: BatchStore, cacheSize: number = 0) {
this._vs = new ValueStore(bs, cacheSize);
@@ -34,7 +34,7 @@ export default class Database {
return ds;
}
_datasetsFromRootRef(rootRef: Promise<Hash>): Promise<Map<string, Ref<Commit>>> {
_datasetsFromRootRef(rootRef: Promise<Hash>): Promise<Map<string, Ref<Commit<any>>>> {
return rootRef.then(rootRef => {
if (rootRef.isEmpty()) {
return Promise.resolve(new Map());
@@ -45,16 +45,16 @@ export default class Database {
}
// TODO: This should return Promise<Ref<Commit> | null>.
headRef(datasetID: string): Promise<?Ref<Commit>> {
headRef(datasetID: string): Promise<?Ref<Commit<any>>> {
return this._datasets.then(datasets => datasets.get(datasetID));
}
// TODO: This should return Promise<Commit | null>
head(datasetID: string): Promise<?Commit> {
head(datasetID: string): Promise<?Commit<any>> {
return this.headRef(datasetID).then(hr => hr ? this.readValue(hr.targetHash) : null);
}
datasets(): Promise<Map<string, Ref<Commit>>> {
datasets(): Promise<Map<string, Ref<Commit<any>>>> {
return this._datasets;
}
@@ -67,7 +67,7 @@ export default class Database {
return this._vs.writeValue(v);
}
async _descendsFrom(commit: Commit, currentHeadRef: Ref<Commit>): Promise<boolean> {
async _descendsFrom(commit: Commit<any>, currentHeadRef: Ref<Commit<any>>): Promise<boolean> {
let ancestors = commit.parents;
while (!(await ancestors.has(currentHeadRef))) {
if (ancestors.isEmpty()) {
@@ -78,10 +78,10 @@ export default class Database {
return true;
}
async commit(datasetId: string, commit: Commit): Promise<Database> {
async commit(datasetId: string, commit: Commit<any>): Promise<Database> {
const currentRootRefP = this._rt.getRoot();
const datasetsP = this._datasetsFromRootRef(currentRootRefP);
let currentDatasets = await (datasetsP:Promise<Map>);
let currentDatasets = await (datasetsP:Promise<Map<any, any>>);
const currentRootRef = await currentRootRefP;
const commitRef = this.writeValue(commit);
@@ -111,8 +111,8 @@ export default class Database {
}
}
async function getAncestors(commits: Set<Ref<Commit>>, database: Database):
Promise<Set<Ref<Commit>>> {
async function getAncestors(commits: Set<Ref<Commit<any>>>, database: Database):
Promise<Set<Ref<Commit<any>>>> {
let ancestors = new Set();
await commits.map(async (commitRef) => {
const commit = await database.readValue(commitRef.targetHash);

View File

@@ -36,11 +36,11 @@ export default class Dataset {
return this._id;
}
headRef(): Promise<?Ref<Commit>> {
headRef(): Promise<?Ref<Commit<any>>> {
return this._database.headRef(this._id);
}
head(): Promise<?Commit> {
head(): Promise<?Commit<any>> {
return this._database.head(this._id);
}
@@ -51,7 +51,7 @@ export default class Dataset {
// Commit updates the commit that a dataset points at. If parents is provided then the promise
// is rejected if the commit does not descend from the parents.
async commit(v: Value,
parents: ?Array<Ref<Commit>> = undefined): Promise<Dataset> {
parents: ?Array<Ref<Commit<any>>> = undefined): Promise<Dataset> {
if (!parents) {
const headRef = await this.headRef();
parents = headRef ? [headRef] : [];

View File

@@ -26,7 +26,7 @@ import {
} from './type.js';
suite('Encode human readable types', () => {
function assertWriteType(expected: string, t: Type) {
function assertWriteType(expected: string, t: Type<any>) {
let actual = '';
const w = {
write(s: string) {

View File

@@ -66,11 +66,11 @@ export class TypeWriter {
this._w = new Writer(w);
}
writeType(t: Type) {
writeType(t: Type<any>) {
this._writeType(t, []);
}
_writeType(t: Type, parentStructTypes: Type[]) {
_writeType(t: Type<any>, parentStructTypes: Type<any>[]) {
switch (t.kind) {
case Kind.Blob:
case Kind.Bool:
@@ -121,7 +121,7 @@ export class TypeWriter {
this._w.write(`Cycle<${i}>`);
}
_writeStructType(t: Type, parentStructTypes: Type[]) {
_writeStructType(t: Type<any>, parentStructTypes: Type<any>[]) {
const idx = parentStructTypes.indexOf(t);
if (idx !== -1) {
this._writeCycle(parentStructTypes.length - idx - 1);
@@ -138,7 +138,7 @@ export class TypeWriter {
this._w.indent();
let first = true;
desc.forEachField((name: string, type: Type) => {
desc.forEachField((name: string, type: Type<any>) => {
if (first) {
this._w.newLine();
first = false;
@@ -156,7 +156,7 @@ export class TypeWriter {
}
}
export function describeType(t: Type): string {
export function describeType(t: Type<any>): string {
let s = '';
const w = new TypeWriter({
write(s2: string) {

View File

@@ -20,12 +20,11 @@ import ValueDecoder from './value-decoder.js';
import ValueEncoder from './value-encoder.js';
import type Value from './value.js';
import {ValueBase} from './value.js';
import type {NomsKind} from './noms-kind.js';
import {Kind} from './noms-kind.js';
import {TestDatabase} from './test-util.js';
import {encodeValue, decodeValue} from './codec.js';
import {equals} from './compare.js';
import {invariant} from './assert.js';
import {invariant, notNull} from './assert.js';
import {newStruct, newStructWithType} from './struct.js';
import {
OrderedKey,
@@ -47,7 +46,9 @@ import {
stringType,
typeType,
} from './type.js';
import type {Type} from './type.js';
import {staticTypeCache} from './type-cache.js';
import type TypeCache from './type-cache.js';
function assertRoundTrips(v: Value) {
const db = new TestDatabase();
@@ -61,7 +62,7 @@ class Bogus extends ValueBase {
super();
}
get type(): Type {
get type(): Type<any> {
return makeCycleType(0);
}
}
@@ -126,19 +127,19 @@ suite('Encoding - roundtrip', () => {
});
suite('Encoding', () => {
function uint8(v: NomsKind): NomsKind {
function uint8(v) {
return {type: 'uint8', value: v};
}
function uint32(v: NomsKind): NomsKind {
function uint32(v) {
return {type: 'uint32', value: v};
}
function uint64(v: NomsKind): NomsKind {
function uint64(v) {
return {type: 'uint64', value: v};
}
function float64(v: NomsKind): NomsKind {
function float64(v) {
return {type: 'float64', value: v};
}
@@ -222,7 +223,7 @@ suite('Encoding', () => {
}
readHash(): Hash {
return Hash.parse(this.readString());
return notNull(Hash.parse(this.readString()));
}
}
@@ -270,7 +271,7 @@ suite('Encoding', () => {
this.writeString(h.toString());
}
appendType(t: Type): void {
appendType(t: Type<any>): void {
const enc = new ValueEncoder(this, null);
enc.writeType(t, []);
}
@@ -300,7 +301,7 @@ suite('Encoding', () => {
assert.deepEqual(encoding, w.toArray());
const r = new TestReader(encoding);
const dec = new ValueDecoder(r, null, staticTypeCache);
const dec = new ValueDecoder(r, new TestDatabase(), staticTypeCache);
const v2 = dec.readValue();
assert.isTrue(equals(v, v2));
}
@@ -385,6 +386,8 @@ suite('Encoding', () => {
const r2 = Hash.parse('00000000000000000000000000000002');
const r3 = Hash.parse('00000000000000000000000000000003');
invariant(r1 && r2 && r3);
assertEncoding(
[
uint8(BlobKind), true,
@@ -424,7 +427,7 @@ suite('Encoding', () => {
Bytes.copy(data, buff);
buff[data.byteLength] = 5; // Add a bogus extra byte
const c2 = new Chunk(buff);
assert.throws(() => decodeValue(c2, null));
assert.throws(() => decodeValue(c2, new TestDatabase()));
});
test('struct with list', () => {
@@ -545,6 +548,7 @@ suite('Encoding', () => {
test('ref', () => {
const type = makeRefType(numberType);
const r = Hash.parse('0123456789abcdefghijklmnopqrstuv');
invariant(r);
assertEncoding([
uint8(RefKind), uint8(NumberKind), r.toString(), uint64(4),

View File

@@ -142,7 +142,7 @@ export class Delegate {
async updateRoot(current: Hash, last: Hash): Promise<boolean> {
const ch = this._rpc.root.indexOf('?') >= 0 ? '&' : '?';
const params = `${ch}current=${current}&last=${last}`;
const params = `${ch}current=${current.toString()}&last=${last.toString()}`;
try {
const {headers} = await fetchText(this._rpc.root + params, {method: 'POST'});
const versionErr = checkVersion(headers);
@@ -163,7 +163,7 @@ function checkVersion(headers: Map<string, string>): ?Error {
const version = headers.get(versionHeader);
if (version !== nomsVersion) {
return new Error(
`SDK version ${nomsVersion} is not compatible with data of version ${version}.`);
`SDK version ${nomsVersion} is not compatible with data of version ${String(version)}.`);
}
return null;
}

View File

@@ -11,8 +11,8 @@ import {IndexedMetaSequence} from './meta-sequence.js';
import {invariant} from './assert.js';
import type {IndexedSequence} from './indexed-sequence.js';
export function diff(last: IndexedSequence, lastHeight: number, lastOffset: number,
current: IndexedSequence, currentHeight: number, currentOffset: number,
export function diff(last: IndexedSequence<any>, lastHeight: number, lastOffset: number,
current: IndexedSequence<any>, currentHeight: number, currentOffset: number,
maxSpliceMatrixSize: number): Promise<Array<Splice>> {
if (lastHeight > currentHeight) {

View File

@@ -17,15 +17,15 @@ export class IndexedSequence<T> extends Sequence<T> {
throw new Error('override');
}
getCompareFn(other: IndexedSequence): EqualsFn {
getCompareFn(other: IndexedSequence<any>): EqualsFn {
return (idx: number, otherIdx: number) =>
// $FlowIssue
equals(this.items[idx], other.items[otherIdx]);
}
async newCursorAt(idx: number): Promise<IndexedSequenceCursor> {
let cursor: ?IndexedSequenceCursor = null;
let sequence: ?IndexedSequence = this;
async newCursorAt(idx: number): Promise<IndexedSequenceCursor<any>> {
let cursor: ?IndexedSequenceCursor<any> = null;
let sequence: ?IndexedSequence<any> = this;
while (sequence) {
cursor = new IndexedSequenceCursor(cursor, sequence, 0);
@@ -41,7 +41,7 @@ export class IndexedSequence<T> extends Sequence<T> {
}
}
export class IndexedSequenceCursor<T> extends SequenceCursor<T, IndexedSequence> {
export class IndexedSequenceCursor<T> extends SequenceCursor<T, IndexedSequence<any>> {
advanceToOffset(idx: number): number {
this.idx = search(this.length, (i: number) => idx < this.sequence.cumulativeNumberOfLeaves(i));

View File

@@ -40,14 +40,14 @@ import {IndexedMetaSequence} from './meta-sequence.js';
const testListSize = 5000;
const listOfNRef = 'tqpbqlu036sosdq9kg3lka7sjaklgslg';
async function assertToJS(list: List, nums: Array<any>, start: number = 0,
async function assertToJS(list: List<any>, nums: Array<any>, start: number = 0,
end: number = nums.length): Promise<void> {
const jsArray = await list.toJS(start, end);
const expect = nums.slice(start, end);
assert.deepEqual(expect, jsArray);
}
async function validateList(l: List, values: number[]): Promise<void> {
async function validateList(l: List<any>, values: number[]): Promise<void> {
assert.isTrue(equals(new List(values), l));
const out = [];
await l.forEach(v => void(out.push(v)));
@@ -59,7 +59,7 @@ async function validateList(l: List, values: number[]): Promise<void> {
suite('List', () => {
function testPrependChunkDiff(nums: Array<any>, list: List, expectCount: number) {
function testPrependChunkDiff(nums: Array<any>, list: List<any>, expectCount: number) {
const nn = new Array(nums.length + 1);
nn[0] = 0;
for (let i = 0; i < nums.length; i++) {
@@ -70,7 +70,7 @@ suite('List', () => {
assert.strictEqual(expectCount, chunkDiffCount(list, v2));
}
function testAppendChunkDiff(nums: Array<any>, list: List, expectCount: number) {
function testAppendChunkDiff(nums: Array<any>, list: List<any>, expectCount: number) {
const nn = new Array(nums.length + 1);
nn[0] = 0;
for (let i = 0; i < nums.length; i++) {
@@ -82,7 +82,7 @@ suite('List', () => {
assert.strictEqual(expectCount, chunkDiffCount(list, v2));
}
async function testToJS(expect: Array<any>, list: List): Promise<void> {
async function testToJS(expect: Array<any>, list: List<any>): Promise<void> {
const length = expect.length;
let start = 0;
@@ -93,7 +93,7 @@ suite('List', () => {
}
}
async function testGet(nums: Array<any>, list: List): Promise<void> {
async function testGet(nums: Array<any>, list: List<any>): Promise<void> {
const incr = Math.round(nums.length / 256); // test 256 indices
for (let i = 0; i < nums.length; i += incr) {
@@ -101,7 +101,7 @@ suite('List', () => {
}
}
async function testForEach(nums: Array<any>, list: List): Promise<void> {
async function testForEach(nums: Array<any>, list: List<any>): Promise<void> {
const out = [];
await list.forEach(v => {
out.push(v);
@@ -110,7 +110,7 @@ suite('List', () => {
assert.deepEqual(nums, out);
}
async function testForEachAsyncCB(nums: Array<any>, list: List): Promise<void> {
async function testForEachAsyncCB(nums: Array<any>, list: List<any>): Promise<void> {
let resolver = null;
const p = new Promise(resolve => resolver = resolve);
@@ -307,7 +307,7 @@ suite('CompoundList', () => {
await db.close();
});
function build(): List {
function build(): List<any> {
const l1 = new List(['a', 'b']);
const r1 = db.writeValue(l1);
const l2 = new List(['e', 'f']);
@@ -372,7 +372,7 @@ suite('CompoundList', () => {
});
test('Remove last when not loaded', async () => {
const reload = async (l: List): Promise<List> => {
const reload = async (l: List<any>): Promise<List<any>> => {
const l2 = await db.readValue(db.writeValue(l).targetHash);
invariant(l2 instanceof List);
return l2;

View File

@@ -26,7 +26,7 @@ import {Kind} from './noms-kind.js';
import {DEFAULT_MAX_SPLICE_MATRIX_SIZE} from './edit-distance.js';
import {hashValueBytes} from './rolling-value-hasher.js';
function newListLeafChunkFn<T: Value>(vr: ?ValueReader): makeChunkFn {
function newListLeafChunkFn<T: Value>(vr: ?ValueReader): makeChunkFn<any, any> {
return (items: Array<T>) => {
const seq = newListLeafSequence(vr, items);
const list = List.fromSequence(seq);
@@ -35,7 +35,7 @@ function newListLeafChunkFn<T: Value>(vr: ?ValueReader): makeChunkFn {
};
}
export default class List<T: Value> extends Collection<IndexedSequence> {
export default class List<T: Value> extends Collection<IndexedSequence<any>> {
constructor(values: Array<T> = []) {
const seq = chunkSequenceSync(
values,
@@ -124,7 +124,7 @@ export default class List<T: Value> extends Collection<IndexedSequence> {
}
export class ListLeafSequence<T: Value> extends IndexedSequence<T> {
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return getValueChunks(this.items);
}
@@ -178,7 +178,7 @@ export class ListWriter<T: Value> {
this._state = 'closed';
}
get list(): Promise<List> {
get list(): Promise<List<any>> {
assert(this._state === 'closed');
invariant(this._list);
return this._list;

View File

@@ -53,7 +53,7 @@ function intKVs(count: number): [number, number][] {
return kvs;
}
async function validateMap(m: Map, kvs: [[number, number]]): Promise<void> {
async function validateMap(m: Map<any, any>, kvs: [[number, number]]): Promise<void> {
assert.isTrue(equals(new Map(kvs), m));
const out = [];
@@ -451,7 +451,7 @@ suite('CompoundMap', () => {
await db.close();
});
function build(vwr: ValueReadWriter): Array<Map> {
function build(vwr: ValueReadWriter): Array<Map<any, any>> {
const l1 = new Map([['a', false], ['b', false]]);
const r1 = vwr.writeValue(l1);
const l2 = new Map([['e', true], ['f', true]]);
@@ -790,7 +790,7 @@ suite('CompoundMap', () => {
});
test('Remove last when not loaded', async () => {
const reload = async (m: Map): Promise<Map> => {
const reload = async (m: Map<any, any>): Promise<Map<any, any>> => {
const m2 = await db.readValue(db.writeValue(m).targetHash);
invariant(m2 instanceof Map);
return m2;

View File

@@ -35,7 +35,7 @@ const KEY = 0;
const VALUE = 1;
function newMapLeafChunkFn<K: Value, V: Value>(vr: ?ValueReader):
makeChunkFn {
makeChunkFn<any, any> {
return (items: Array<MapEntry<K, V>>) => {
const key = new OrderedKey(items.length > 0 ? items[items.length - 1][KEY] : false);
const seq = newMapLeafSequence(vr, items);
@@ -44,7 +44,7 @@ function newMapLeafChunkFn<K: Value, V: Value>(vr: ?ValueReader):
};
}
function mapHashValueBytes(entry: MapEntry, rv: RollingValueHasher) {
function mapHashValueBytes(entry: MapEntry<any, any>, rv: RollingValueHasher) {
hashValueBytes(entry[KEY], rv);
hashValueBytes(entry[VALUE], rv);
}
@@ -79,7 +79,7 @@ function buildMapData<K: Value, V: Value>(
}
export default class Map<K: Value, V: Value> extends
Collection<OrderedSequence> {
Collection<OrderedSequence<any, any>> {
constructor(kvs: Array<MapEntry<K, V>> = []) {
const seq = chunkSequenceSync(
buildMapData(kvs),
@@ -139,7 +139,7 @@ export default class Map<K: Value, V: Value> extends
return new OrderedSequenceIterator(this.sequence.newCursorAtValue(k));
}
_splice(cursor: OrderedSequenceCursor, insert: Array<MapEntry<K, V>>, remove: number):
_splice(cursor: OrderedSequenceCursor<any, any>, insert: Array<MapEntry<K, V>>, remove: number):
Promise<Map<K, V>> {
const vr = this.sequence.vr;
return chunkSequence(cursor, vr, insert, remove, newMapLeafChunkFn(vr),
@@ -185,17 +185,17 @@ export default class Map<K: Value, V: Value> extends
export class MapLeafSequence<K: Value, V: Value> extends
OrderedSequence<K, MapEntry<K, V>> {
getKey(idx: number): OrderedKey {
getKey(idx: number): OrderedKey<any> {
return new OrderedKey(this.items[idx][KEY]);
}
getCompareFn(other: OrderedSequence): EqualsFn {
getCompareFn(other: OrderedSequence<any, any>): EqualsFn {
return (idx: number, otherIdx: number) =>
equals(this.items[idx][KEY], other.items[otherIdx][KEY]) &&
equals(this.items[idx][VALUE], other.items[otherIdx][VALUE]);
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
const chunks = [];
for (const entry of this.items) {
if (entry[KEY] instanceof ValueBase) {

View File

@@ -37,36 +37,36 @@ import type {EqualsFn} from './edit-distance.js';
import {hashValueBytes} from './rolling-value-hasher.js';
import RollingValueHasher from './rolling-value-hasher.js';
export type MetaSequence = Sequence<MetaTuple>;
export type MetaSequence<T: Value> = Sequence<MetaTuple<T>>;
export class MetaTuple<T: Value> {
ref: Ref;
ref: Ref<any>;
key: OrderedKey<T>;
numLeaves: number;
child: ?Collection;
child: ?Collection<any>;
constructor(ref: Ref, key: OrderedKey<T>, numLeaves: number, child: ?Collection) {
constructor(ref: Ref<any>, key: OrderedKey<T>, numLeaves: number, child: ?Collection<any>) {
this.ref = ref;
this.key = key;
this.numLeaves = numLeaves;
this.child = child;
}
getChildSequence(vr: ?ValueReader): Promise<Sequence> {
getChildSequence(vr: ?ValueReader): Promise<Sequence<any>> {
return this.child ?
Promise.resolve(this.child.sequence) :
notNull(vr).readValue(this.ref.targetHash).then((c: Collection) => {
invariant(c, () => `Could not read sequence ${this.ref.targetHash}`);
notNull(vr).readValue(this.ref.targetHash).then((c: Collection<any>) => {
invariant(c, () => `Could not read sequence ${this.ref.targetHash.toString()}`);
return c.sequence;
});
}
getChildSequenceSync(): Sequence {
getChildSequenceSync(): Sequence<any> {
return notNull(this.child).sequence;
}
}
export function metaHashValueBytes(tuple: MetaTuple, rv: RollingValueHasher) {
export function metaHashValueBytes(tuple: MetaTuple<any>, rv: RollingValueHasher) {
let val = tuple.key.v;
if (!tuple.key.isOrderedByValue) {
// See https://github.com/attic-labs/noms/issues/1688#issuecomment-227528987
@@ -95,7 +95,7 @@ export class OrderedKey<T: Value> {
}
}
static fromHash(h: Hash): OrderedKey {
static fromHash(h: Hash): OrderedKey<any> {
const k = Object.create(this.prototype);
k.isOrderedByValue = false;
k.v = null;
@@ -112,7 +112,7 @@ export class OrderedKey<T: Value> {
return this.v;
}
compare(other: OrderedKey): number {
compare(other: OrderedKey<any>): number {
if (this.isOrderedByValue && other.isOrderedByValue) {
return compare(notNull(this.v), notNull(other.v));
}
@@ -127,25 +127,25 @@ export class OrderedKey<T: Value> {
}
// The elemTypes of the collection inside the Ref<Collection<?, ?>>
function getCollectionTypes(tuple: MetaTuple): Type[] {
function getCollectionTypes(tuple: MetaTuple<any>): Type<any>[] {
return tuple.ref.type.desc.elemTypes[0].desc.elemTypes;
}
export function newListMetaSequence(vr: ?ValueReader, items: Array<MetaTuple>):
export function newListMetaSequence(vr: ?ValueReader, items: Array<MetaTuple<any>>):
IndexedMetaSequence {
const t = makeListType(makeUnionType(items.map(tuple => getCollectionTypes(tuple)[0])));
return new IndexedMetaSequence(vr, t, items);
}
export function newBlobMetaSequence(vr: ?ValueReader, items: Array<MetaTuple>):
export function newBlobMetaSequence(vr: ?ValueReader, items: Array<MetaTuple<any>>):
IndexedMetaSequence {
return new IndexedMetaSequence(vr, blobType, items);
}
export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
export class IndexedMetaSequence extends IndexedSequence<MetaTuple<any>> {
_offsets: Array<number>;
constructor(vr: ?ValueReader, t: Type, items: Array<MetaTuple>) {
constructor(vr: ?ValueReader, t: Type<any>, items: Array<MetaTuple<any>>) {
super(vr, t, items);
let cum = 0;
this._offsets = this.items.map(i => {
@@ -162,7 +162,7 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
return this._offsets[this._offsets.length - 1];
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return getMetaSequenceChunks(this);
}
@@ -192,7 +192,7 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
});
}
getChildSequence(idx: number): Promise<?Sequence> {
getChildSequence(idx: number): Promise<?Sequence<any>> {
if (!this.isMeta) {
return Promise.resolve(null);
}
@@ -201,7 +201,7 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
return mt.getChildSequence(this.vr);
}
getChildSequenceSync(idx: number): ?Sequence {
getChildSequenceSync(idx: number): ?Sequence<any> {
if (!this.isMeta) {
return null;
}
@@ -212,7 +212,7 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
// Returns the sequences pointed to by all items[i], s.t. start <= i < end, and returns the
// concatenation as one long composite sequence
getCompositeChildSequence(start: number, length: number): Promise<IndexedSequence> {
getCompositeChildSequence(start: number, length: number): Promise<IndexedSequence<any>> {
if (length === 0) {
return Promise.resolve(new EmptySequence());
}
@@ -238,14 +238,14 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple> {
return this._offsets[idx];
}
getCompareFn(other: IndexedSequence): EqualsFn {
getCompareFn(other: IndexedSequence<any>): EqualsFn {
return (idx: number, otherIdx: number) =>
this.items[idx].ref.targetHash.equals(other.items[otherIdx].ref.targetHash);
}
}
export function newMapMetaSequence<K: Value>(vr: ?ValueReader,
tuples: Array<MetaTuple>): OrderedMetaSequence<K> {
tuples: Array<MetaTuple<any>>): OrderedMetaSequence<K> {
const kt = makeUnionType(tuples.map(mt => getCollectionTypes(mt)[0]));
const vt = makeUnionType(tuples.map(mt => getCollectionTypes(mt)[1]));
const t = makeMapType(kt, vt);
@@ -253,15 +253,15 @@ export function newMapMetaSequence<K: Value>(vr: ?ValueReader,
}
export function newSetMetaSequence<K: Value>(vr: ?ValueReader,
tuples: Array<MetaTuple>): OrderedMetaSequence<K> {
tuples: Array<MetaTuple<any>>): OrderedMetaSequence<K> {
const t = makeSetType(makeUnionType(tuples.map(mt => getCollectionTypes(mt)[0])));
return new OrderedMetaSequence(vr, t, tuples);
}
export class OrderedMetaSequence<K: Value> extends OrderedSequence<K, MetaTuple> {
export class OrderedMetaSequence<K: Value> extends OrderedSequence<K, MetaTuple<any>> {
_numLeaves: number;
constructor(vr: ?ValueReader, t: Type, items: Array<MetaTuple>) {
constructor(vr: ?ValueReader, t: Type<any>, items: Array<MetaTuple<any>>) {
super(vr, t, items);
this._numLeaves = items.reduce((l, mt) => l + mt.numLeaves, 0);
}
@@ -274,11 +274,11 @@ export class OrderedMetaSequence<K: Value> extends OrderedSequence<K, MetaTuple>
return this._numLeaves;
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return getMetaSequenceChunks(this);
}
getChildSequence(idx: number): Promise<?Sequence> {
getChildSequence(idx: number): Promise<?Sequence<any>> {
if (!this.isMeta) {
return Promise.resolve(null);
}
@@ -287,7 +287,7 @@ export class OrderedMetaSequence<K: Value> extends OrderedSequence<K, MetaTuple>
return mt.getChildSequence(this.vr);
}
getChildSequenceSync(idx: number): ?Sequence {
getChildSequenceSync(idx: number): ?Sequence<any> {
if (!this.isMeta) {
return null;
}
@@ -296,22 +296,23 @@ export class OrderedMetaSequence<K: Value> extends OrderedSequence<K, MetaTuple>
return mt.getChildSequenceSync();
}
getKey(idx: number): OrderedKey {
getKey(idx: number): OrderedKey<any> {
return this.items[idx].key;
}
getCompareFn(other: OrderedSequence): EqualsFn {
getCompareFn(other: OrderedSequence<any, any>): EqualsFn {
return (idx: number, otherIdx: number) =>
this.items[idx].ref.targetHash.equals(other.items[otherIdx].ref.targetHash);
}
}
export function newOrderedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader): makeChunkFn {
return (tuples: Array<MetaTuple>) => {
export function newOrderedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader):
makeChunkFn<any, any> {
return (tuples: Array<MetaTuple<any>>) => {
const numLeaves = tuples.reduce((l, mt) => l + mt.numLeaves, 0);
const last = tuples[tuples.length - 1];
let seq: OrderedMetaSequence;
let col: Collection;
let seq: OrderedMetaSequence<any>;
let col: Collection<any>;
if (kind === Kind.Map) {
seq = newMapMetaSequence(vr, tuples);
col = Map.fromSequence(seq);
@@ -324,15 +325,16 @@ export function newOrderedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader):
};
}
export function newIndexedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader): makeChunkFn {
return (tuples: Array<MetaTuple>) => {
export function newIndexedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader):
makeChunkFn<any, any> {
return (tuples: Array<MetaTuple<any>>) => {
const sum = tuples.reduce((l, mt) => {
const nv = mt.key.numberValue();
invariant(nv === mt.numLeaves);
return l + nv;
}, 0);
let seq: IndexedMetaSequence;
let col: Collection;
let col: Collection<any>;
if (kind === Kind.List) {
seq = newListMetaSequence(vr, tuples);
col = List.fromSequence(seq);
@@ -346,7 +348,7 @@ export function newIndexedMetaSequenceChunkFn(kind: NomsKind, vr: ?ValueReader):
};
}
function getMetaSequenceChunks(ms: MetaSequence): Array<Ref> {
function getMetaSequenceChunks(ms: MetaSequence<any>): Array<Ref<any>> {
return ms.items.map(mt => mt.ref);
}

View File

@@ -57,16 +57,16 @@ export default async function diff<K: Value, T>(
/**
* Advances |a| and |b| past their common sequence of equal values.
*/
export function fastForward(a: OrderedSequenceCursor, b: OrderedSequenceCursor): Promise<void> {
export function fastForward(a: OrderedSequenceCursor<any, any>, b: OrderedSequenceCursor<any, any>):
Promise<void> {
return a.valid && b.valid ? doFastForward(true, a, b).then() : Promise.resolve();
}
/*
* Returns an array matching |a| and |b| respectively to whether that cursor has more values.
*/
async function doFastForward(allowPastEnd: boolean,
a: OrderedSequenceCursor, b: OrderedSequenceCursor):
Promise<[boolean, boolean]> {
async function doFastForward(allowPastEnd: boolean, a: OrderedSequenceCursor<any, any>,
b: OrderedSequenceCursor<any, any>): Promise<[boolean, boolean]> {
invariant(a.valid && b.valid);
let aHasMore = true, bHasMore = true;
@@ -107,6 +107,6 @@ async function doFastForward(allowPastEnd: boolean,
return [aHasMore, bHasMore];
}
function isCurrentEqual(a: SequenceCursor, b: SequenceCursor): boolean {
function isCurrentEqual(a: SequenceCursor<any, any>, b: SequenceCursor<any, any>): boolean {
return a.sequence.getCompareFn(b.sequence)(a.idx, b.idx);
}

View File

@@ -16,7 +16,7 @@ import Sequence, {SequenceCursor} from './sequence.js';
export class OrderedSequence<K: Value, T> extends Sequence<T> {
// See newCursorAt().
newCursorAtValue(val: ?K, forInsertion: boolean = false, last: boolean = false):
Promise<OrderedSequenceCursor> {
Promise<OrderedSequenceCursor<any, any>> {
let key;
if (val !== null && val !== undefined) {
key = new OrderedKey(val);
@@ -30,10 +30,10 @@ export class OrderedSequence<K: Value, T> extends Sequence<T> {
// -cursor positioned at
// -first value, if |key| is null
// -first value >= |key|
async newCursorAt(key: ?OrderedKey, forInsertion: boolean = false, last: boolean = false):
Promise<OrderedSequenceCursor> {
let cursor: ?OrderedSequenceCursor = null;
let sequence: ?OrderedSequence = this;
async newCursorAt(key: ?OrderedKey<any>, forInsertion: boolean = false, last: boolean = false):
Promise<OrderedSequenceCursor<any, any>> {
let cursor: ?OrderedSequenceCursor<any, any> = null;
let sequence: ?OrderedSequence<any, any> = this;
while (sequence) {
cursor = new OrderedSequenceCursor(cursor, sequence, last ? -1 : 0);
@@ -53,18 +53,18 @@ export class OrderedSequence<K: Value, T> extends Sequence<T> {
/**
* Gets the key used for ordering the sequence at index |idx|.
*/
getKey(idx: number): OrderedKey { // eslint-disable-line no-unused-vars
getKey(idx: number): OrderedKey<any> { // eslint-disable-line no-unused-vars
throw new Error('override');
}
getCompareFn(other: OrderedSequence): EqualsFn { // eslint-disable-line no-unused-vars
getCompareFn(other: OrderedSequence<any, any>): EqualsFn { // eslint-disable-line no-unused-vars
throw new Error('override');
}
}
export class OrderedSequenceCursor<T, K: Value> extends
SequenceCursor<T, OrderedSequence> {
getCurrentKey(): OrderedKey {
SequenceCursor<T, OrderedSequence<any, any>> {
getCurrentKey(): OrderedKey<any> {
invariant(this.idx >= 0 && this.idx < this.length);
return this.sequence.getKey(this.idx);
}
@@ -75,7 +75,7 @@ export class OrderedSequenceCursor<T, K: Value> extends
// Moves the cursor to the first value in sequence >= key and returns true.
// If none exists, returns false.
_seekTo(key: OrderedKey, lastPositionIfNotfound: boolean = false): boolean {
_seekTo(key: OrderedKey<any>, lastPositionIfNotfound: boolean = false): boolean {
// Find smallest idx where key(idx) >= key
this.idx = search(this.length, i => this.sequence.getKey(i).compare(key) >= 0);

View File

@@ -19,7 +19,7 @@ import type Value from './value.js';
import {newStruct} from './struct.js';
function hashIdx(v: Value): string {
return `[#${getHash(v)}]`;
return `[#${getHash(v).toString()}]`;
}
async function assertResolvesTo(expect: Value | null, ref: Value, str: string) {

View File

@@ -342,7 +342,7 @@ export class HashIndexPath {
}
async resolve(value: Value): Promise<Value | null> {
let seq: OrderedSequence;
let seq: OrderedSequence<any, any>;
let getCurrentValue; // (cur: sequenceCursor): Value
if (value instanceof Set) {

View File

@@ -15,7 +15,7 @@ import {invariant} from './assert.js';
import {getTypeOfValue, makeRefType} from './type.js';
import {ValueBase, getChunksOfValue} from './value.js';
export function constructRef(t: Type, targetHash: Hash, height: number): Ref {
export function constructRef(t: Type<any>, targetHash: Hash, height: number): Ref<any> {
invariant(t.kind === Kind.Ref, () => `Not a Ref type: ${describeType(t)}`);
invariant(!targetHash.isEmpty());
const rv = Object.create(Ref.prototype);
@@ -30,7 +30,7 @@ export function maxChunkHeight(v: Value): number {
}
export default class Ref<T: Value> extends ValueBase {
_type: Type;
_type: Type<any>;
// Hash of the value this points to.
targetHash: Hash;
// The length of the longest path of Refs to find any leaf in the graph.
@@ -44,7 +44,7 @@ export default class Ref<T: Value> extends ValueBase {
this.targetHash = getHashOfValue(val);
}
get type(): Type {
get type(): Type<any> {
return this._type;
}
@@ -52,7 +52,7 @@ export default class Ref<T: Value> extends ValueBase {
return vr.readValue(this.targetHash);
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return [this];
}
}

View File

@@ -158,7 +158,7 @@ export default class RollingValueHasher {
}
}
appendType(t: Type): void { // eslint-disable-line no-unused-vars
appendType(t: Type<any>): void { // eslint-disable-line no-unused-vars
// Type bytes aren't included in the byte stream we chunk over
}
}

View File

@@ -15,17 +15,18 @@ import RollingValueHasher from './rolling-value-hasher.js';
import Ref from './ref.js';
export type makeChunkFn<T, S: Sequence> = (items: Array<T>) => [Collection<S>, OrderedKey, number];
export type makeChunkFn<T, S: Sequence<any>> = (items: Array<T>) =>
[Collection<S>, OrderedKey<any>, number];
export type hashValueBytesFn<T> = (item: T, rv: RollingValueHasher) => void;
export async function chunkSequence<T, S: Sequence<T>>(
cursor: SequenceCursor,
cursor: SequenceCursor<any, any>,
vr: ?ValueReader,
insert: Array<T>,
remove: number,
makeChunk: makeChunkFn<T, S>,
parentMakeChunk: makeChunkFn<MetaTuple, MetaSequence>,
hashValueBytes: hashValueBytesFn): Promise<Sequence> {
parentMakeChunk: makeChunkFn<MetaTuple<any>, MetaSequence<any>>,
hashValueBytes: hashValueBytesFn<any>): Promise<Sequence<any>> {
const chunker = new SequenceChunker(cursor, vr, null, makeChunk, parentMakeChunk, hashValueBytes);
if (cursor) {
@@ -50,8 +51,8 @@ export async function chunkSequence<T, S: Sequence<T>>(
export function chunkSequenceSync<T, S: Sequence<T>>(
insert: Array<T>,
makeChunk: makeChunkFn<T, S>,
parentMakeChunk: makeChunkFn<MetaTuple, MetaSequence>,
hashValueBytes: hashValueBytesFn): Sequence {
parentMakeChunk: makeChunkFn<MetaTuple<any>, MetaSequence<any>>,
hashValueBytes: hashValueBytesFn<any>): Sequence<any> {
const chunker = new SequenceChunker(null, null, null, makeChunk, parentMakeChunk, hashValueBytes);
@@ -64,17 +65,18 @@ export default class SequenceChunker<T, S: Sequence<T>> {
_cursor: ?SequenceCursor<T, S>;
_vr: ?ValueReader;
_vw: ?ValueWriter;
_parent: ?SequenceChunker<MetaTuple, MetaSequence>;
_parent: ?SequenceChunker<MetaTuple<any>, MetaSequence<any>>;
_current: Array<T>;
_makeChunk: makeChunkFn<T, S>;
_parentMakeChunk: makeChunkFn<MetaTuple, MetaSequence>;
_parentMakeChunk: makeChunkFn<MetaTuple<any>, MetaSequence<any>>;
_isLeaf: boolean;
_hashValueBytes: hashValueBytesFn;
_hashValueBytes: hashValueBytesFn<any>;
_rv: RollingValueHasher;
_done: boolean;
constructor(cursor: ?SequenceCursor, vr: ?ValueReader, vw: ?ValueWriter, makeChunk: makeChunkFn,
parentMakeChunk: makeChunkFn, hashValueBytes: hashValueBytesFn) {
constructor(cursor: ?SequenceCursor<any, any>, vr: ?ValueReader, vw: ?ValueWriter,
makeChunk: makeChunkFn<any, any>, parentMakeChunk: makeChunkFn<any, any>,
hashValueBytes: hashValueBytesFn<any>) {
this._cursor = cursor;
this._vr = vr;
this._vw = vw;
@@ -204,11 +206,11 @@ export default class SequenceChunker<T, S: Sequence<T>> {
this._parent._isLeaf = false;
}
createSequence(): [Sequence, MetaTuple] {
createSequence(): [Sequence<any>, MetaTuple<any>] {
// If the sequence chunker has a ValueWriter, eagerly write sequences.
let [col, key, numLeaves] = this._makeChunk(this._current); // eslint-disable-line prefer-const
const seq = col.sequence;
let ref: Ref;
let ref: Ref<any>;
if (this._vw) {
ref = this._vw.writeValue(col);
col = null;
@@ -246,7 +248,7 @@ export default class SequenceChunker<T, S: Sequence<T>> {
// Returns the root sequence of the resulting tree. The logic here is subtle, but hopefully
// correct and understandable. See comments inline.
async done(): Promise<Sequence> {
async done(): Promise<Sequence<any>> {
invariant(!this._done);
this._done = true;
@@ -307,7 +309,7 @@ export default class SequenceChunker<T, S: Sequence<T>> {
// Like |done|, but assumes there is no cursor, so it can be synchronous. Necessary for
// constructing collections without Promises or async/await. There is no equivalent in the Go
// code because Go is already synchronous.
doneSync(): Sequence {
doneSync(): Sequence<any> {
invariant(!this._vw);
invariant(!this._cursor);
invariant(!this._done);

View File

@@ -16,7 +16,7 @@ class TestSequence extends Sequence<any> {
}
getChildSequence(idx: number): // eslint-disable-line no-unused-vars
Promise<?Sequence> {
Promise<?Sequence<any>> {
return Promise.resolve(new TestSequence(this.items[idx]));
}
}

View File

@@ -14,16 +14,16 @@ import {ValueBase} from './value.js';
export default class Sequence<T> {
vr: ?ValueReader;
_type: Type;
_type: Type<any>;
_items: Array<T>;
constructor(vr: ?ValueReader, type: Type, items: Array<T>) {
constructor(vr: ?ValueReader, type: Type<any>, items: Array<T>) {
this.vr = vr;
this._type = type;
this._items = items;
}
get type(): Type {
get type(): Type<any> {
return this._type;
}
@@ -39,15 +39,15 @@ export default class Sequence<T> {
return this._items.length;
}
getChildSequence(idx: number): Promise<?Sequence> { // eslint-disable-line no-unused-vars
getChildSequence(idx: number): Promise<?Sequence<any>> { // eslint-disable-line no-unused-vars
return Promise.resolve(null);
}
getChildSequenceSync(idx: number): ?Sequence { // eslint-disable-line no-unused-vars
getChildSequenceSync(idx: number): ?Sequence<any> { // eslint-disable-line no-unused-vars
return null;
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return [];
}
@@ -56,12 +56,12 @@ export default class Sequence<T> {
}
}
export class SequenceCursor<T, S: Sequence> {
parent: ?SequenceCursor;
export class SequenceCursor<T, S: Sequence<any>> {
parent: ?SequenceCursor<any, any>;
sequence: S;
idx: number;
constructor(parent: ?SequenceCursor, sequence: S, idx: number) {
constructor(parent: ?SequenceCursor<any, any>, sequence: S, idx: number) {
this.parent = parent;
this.sequence = sequence;
this.idx = idx;
@@ -213,7 +213,7 @@ export class SequenceCursor<T, S: Sequence> {
}
}
export class SequenceIterator<T, S: Sequence> extends AsyncIterator<T> {
export class SequenceIterator<T, S: Sequence<any>> extends AsyncIterator<T> {
_cursor: SequenceCursor<T, S>;
_advance: Promise<boolean>;
_closed: boolean;
@@ -264,7 +264,7 @@ export class SequenceIterator<T, S: Sequence> extends AsyncIterator<T> {
}
}
export function getValueChunks<T>(items: Array<T>): Array<Ref> {
export function getValueChunks<T>(items: Array<T>): Array<Ref<any>> {
const chunks = [];
for (const item of items) {
if (item instanceof ValueBase) {

View File

@@ -42,7 +42,7 @@ const setOfNRef = 'hius38tca4nfd5lveqe3h905ass99uq2';
const smallRandomSetSize = 200;
const randomSetSize = 2000;
async function validateSet(s: Set, values: number[]): Promise<void> {
async function validateSet(s: Set<any>, values: number[]): Promise<void> {
assert.isTrue(equals(new Set(values), s));
const out = [];
@@ -366,7 +366,7 @@ suite('CompoundSet', () => {
await db.close();
});
function build(vwr: ValueReadWriter, values: Array<string>): Set {
function build(vwr: ValueReadWriter, values: Array<string>): Set<any> {
assert.isTrue(values.length > 1 && Math.log2(values.length) % 1 === 0);
let tuples = [];
@@ -376,7 +376,7 @@ suite('CompoundSet', () => {
tuples.push(new MetaTuple(r, new OrderedKey(values[i + 1]), 2, null));
}
let last: ?Set = null;
let last: ?Set<any> = null;
while (tuples.length > 1) {
const next = [];
for (let i = 0; i < tuples.length; i += 2) {
@@ -654,7 +654,7 @@ suite('CompoundSet', () => {
});
test('Remove last when not loaded', async () => {
const reload = async (s: Set): Promise<Set> => {
const reload = async (s: Set<any>): Promise<Set<any>> => {
const s2 = await db.readValue(db.writeValue(s).targetHash);
invariant(s2 instanceof Set);
return s2;

View File

@@ -27,7 +27,7 @@ import {Kind} from './noms-kind.js';
import type {EqualsFn} from './edit-distance.js';
import {hashValueBytes} from './rolling-value-hasher.js';
function newSetLeafChunkFn<T:Value>(vr: ?ValueReader): makeChunkFn {
function newSetLeafChunkFn<T:Value>(vr: ?ValueReader): makeChunkFn<any, any> {
return (items: Array<T>) => {
const key = new OrderedKey(items.length > 0 ? items[items.length - 1] : false);
const seq = newSetLeafSequence(vr, items);
@@ -43,12 +43,12 @@ function buildSetData<T: Value>(values: Array<any>): Array<T> {
}
export function newSetLeafSequence<K: Value>(
vr: ?ValueReader, items: K[]): SetLeafSequence {
vr: ?ValueReader, items: K[]): SetLeafSequence<any> {
const t = makeSetType(makeUnionType(items.map(getTypeOfValue)));
return new SetLeafSequence(vr, t, items);
}
export default class Set<T: Value> extends Collection<OrderedSequence> {
export default class Set<T: Value> extends Collection<OrderedSequence<any, any>> {
constructor(values: Array<T> = []) {
const seq = chunkSequenceSync(
buildSetData(values),
@@ -94,7 +94,7 @@ export default class Set<T: Value> extends Collection<OrderedSequence> {
return new OrderedSequenceIterator(this.sequence.newCursorAtValue(v));
}
_splice(cursor: OrderedSequenceCursor, insert: Array<T>, remove: number):
_splice(cursor: OrderedSequenceCursor<any, any>, insert: Array<T>, remove: number):
Promise<Set<T>> {
const vr = this.sequence.vr;
return chunkSequence(cursor, vr, insert, remove, newSetLeafChunkFn(vr),
@@ -148,16 +148,16 @@ export default class Set<T: Value> extends Collection<OrderedSequence> {
}
export class SetLeafSequence<K: Value> extends OrderedSequence<K, K> {
getKey(idx: number): OrderedKey {
getKey(idx: number): OrderedKey<any> {
return new OrderedKey(this.items[idx]);
}
getCompareFn(other: OrderedSequence): EqualsFn {
getCompareFn(other: OrderedSequence<any, any>): EqualsFn {
return (idx: number, otherIdx: number) =>
equals(this.items[idx], other.items[otherIdx]);
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return getValueChunks(this.items);
}
}

View File

@@ -75,14 +75,14 @@ suite('Specs', () => {
invariant(testHash);
const invalid = [
'mem', 'mem:', 'http', 'http:', 'http://foo', 'monkey', 'monkey:balls',
'mem:not-hash', 'mem:0000', `mem:::${testHash}`,
'mem:not-hash', 'mem:0000', `mem:::${testHash.toString()}`,
'http://foo:blah',
];
invalid.forEach(s => assert.isNull(HashSpec.parse(s)));
const valid = [
{spec: `mem::${testHash}`, protocol: 'mem', path: '', hash: testHash.toString()},
{spec: `http://someserver.com/some/path::${testHash}`,
{spec: `mem::${testHash.toString()}`, protocol: 'mem', path: '', hash: testHash.toString()},
{spec: `http://someserver.com/some/path::${testHash.toString()}`,
protocol: 'http', path: '//someserver.com/some/path', hash: testHash.toString()},
];
valid.forEach(tc => {
@@ -105,7 +105,7 @@ suite('Specs', () => {
const testHash = Hash.parse('00000000000000000000000000000000');
invariant(testHash);
spec = parseObjectSpec(`http://foo:8000/test::${testHash}`);
spec = parseObjectSpec(`http://foo:8000/test::${testHash.toString()}`);
invariant(spec);
assert.isNotNull(spec.value());
invariant(spec instanceof HashSpec);

View File

@@ -46,7 +46,7 @@ export const fieldNameRe = new RegExp(fieldNameComponentRe.source + '$');
* To reflect over structs you can create a new StructMirror.
*/
export default class Struct extends ValueBase {
_type: Type;
_type: Type<any>;
_values: Value[];
constructor(type: Type<StructDesc>, values: Value[]) {
@@ -55,11 +55,11 @@ export default class Struct extends ValueBase {
init(this, type, values);
}
get type(): Type {
get type(): Type<any> {
return this._type;
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
const mirror = new StructMirror(this);
const chunks = [];
@@ -76,9 +76,9 @@ export default class Struct extends ValueBase {
}
}
function validate(type: Type, values: Value[]): void {
function validate(type: Type<any>, values: Value[]): void {
let i = 0;
type.desc.forEachField((name: string, type: Type) => {
type.desc.forEachField((name: string, type: Type<any>) => {
const value = values[i];
assertSubtype(type, value);
i++;
@@ -88,9 +88,9 @@ function validate(type: Type, values: Value[]): void {
export class StructFieldMirror {
value: Value;
name: string;
type: Type;
type: Type<any>;
constructor(value: Value, name: string, type: Type) {
constructor(value: Value, name: string, type: Type<any>) {
this.value = value;
this.name = name;
this.type = type;
@@ -131,7 +131,7 @@ export class StructMirror<T: Struct> {
return findFieldIndex(name, this.desc.fields) !== -1;
}
set(name: string, value: ?Value): T {
set(name: string, value: Value): T {
const values = setValue(this._values, this.desc.fields, name, value);
return newStructWithType(this.type, values);
}
@@ -189,7 +189,7 @@ function getSetter(i: number) {
};
}
function setValue(values: Value[], fields: Field[], name: string, value: ?Value): Value[] {
function setValue(values: Value[], fields: Field[], name: string, value: Value): Value[] {
const i = findFieldIndex(name, fields);
invariant(i !== -1);
const newValues = values.concat(); // shallow clone
@@ -208,12 +208,12 @@ export function newStructWithType<T: Struct>(type: Type<StructDesc>, values: Val
return newStructWithValues(type, values);
}
function init<T: Struct>(s: T, type: Type, values: Value[]) {
function init<T: Struct>(s: T, type: Type<any>, values: Value[]) {
s._type = type;
s._values = values;
}
export function newStructWithValues<T: Struct>(type: Type, values: Value[]): T {
export function newStructWithValues<T: Struct>(type: Type<any>, values: Value[]): T {
const c = createStructClass(type);
const s = Object.create(c.prototype);
invariant(s instanceof c);

View File

@@ -54,12 +54,12 @@ export function assertValueHash(expectHashStr: string, v: Value) {
assert.strictEqual(expectHashStr, getHashOfValue(v).toString());
}
export function assertValueType(expectType: Type, v: Value) {
export function assertValueType(expectType: Type<any>, v: Value) {
assert.isTrue(equals(expectType, getTypeOfValue(v)));
}
export function assertChunkCountAndType(expectCount: number, expectType: Type,
v: Collection) {
export function assertChunkCountAndType(expectCount: number, expectType: Type<any>,
v: Collection<any>) {
const chunks = v.chunks;
assert.strictEqual(expectCount, chunks.length);
v.chunks.forEach(r => assert.isTrue(equals(expectType, r.type)));
@@ -122,7 +122,7 @@ export function intSequence(count: number, start: number = 0): Array<number> {
return nums;
}
export function deriveCollectionHeight(col: Collection): number {
export function deriveCollectionHeight(col: Collection<any>): number {
// Note: not using seq.items[0].ref.height because the purpose of this method is to
// be redundant.
return col.sequence.isMeta ? 1 + deriveCollectionHeight(notNull(col.sequence.items[0].child)) : 0;

View File

@@ -34,7 +34,7 @@ class IdentTable {
}
class TypeTrie {
t: ?Type;
t: ?Type<any>;
entries: Map<number, TypeTrie>;
constructor() {
@@ -76,7 +76,7 @@ export default class TypeCache {
return this.nextId++;
}
getCompoundType(kind: NomsKind, ...elemTypes: Type[]): Type {
getCompoundType(kind: NomsKind, ...elemTypes: Type<any>[]): Type<any> {
let trie = notNull(this.trieRoots.get(kind));
elemTypes.forEach(t => trie = notNull(trie).traverse(t.id));
if (!notNull(trie).t) {
@@ -86,7 +86,7 @@ export default class TypeCache {
return notNull(trie.t);
}
makeStructType(name: string, fieldNames: string[], fieldTypes: Type[]): Type<StructDesc> {
makeStructType(name: string, fieldNames: string[], fieldTypes: Type<any>[]): Type<StructDesc> {
if (fieldNames.length !== fieldTypes.length) {
throw new Error('Field names and types must be of equal length');
}
@@ -122,7 +122,7 @@ export default class TypeCache {
}
// Creates a new union type unless the elemTypes can be folded into a single non-union type.
makeUnionType(types: Type[]): Type {
makeUnionType(types: Type<any>[]): Type<any> {
types = flattenUnionTypes(types, Object.create(null));
if (types.length === 1) {
return types[0];
@@ -134,11 +134,11 @@ export default class TypeCache {
* We sort the constituent types to dedup equivalent types in memory; we may need to sort again
* after cycles are resolved for final encoding.
*/
types.sort((t1: Type, t2: Type): number => t1.oidCompare(t2));
types.sort((t1: Type<any>, t2: Type<any>): number => t1.oidCompare(t2));
return this.getCompoundType(Kind.Union, ...types);
}
getCycleType(level: number): Type {
getCycleType(level: number): Type<any> {
const trie = notNull(this.trieRoots.get(Kind.Cycle)).traverse(level);
if (!trie.t) {
@@ -151,7 +151,7 @@ export default class TypeCache {
export const staticTypeCache = new TypeCache();
function flattenUnionTypes(types: Type[], seenTypes: {[key: Hash]: boolean}): Type[] {
function flattenUnionTypes(types: Type<any>[], seenTypes: {[key: Hash]: boolean}): Type<any>[] {
if (types.length === 0) {
return types;
}
@@ -203,7 +203,7 @@ function verifyStructName(name: string) {
}
}
function resolveStructCycles(t: Type, parentStructTypes: Type[]): Type {
function resolveStructCycles(t: Type<any>, parentStructTypes: Type<any>[]): Type<any> {
const desc = t.desc;
if (desc instanceof CompoundDesc) {
desc.elemTypes.forEach((et, i) => {
@@ -247,12 +247,12 @@ function resolveStructCycles(t: Type, parentStructTypes: Type[]): Type {
* construction arises, we can attempt to simplify the expansive type or find another means of
* comparison.
*/
function normalize(t: Type) {
walkType(t, [], (tt: Type) => {
function normalize(t: Type<any>) {
walkType(t, [], (tt: Type<any>) => {
generateOID(tt, false);
});
walkType(t, [], (tt: Type, parentStructTypes: Type[]) => {
walkType(t, [], (tt: Type<any>, parentStructTypes: Type<any>[]) => {
if (tt.kind === Kind.Struct) {
for (let i = 0; i < parentStructTypes.length; i++) {
invariant(tt.oidCompare(parentStructTypes[i]) !== 0,
@@ -261,14 +261,15 @@ function normalize(t: Type) {
}
});
walkType(t, [], (tt: Type) => {
walkType(t, [], (tt: Type<any>) => {
if (tt.kind === Kind.Union) {
tt.desc.elemTypes.sort((t1: Type, t2: Type): number => t1.oidCompare(t2));
tt.desc.elemTypes.sort((t1: Type<any>, t2: Type<any>): number => t1.oidCompare(t2));
}
});
}
function walkType(t: Type, parentStructTypes: Type[], cb: (tt: Type, parents: Type[]) => void) {
function walkType(t: Type<any>, parentStructTypes: Type<any>[],
cb: (tt: Type<any>, parents: Type<any>[]) => void) {
const desc = t.desc;
if (desc instanceof StructDesc && parentStructTypes.indexOf(t) >= 0) {
return;
@@ -282,19 +283,19 @@ function walkType(t: Type, parentStructTypes: Type[], cb: (tt: Type, parents: Ty
}
} else if (desc instanceof StructDesc) {
parentStructTypes.push(t);
desc.forEachField((_: string, tt: Type) => walkType(tt, parentStructTypes, cb));
desc.forEachField((_: string, tt: Type<any>) => walkType(tt, parentStructTypes, cb));
parentStructTypes.pop();
}
}
function generateOID(t: Type, allowUnresolvedCycles: boolean) {
function generateOID(t: Type<any>, allowUnresolvedCycles: boolean) {
const buf = new BinaryWriter();
encodeForOID(t, buf, allowUnresolvedCycles, t, []);
t.updateOID(Hash.fromData(buf.data));
}
function encodeForOID(t: Type, buf: BinaryWriter, allowUnresolvedCycles: boolean, root: Type,
parentStructTypes: Type[]) {
function encodeForOID(t: Type<any>, buf: BinaryWriter, allowUnresolvedCycles: boolean,
root: Type<any>, parentStructTypes: Type<any>[]) {
const desc = t.desc;
if (desc instanceof CycleDesc) {
@@ -369,8 +370,8 @@ function encodeForOID(t: Type, buf: BinaryWriter, allowUnresolvedCycles: boolean
}
}
function toUnresolvedType(t: Type, tc: TypeCache, level: number,
parentStructTypes: Type[]): [Type, boolean] {
function toUnresolvedType(t: Type<any>, tc: TypeCache, level: number,
parentStructTypes: Type<any>[]): [Type<any>, boolean] {
const idx = parentStructTypes.indexOf(t);
if (idx >= 0) {
// This type is just a placeholder. It doesn't need an id
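
Stepping back to `makeUnionType` earlier in this file: per its comment, nested unions are flattened and a single remaining element type is returned as-is rather than wrapped in a one-element union. A usage sketch, assuming the primitive type constants and factories exported by type.js (its diff appears further down):

import {makeSetType, makeUnionType, numberType, stringType} from './type.js';

// A one-element union folds down to the element itself.
const justNumber = makeUnionType([numberType]);
// Duplicate and nested union entries are flattened before the union is built.
const numOrStr = makeUnionType([numberType, stringType, numberType]);
// The result is an ordinary Type<any>, usable wherever an element type is expected.
const setOfEither = makeSetType(numOrStr);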

View File

@@ -19,6 +19,7 @@ import {
typeType,
getTypeOfValue,
} from './type.js';
import type {Type} from './type.js';
import {suite, test} from 'mocha';
import {equals} from './compare.js';
import {encodeValue, decodeValue} from './codec.js';
@@ -138,7 +139,7 @@ suite('Type', () => {
[numberType, 'Number'],
[stringType, 'String'],
[makeSetType(numberType), 'Set<Number>'],
].forEach(([t, desc]) => {
].forEach(([t, desc]: [Type<any>, string]) => {
assert.equal(t.describe(), desc);
});
});
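
The annotation added to the callback above pins down the element type of the test table explicitly, presumably because Flow 0.30 otherwise infers something less precise for the mixed array of tuples. The same pattern in isolation, with made-up data:

const cases = [
  ['Set<Number>', 3],
  ['String', 1],
];
cases.forEach(([label, weight]: [string, number]) => {
  // Both destructured names have precise types inside the callback.
  console.log(label, weight * 2);
});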

View File

@@ -19,7 +19,7 @@ import {staticTypeCache} from './type-cache.js';
export interface TypeDesc {
kind: NomsKind;
equals(other: TypeDesc): boolean;
hasUnresolvedCycle(visited: Type[]): boolean;
hasUnresolvedCycle(visited: Type<any>[]): boolean;
}
export class PrimitiveDesc {
@@ -33,16 +33,16 @@ export class PrimitiveDesc {
return other instanceof PrimitiveDesc && other.kind === this.kind;
}
hasUnresolvedCycle(visited: Type[]): boolean { // eslint-disable-line no-unused-vars
hasUnresolvedCycle(visited: Type<any>[]): boolean { // eslint-disable-line no-unused-vars
return false;
}
}
export class CompoundDesc {
kind: NomsKind;
elemTypes: Array<Type>;
elemTypes: Array<Type<any>>;
constructor(kind: NomsKind, elemTypes: Array<Type>) {
constructor(kind: NomsKind, elemTypes: Array<Type<any>>) {
this.kind = kind;
this.elemTypes = elemTypes;
}
@@ -65,14 +65,14 @@ export class CompoundDesc {
return false;
}
hasUnresolvedCycle(visited: Type[]): boolean {
hasUnresolvedCycle(visited: Type<any>[]): boolean {
return this.elemTypes.some(t => t.hasUnresolvedCycle(visited));
}
}
export type Field = {
name: string;
type: Type;
type: Type<any>;
};
export class StructDesc {
@@ -118,18 +118,18 @@ export class StructDesc {
return true;
}
hasUnresolvedCycle(visited: Type[]): boolean {
hasUnresolvedCycle(visited: Type<any>[]): boolean {
return this.fields.some(f => f.type.hasUnresolvedCycle(visited));
}
forEachField(cb: (name: string, type: Type) => void) {
forEachField(cb: (name: string, type: Type<any>) => void) {
const fields = this.fields;
for (let i = 0; i < fields.length; i++) {
cb(fields[i].name, fields[i].type);
}
}
getField(name: string): ?Type {
getField(name: string): ?Type<any> {
const f = findField(name, this.fields);
return f && f.type;
}
@@ -163,7 +163,7 @@ export class CycleDesc {
return other instanceof CycleDesc && other.level === this.level;
}
hasUnresolvedCycle(visited: Type[]): boolean { // eslint-disable-line no-unused-vars
hasUnresolvedCycle(visited: Type<any>[]): boolean { // eslint-disable-line no-unused-vars
return true;
}
}
@@ -186,11 +186,11 @@ export class Type<T: TypeDesc> extends ValueBase {
this.serialization = null;
}
get type(): Type {
get type(): Type<any> {
return typeType;
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return [];
}
@@ -206,7 +206,7 @@ export class Type<T: TypeDesc> extends ValueBase {
this._oid = o;
}
hasUnresolvedCycle(visited: Type[]): boolean {
hasUnresolvedCycle(visited: Type<any>[]): boolean {
if (visited.indexOf(this) >= 0) {
return false;
}
@@ -215,12 +215,12 @@ export class Type<T: TypeDesc> extends ValueBase {
return this._desc.hasUnresolvedCycle(visited);
}
get elemTypes(): Array<Type> {
get elemTypes(): Array<Type<any>> {
invariant(this._desc instanceof CompoundDesc);
return this._desc.elemTypes;
}
oidCompare(other: Type): number {
oidCompare(other: Type<any>): number {
return notNull(this._oid).compare(notNull(other._oid));
}
@@ -233,23 +233,23 @@ function makePrimitiveType(k: NomsKind): Type<PrimitiveDesc> {
return new Type(new PrimitiveDesc(k), k);
}
export function makeListType(elemType: Type): Type<CompoundDesc> {
export function makeListType(elemType: Type<any>): Type<CompoundDesc> {
return staticTypeCache.getCompoundType(Kind.List, elemType);
}
export function makeSetType(elemType: Type): Type<CompoundDesc> {
export function makeSetType(elemType: Type<any>): Type<CompoundDesc> {
return staticTypeCache.getCompoundType(Kind.Set, elemType);
}
export function makeMapType(keyType: Type, valueType: Type): Type<CompoundDesc> {
export function makeMapType(keyType: Type<any>, valueType: Type<any>): Type<CompoundDesc> {
return staticTypeCache.getCompoundType(Kind.Map, keyType, valueType);
}
export function makeRefType(elemType: Type): Type<CompoundDesc> {
export function makeRefType(elemType: Type<any>): Type<CompoundDesc> {
return staticTypeCache.getCompoundType(Kind.Ref, elemType);
}
export function makeStructType(name: string, fieldNames: string[], fieldTypes: Type[]):
export function makeStructType(name: string, fieldNames: string[], fieldTypes: Type<any>[]):
Type<StructDesc> {
return staticTypeCache.makeStructType(name, fieldNames, fieldTypes);
}
@@ -258,18 +258,18 @@ export function makeStructType(name: string, fieldNames: string[], fieldTypes: T
* Creates a union type unless the number of distinct types is 1, in which case that type is
* returned.
*/
export function makeUnionType(types: Type<*>[]): Type<*> {
export function makeUnionType(types: Type<any>[]): Type<any> {
return staticTypeCache.makeUnionType(types);
}
export function makeCycleType(level: number): Type {
export function makeCycleType(level: number): Type<any> {
return staticTypeCache.getCycleType(level);
}
/**
* Gives the existing primitive Type value for a NomsKind.
*/
export function getPrimitiveType(k: NomsKind): Type {
export function getPrimitiveType(k: NomsKind): Type<any> {
invariant(isPrimitiveKind(k));
switch (k) {
case Kind.Bool:
@@ -291,7 +291,7 @@ export function getPrimitiveType(k: NomsKind): Type {
// Returns the Noms type of any value. This will throw if you pass in an object that cannot be
// represented by noms.
export function getTypeOfValue(v: Value): Type {
export function getTypeOfValue(v: Value): Type<any> {
if (v instanceof ValueBase) {
return v.type;
}
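
Note that the factories above keep a precise descriptor where one is statically known (`Type<CompoundDesc>`, `Type<StructDesc>`); the commit only falls back to `Type<any>` where the descriptor genuinely varies. A sketch of reading a struct type's fields back, using the same `invariant` narrowing pattern that readStruct uses later in this diff (the 'Person' struct is made up):

import {makeStructType, numberType, stringType, StructDesc} from './type.js';
import {invariant} from './assert.js';

const personType = makeStructType('Person', ['name', 'age'], [stringType, numberType]);
const desc = personType.desc;
invariant(desc instanceof StructDesc);
desc.forEachField((name, fieldType) => {
  console.log(`${name}: ${fieldType.describe()}`);
});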

View File

@@ -31,9 +31,9 @@ export default class ValueDecoder {
_ds: ValueReader;
_tc: TypeCache;
constructor(r: NomsReader, ds: ValueReader, tc: TypeCache) {
constructor(r: NomsReader, vr: ValueReader, tc: TypeCache) {
this._r = r;
this._ds = ds;
this._ds = vr;
this._tc = tc;
}
@@ -41,13 +41,13 @@ export default class ValueDecoder {
return this._r.readUint8();
}
readRef(t: Type): Ref {
readRef(t: Type<any>): Ref<any> {
const hash = this._r.readHash();
const height = this._r.readUint64();
return constructRef(t, hash, height);
}
readType(): Type {
readType(): Type<any> {
const k = this.readKind();
switch (k) {
case Kind.List:
@@ -62,7 +62,7 @@ export default class ValueDecoder {
return this.readStructType();
case Kind.Union: {
const len = this._r.readUint32();
const types: Type[] = new Array(len);
const types: Type<any>[] = new Array(len);
for (let i = 0; i < len; i++) {
types[i] = this.readType();
}
@@ -93,17 +93,17 @@ export default class ValueDecoder {
return list;
}
readListLeafSequence(t: Type): ListLeafSequence {
readListLeafSequence(t: Type<any>): ListLeafSequence<any> {
const data = this.readValueSequence();
return new ListLeafSequence(this._ds, t, data);
}
readSetLeafSequence(t: Type): SetLeafSequence {
readSetLeafSequence(t: Type<any>): SetLeafSequence<any> {
const data = this.readValueSequence();
return new SetLeafSequence(this._ds, t, data);
}
readMapLeafSequence(t: Type): MapLeafSequence {
readMapLeafSequence(t: Type<any>): MapLeafSequence<any, any> {
const count = this._r.readUint32();
const data = [];
for (let i = 0; i < count; i++) {
@@ -115,10 +115,10 @@ export default class ValueDecoder {
return new MapLeafSequence(this._ds, t, data);
}
readMetaSequence(): Array<MetaTuple> {
readMetaSequence(): Array<MetaTuple<any>> {
const count = this._r.readUint32();
const data: Array<MetaTuple> = [];
const data: Array<MetaTuple<any>> = [];
for (let i = 0; i < count; i++) {
const ref = this.readValue();
const v = this.readValue();
@@ -130,11 +130,11 @@ export default class ValueDecoder {
return data;
}
readIndexedMetaSequence(t: Type): IndexedMetaSequence {
readIndexedMetaSequence(t: Type<any>): IndexedMetaSequence {
return new IndexedMetaSequence(this._ds, t, this.readMetaSequence());
}
readOrderedMetaSequence(t: Type): OrderedMetaSequence {
readOrderedMetaSequence(t: Type<any>): OrderedMetaSequence<any> {
return new OrderedMetaSequence(this._ds, t, this.readMetaSequence());
}
@@ -191,7 +191,7 @@ export default class ValueDecoder {
throw new Error('Unreached');
}
readStruct<T: Struct>(type: Type): T {
readStruct<T: Struct>(type: Type<any>): T {
const {desc} = type;
invariant(desc instanceof StructDesc);

View File

@@ -37,12 +37,12 @@ export default class ValueEncoder {
this._w.writeUint8(k);
}
writeRef(r: Ref) {
writeRef(r: Ref<any>) {
this._w.writeHash(r.targetHash);
this._w.writeUint64(r.height);
}
writeType(t: Type, parentStructTypes: Type<StructDesc>[]) {
writeType(t: Type<any>, parentStructTypes: Type<StructDesc>[]) {
const k = t.kind;
switch (k) {
case Kind.List:
@@ -81,15 +81,15 @@ export default class ValueEncoder {
values.forEach(sv => this.writeValue(sv));
}
writeListLeafSequence(seq: ListLeafSequence) {
writeListLeafSequence(seq: ListLeafSequence<any>) {
this.writeValueList(seq.items);
}
writeSetLeafSequence(seq: SetLeafSequence) {
writeSetLeafSequence(seq: SetLeafSequence<any>) {
this.writeValueList(seq.items);
}
writeMapLeafSequence(seq: MapLeafSequence) {
writeMapLeafSequence(seq: MapLeafSequence<any, any>) {
const count = seq.items.length;
this._w.writeUint32(count);
@@ -99,7 +99,7 @@ export default class ValueEncoder {
});
}
maybeWriteMetaSequence(v: Sequence): boolean {
maybeWriteMetaSequence(v: Sequence<any>): boolean {
if (!v.isMeta) {
this._w.writeBool(false); // not a meta sequence
return false;
@@ -110,7 +110,7 @@ export default class ValueEncoder {
const count = v.items.length;
this._w.writeUint32(count);
for (let i = 0; i < count; i++) {
const tuple: MetaTuple = v.items[i];
const tuple: MetaTuple<any> = v.items[i];
invariant(tuple instanceof MetaTuple);
const child = tuple.child;
if (child && this._vw) {
@@ -218,7 +218,7 @@ export default class ValueEncoder {
case Kind.Value:
throw new Error('A value instance can never have type ' + kindToString[t.kind]);
default:
throw new Error(`Not implemented: ${t.kind} ${v}`);
throw new Error(`Not implemented: ${t.kind} ${String(v)}`);
}
}
@@ -248,7 +248,7 @@ export default class ValueEncoder {
this._w.writeUint32(desc.fieldCount);
desc.forEachField((name: string, type: Type) => {
desc.forEachField((name: string, type: Type<any>) => {
this._w.writeString(name);
this.writeType(type, parentStructTypes);
});
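
The `String(v)` change above, like the matching ones elsewhere in this diff, makes the string coercion explicit; Flow 0.30 rejects interpolating a value it cannot prove to be a string or number into a template literal. A minimal sketch with a hypothetical function:

function formatValue(v: mixed): string {
  // return `got ${v}`;        // Flow 0.30 flags the implicit coercion of `v`
  return `got ${String(v)}`;   // the explicit String() call is always accepted
}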

View File

@@ -138,7 +138,7 @@ export class SizeCache<T> {
this._size = 0;
}
entry(hash: Hash): ?CacheEntry {
entry(hash: Hash): ?CacheEntry<any> {
const key = hash.toString();
const entry = this._cache.get(key);
if (!entry) {
@@ -176,7 +176,7 @@ export class SizeCache<T> {
}
export class NoopCache<T> {
entry(hash: Hash): ?CacheEntry {} // eslint-disable-line no-unused-vars
entry(hash: Hash): ?CacheEntry<any> {} // eslint-disable-line no-unused-vars
get(hash: Hash): ?T {} // eslint-disable-line no-unused-vars
@@ -186,11 +186,12 @@ export class NoopCache<T> {
class HashCacheEntry {
present: boolean;
type: ?Type;
type: ?Type<any>;
provenance: Hash;
constructor(present: boolean = false, type: ?Type = null, provenance: Hash = emptyHash) {
invariant((!present && !type) || (present && type), `present = ${present}, type = ${type}`);
constructor(present: boolean = false, type: ?Type<any> = null, provenance: Hash = emptyHash) {
invariant((!present && !type) || (present && type),
`present = ${String(present)}, type = ${String(type)}`);
this.present = present;
this.type = type;
this.provenance = provenance;
@@ -265,7 +266,7 @@ class HashCache {
}
}
function getTargetType(refVal: Ref): Type {
function getTargetType(refVal: Ref<any>): Type<any> {
invariant(refVal.type.kind === Kind.Ref, refVal.type.kind);
return refVal.type.elemTypes[0];
}
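
`getTargetType` above works because a ref's Noms type is a compound `Ref` type whose single element type is the target type, so `elemTypes[0]` recovers it. A small sketch with the factories from type.js (shown earlier in this diff):

import {makeRefType, numberType} from './type.js';

const refOfNumber = makeRefType(numberType);  // Type<CompoundDesc> with kind Ref
const target = refOfNumber.elemTypes[0];      // back to numberType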

View File

@@ -17,7 +17,7 @@ export class ValueBase {
init(this);
}
get type(): Type {
get type(): Type<any> {
throw new Error('abstract');
}
@@ -25,7 +25,7 @@ export class ValueBase {
return this._hash = ensureHash(this._hash, this);
}
get chunks(): Array<Ref> {
get chunks(): Array<Ref<any>> {
return [];
}
}
@@ -33,7 +33,7 @@ export class ValueBase {
type Value = primitive | ValueBase;
export type {Value as default};
export function getChunksOfValue(v: Value): Array<Ref> {
export function getChunksOfValue(v: Value): Array<Ref<any>> {
if (v instanceof ValueBase) {
return v.chunks;
}

View File

@@ -168,7 +168,7 @@ function makeBlobBytes(byteLength: number): Uint8Array {
return new Uint8Array(ar);
}
function buildList(count: number, createFn: createValueFn): Collection {
function buildList(count: number, createFn: createValueFn): Collection<any> {
const values = new Array(count);
for (let i = 0; i < count; i++) {
values[i] = createFn(i);
@@ -177,7 +177,8 @@ function buildList(count: number, createFn: createValueFn): Collection {
return new List(values);
}
async function buildListIncrementally(count: number, createFn: createValueFn): Promise<Collection> {
async function buildListIncrementally(count: number, createFn: createValueFn):
Promise<Collection<any>> {
let l = new List();
for (let i = 0; i < count; i++) {
l = await l.insert(i, createFn(i));
@@ -186,11 +187,11 @@ async function buildListIncrementally(count: number, createFn: createValueFn): P
return l;
}
function readList(l: List): Promise<void> {
function readList(l: List<any>): Promise<void> {
return l.forEach(() => {});
}
function buildSet(count: number, createFn: createValueFn): Collection {
function buildSet(count: number, createFn: createValueFn): Collection<any> {
const values = new Array(count);
for (let i = 0; i < count; i++) {
values[i] = createFn(i);
@@ -199,7 +200,8 @@ function buildSet(count: number, createFn: createValueFn): Collection {
return new Set(values);
}
async function buildSetIncrementally(count: number, createFn: createValueFn): Promise<Collection> {
async function buildSetIncrementally(count: number, createFn: createValueFn):
Promise<Collection<any>> {
let s = new Set();
for (let i = 0; i < count; i++) {
s = await s.add(createFn(i));
@@ -208,11 +210,11 @@ async function buildSetIncrementally(count: number, createFn: createValueFn): Pr
return s;
}
function readSet(l: Set): Promise<void> {
function readSet(l: Set<any>): Promise<void> {
return l.forEach(() => {});
}
function buildMap(count: number, createFn: createValueFn): Collection {
function buildMap(count: number, createFn: createValueFn): Collection<any> {
const values = new Array(count);
for (let i = 0; i < count * 2; i += 2) {
values[i] = [createFn(i), createFn(i + 1)];
@@ -221,7 +223,8 @@ function buildMap(count: number, createFn: createValueFn): Collection {
return new Map(values);
}
async function buildMapIncrementally(count: number, createFn: createValueFn): Promise<Collection> {
async function buildMapIncrementally(count: number, createFn: createValueFn):
Promise<Collection<any>> {
let m = new Map();
for (let i = 0; i < count * 2; i += 2) {
m = await m.set(createFn(i), createFn(i + 1));
@@ -230,6 +233,6 @@ async function buildMapIncrementally(count: number, createFn: createValueFn): Pr
return m;
}
function readMap(l: Map): Promise<void> {
function readMap(l: Map<any, any>): Promise<void> {
return l.forEach(() => {});
}

View File

@@ -11,13 +11,13 @@ export class BinaryIntEncoderDecoder {
// write n to buf, return number of bytes written
encode(buf: Buffer, n: number): number {
if (Number.isInteger(n)) {
buf.writeInt8(0);
buf.writeInt8(0, 0);
buf.writeInt32BE(n, 1);
return 5;
} else {
const [mantissa, exponent] = frexp(n);
// console.log(`${n} = ${mantissa} * 2^${exponent}`);
buf.writeInt8(1);
buf.writeInt8(1, 0);
buf.writeDoubleBE(mantissa, 1);
buf.writeInt32BE(exponent, 9);
return 12;
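
Two notes on the hunk above. First, `Buffer#writeInt8(value, offset)` is now given its offset explicitly; the value and behavior are unchanged, the call just stops relying on the default offset (presumably required by Flow 0.30's Node definitions). Second, the encoded layout is: a tag byte at offset 0, then either an int32 at offset 1 (5 bytes total) or a double mantissa at offset 1 plus an int32 exponent at offset 9 (12 bytes total). A hypothetical decode counterpart, included only to make that layout concrete (the real decode method is not part of this diff):

function decodeNumber(buf: Buffer): number {
  if (buf.readInt8(0) === 0) {
    return buf.readInt32BE(1);              // integer path, 5 bytes
  }
  const mantissa = buf.readDoubleBE(1);     // float path, 12 bytes
  const exponent = buf.readInt32BE(9);
  return mantissa * Math.pow(2, exponent);  // inverse of frexp
}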

View File

@@ -48,7 +48,7 @@ main().catch(ex => {
process.exit(1);
});
function getEncoder(name: string): EncoderDecoder {
function getEncoder(name) {
if (name === 'string') {
return new StringEncoderDecoder();
} else if (name === 'binary') {
@@ -61,6 +61,7 @@ function getEncoder(name: string): EncoderDecoder {
console.error(`unknown encoding option: ${args.encoding}`);
process.exit(1);
}
throw new Error('unreachable');
}
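
The trailing `throw new Error('unreachable')` added above is the usual workaround for Flow not treating `process.exit(1)` as terminating the function: without it, Flow sees a code path that falls off the end and implicitly returns undefined. The same shape with made-up names:

function pickMode(name: string): string {
  if (name === 'fast' || name === 'slow') {
    return name;
  }
  console.error(`unknown mode: ${name}`);
  process.exit(1);
  // Flow still considers this line reachable, so satisfy the return type here.
  throw new Error('unreachable');
}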
async function main(): Promise<void> {

View File

@@ -9,10 +9,11 @@ export class StringEncoderDecoder {
// write n to buf, return number of bytes written
encode(buf: Buffer, n: number): number {
if (n < 1e20) {
// $FlowIssue: Buffer.prototype.write returns a number
return buf.write(n.toString(10));
} else {
return buf.write(n.toExponential());
}
// $FlowIssue: Buffer.prototype.write returns a number
return buf.write(n.toExponential());
}
// read from buf to return number

View File

@@ -66,7 +66,7 @@ async function getUser(): Promise<Struct> {
return result;
}
async function getPhotos(): Promise<List> {
async function getPhotos(): Promise<List<any>> {
// Calculate the number of expected fetches via the list of albums, so that we can show progress.
// This appears to be the fastest way (photos only let you paginate).
const batchSize = 1000;

View File

@@ -135,8 +135,8 @@ function promptForAuth(url: string): Promise<void> {
process.stdout.write(`Go to ${url} to grant permissions to access Flickr...\n`);
const rl = readline.createInterface({input: process.stdin, output: process.stdout});
rl.question('Press enter when done\n', () => {
process.stdout.write('Authenticated. Next time run:\n' +
`${process.argv.join(' ')} --auth-token=${authToken} --auth-secret=${authSecret}\n\n`);
process.stdout.write(`Authenticated. Next time run:\n${process.argv.join(' ')
} --auth-token=${String(authToken)} --auth-secret=${String(authSecret)}\n\n`);
res();
rl.close();
});

View File

@@ -25,7 +25,7 @@
"classnames": "^2.1.3",
"csv": "^1.1.0",
"flickr-oauth-and-upload": "^0.8.0",
"flow-bin": "^0.27.0",
"flow-bin": "^0.30.0",
"http-server": "^0.9.0",
"humanize": "^0.0.9",
"mocha": "^2.5.3",

View File

@@ -93,7 +93,7 @@ function maybeProcessInning(ep: Promise<XMLElement>): Promise<?Map<string, Array
return ep.then(elem => elem.get('inning')).then(inn => inn && processInning(inn));
}
function processInning(inning: NomsMap<string, NomsMap<*, *>>):
function processInning(inning: NomsMap<string, NomsMap<any, any>>):
Promise<Map<string, Array<Struct>>> {
return Promise.all([inning.get('top'), inning.get('bottom')])
.then(halves => {
@@ -122,7 +122,7 @@ function processInning(inning: NomsMap<string, NomsMap<*, *>>):
});
}
function processAbs(abs: List): Promise<PitcherPitches> {
function processAbs(abs: List<any>): Promise<PitcherPitches> {
const ps = [];
return abs.forEach(ab => {
ps.push(
@@ -160,13 +160,12 @@ function normalize<T: Value>(d: ?T | List<T>): List<T> {
type PitchData = NomsMap<string, string>;
function processPitches(d: List<PitchData>): Promise<Array<Struct>> {
const pitchPs = [];
const pitchPs: Array<Promise<?Struct>> = [];
return d.forEach((p: PitchData) => {
pitchPs.push(getPitch(p));
})
.then(() => pitchPs)
.then(pitchPs => Promise.all(pitchPs))
.then(pitches => pitches.filter((e: ?Struct): boolean => !!e));
.then(() => Promise.all(pitchPs))
.then(pitches => pitches.filter(Boolean));
}
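
The `processPitches` rewrite above uses two idioms worth noting: the accumulator's element type is declared up front (`Array<Promise<?Struct>>`) instead of being inferred from an empty literal, and `filter(Boolean)` drops the null results, which this version of Flow treats as narrowing the element type from `?Struct` to `Struct`. The same shape with plain values (names made up):

function positiveLabels(nums: Array<number>): Promise<Array<string>> {
  const labelPs: Array<Promise<?string>> = [];
  nums.forEach(n => {
    labelPs.push(Promise.resolve(n > 0 ? `+${n}` : null));
  });
  // filter(Boolean) removes the nulls and narrows ?string to string.
  return Promise.all(labelPs).then(labels => labels.filter(Boolean));
}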
function getPitch(p: PitchData): Promise<?Struct> {

View File

@@ -16,7 +16,7 @@ type Props = {
db: string,
}
export default function Layout(props: Props) : React.Element {
export default function Layout(props: Props) : React.Element<any> {
const children = [];
const edges = [];
const lookup = {};

View File

@@ -93,7 +93,7 @@ function formatKeyString(v: any): string {
function handleChunkLoad(hash: Hash, val: any, fromHash: ?string) {
let counter = 0;
function processMetaSequence(id, sequence: IndexedMetaSequence | OrderedMetaSequence,
function processMetaSequence(id, sequence: IndexedMetaSequence | OrderedMetaSequence<any>,
name: string) {
data.nodes[id] = {name: name};
sequence.items.forEach(tuple => {
@@ -223,7 +223,7 @@ function handleNodeClick(e: MouseEvent, id: string) {
}
class Prompt extends React.Component<void, {}, void> {
render(): React.Element {
render(): React.Element<any> {
const fontStyle: {[key: string]: any} = {
fontFamily: 'Menlo',
fontSize: 14,

View File

@@ -41,7 +41,7 @@ export default class Node extends React.Component<void, Props, State> {
};
}
render(): React.Element {
render(): React.Element<any> {
if (this.state.x !== this.props.x ||
this.state.y !== this.props.y) {
window.requestAnimationFrame(() => this.setState({
@@ -82,7 +82,7 @@ export default class Node extends React.Component<void, Props, State> {
);
}
getShape() : React.Element {
getShape() : React.Element<any> {
const className = classNames('icon', {open:this.props.isOpen});
switch (this.props.shape) {
case 'circle':