Initial DataSource setup

Summary:
For context see https://fb.workplace.com/notes/470523670998369

This diff introduces the DataSource abstraction, which can store records. If a key is set, a key -> record mapping is maintained as well, making it easy to update existing records with `upsert` without knowing their exact index.

Internal storage will be slightly altered in upcoming diffs, so don't pay too much attention to that part.
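
For a quick feel of the API this diff adds, a usage sketch (illustrative only; the `Row` type and values are made up, and it assumes the `flipper-plugin` export added below):

```typescript
import {createDataSource} from 'flipper-plugin';

type Row = {id: string; title: string; done?: boolean};

// Keyed datasource: records can be looked up and replaced by their `id`.
const rows = createDataSource<Row>([{id: 'a', title: 'first'}], 'id');
rows.append({id: 'b', title: 'second'});
rows.upsert({id: 'a', title: 'first (updated)', done: true}); // replaces the existing 'a' record
rows.indexOfKey('b'); // 1
rows.recordsById.get('a'); // {id: 'a', title: 'first (updated)', done: true}

// Unkeyed datasource: append-only storage, no key-based lookups.
const log = createDataSource<Row>();
log.append({id: 'x', title: 'something happened'});
```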

Reviewed By: nikoant

Differential Revision: D25953337

fbshipit-source-id: 1c3b53a2fcf61abaf061946be4af21d2aecc6c6d
Author: Michel Weststrate
Date: 2021-03-16 14:54:53 -07:00
Committed by: Facebook GitHub Bot
Parent: b2c542b84c
Commit: 0dc1abdac4
6 changed files with 291 additions and 0 deletions

View File

@@ -109,6 +109,7 @@ module.exports = {
// for reference: https://github.com/typescript-eslint/typescript-eslint/blob/master/packages/eslint-plugin/README.md#extension-rules
'no-unused-vars': 0,
'no-redeclare': 0,
'no-dupe-class-members': 0,
'@typescript-eslint/no-redeclare': 1,
'@typescript-eslint/no-unused-vars': [
1,

View File

@@ -34,6 +34,7 @@ test('Correct top level API exposed', () => {
"Tracked", "Tracked",
"TrackingScope", "TrackingScope",
"batch", "batch",
"createDataSource",
"createState", "createState",
"produce", "produce",
"renderReactRoot", "renderReactRoot",

View File

@@ -71,6 +71,8 @@ export {
} from './utils/Logger';
export {Idler} from './utils/Idler';
export {createDataSource} from './state/datasource/DataSource';
// It's not ideal that this exists in flipper-plugin sources directly,
// but is the least pain for plugin authors.
// Probably we should make sure that testing-library doesn't end up in our final Flipper bundle (which packages flipper-plugin)

View File

@@ -0,0 +1,187 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
// TODO: support better minification
// TODO: separate views from datasource to be able to support multiple transformations simultaneously
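// ExtractKeyType narrows the key column to a primitive: it resolves to string if T[KEY]
// is a string, to number if T[KEY] is a number, and to never otherwise.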
type ExtractKeyType<
T extends object,
KEY extends keyof T
> = T[KEY] extends string ? string : T[KEY] extends number ? number : never;
type AppendEvent<T> = {
type: 'append';
value: T;
};
type UpdateEvent<T> = {
type: 'update';
value: T;
index: number;
};
type DataEvent<T> = AppendEvent<T> | UpdateEvent<T>;
class DataSource<
T extends object,
KEY extends keyof T,
KEY_TYPE extends string | number | never = ExtractKeyType<T, KEY>
> {
private _records: T[] = [];
private _recordsById: Map<KEY_TYPE, T> = new Map();
private keyAttribute: undefined | keyof T;
private idToIndex: Map<KEY_TYPE, number> = new Map();
dataUpdateQueue: DataEvent<T>[] = [];
// viewUpdateQueue;
viewRecords: T[] = [];
nextViewRecords: T[] = []; // for double buffering
/**
* Returns a direct reference to the stored records.
* The collection should be treated as readonly, yet it is mutable;
* the datasource might write to it directly,
* so create a defensive copy if an immutable snapshot is needed:
* `datasource.records.slice()`
*/
get records(): readonly T[] {
return this._records;
}
/**
* Returns a direct reference to the stored records as a lookup map,
* keyed by the configured key attribute.
* The collection should be treated as readonly, yet it is mutable (it might change over time).
* Create a defensive copy if needed.
*/
get recordsById(): ReadonlyMap<KEY_TYPE, T> {
this.assertKeySet();
return this._recordsById;
}
constructor(keyAttribute: KEY | undefined) {
this.keyAttribute = keyAttribute;
}
private assertKeySet() {
if (!this.keyAttribute) {
throw new Error(
'No key has been set. Records cannot be looked up by key',
);
}
}
private getKey(value: T): KEY_TYPE;
private getKey(value: any): any {
this.assertKeySet();
const key = value[this.keyAttribute!];
if ((typeof key === 'string' || typeof key === 'number') && key !== '') {
return key;
}
throw new Error(`Invalid key value: '${key}'`);
}
/**
* Returns the index of a specific key in the *source* set
*/
indexOfKey(key: KEY_TYPE): number {
this.assertKeySet();
return this.idToIndex.get(key) ?? -1;
}
append(value: T) {
if (this.keyAttribute) {
const key = this.getKey(value);
if (this._recordsById.has(key)) {
throw new Error(`Duplicate key: '${key}'`);
}
this._recordsById.set(key, value);
this.idToIndex.set(key, this._records.length);
}
this._records.push(value);
this.emitDataEvent({
type: 'append',
value,
});
}
/**
* Updates or adds a record. Returns `true` if the record already existed.
* Can only be used if a key attribute is set.
*/
upsert(value: T): boolean {
this.assertKeySet();
const key = this.getKey(value);
if (this.idToIndex.has(key)) {
const idx = this.idToIndex.get(key)!;
this.update(idx, value);
return true;
} else {
this.append(value);
return false;
}
}
update(index: number, value: T) {
if (this.keyAttribute) {
const key = this.getKey(value);
const currentKey = this.getKey(this._records[index]);
if (currentKey !== key) {
this._recordsById.delete(currentKey);
this.idToIndex.delete(currentKey);
}
this._recordsById.set(key, value);
this.idToIndex.set(key, index);
}
this._records[index] = value;
this.emitDataEvent({
type: 'update',
value,
index,
});
}
/**
* Removes the first N entries.
* @param amount
*/
shift(_amount: number) {
// increase an offset variable by `amount`, and correct idToIndex reads / writes with it
// remove the affected records from _records, _recordsById and idToIndex
throw new Error('Not Implemented');
}
emitDataEvent(event: DataEvent<T>) {
this.dataUpdateQueue.push(event);
// TODO: schedule
this.processEvents();
}
processEvents() {
const events = this.dataUpdateQueue.splice(0);
events.forEach((_event) => {
// TODO:
});
}
}
export function createDataSource<T extends object, KEY extends keyof T = any>(
initialSet: T[],
keyAttribute: KEY,
): DataSource<T, KEY, ExtractKeyType<T, KEY>>;
export function createDataSource<T extends object>(
initialSet?: T[],
): DataSource<T, never, never>;
export function createDataSource<T extends object, KEY extends keyof T>(
initialSet: T[] = [],
keyAttribute?: KEY | undefined,
): DataSource<T, any, any> {
const ds = new DataSource<T, KEY>(keyAttribute);
initialSet.forEach((value) => ds.append(value));
return ds;
}
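
The `shift` stub above only documents the intended offset approach. A standalone sketch of that trick (not Flipper code; the `OffsetIndex` name and shape are made up for illustration):

```typescript
// Instead of re-numbering every entry in idToIndex after a shift, keep a single
// offset that grows as records drop off the front; stored positions stay absolute.
class OffsetIndex<K> {
  private idToIndex = new Map<K, number>();
  private offset = 0; // how many records have been shifted off so far
  private length = 0; // current number of live records

  append(key: K) {
    // store the absolute position; subtract the offset when reading it back
    this.idToIndex.set(key, this.offset + this.length++);
  }

  indexOfKey(key: K): number {
    const absolute = this.idToIndex.get(key);
    return absolute === undefined ? -1 : absolute - this.offset;
  }

  shift(removedKeys: K[]) {
    removedKeys.forEach((key) => this.idToIndex.delete(key));
    this.offset += removedKeys.length;
    this.length -= removedKeys.length;
  }
}
```

With this bookkeeping a shift only touches the removed entries; the remaining indices are corrected by the single offset instead of an O(n) rewrite of the map.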

View File

@@ -0,0 +1,96 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
import {createDataSource} from '../DataSource';
type Todo = {
id: string;
title: string;
done?: boolean;
};
const drinkCoffee: Todo = {
id: 'coffee',
title: 'drink coffee',
};
const eatCookie: Todo = {
id: 'cookie',
title: 'eat a cookie',
done: true,
};
const submitBug: Todo = {
id: 'bug',
title: 'submit a bug',
done: false,
};
test('can create a datasource', () => {
const ds = createDataSource<Todo>([eatCookie]);
expect(ds.records).toEqual([eatCookie]);
ds.append(drinkCoffee);
expect(ds.records).toEqual([eatCookie, drinkCoffee]);
expect(() => ds.recordsById).toThrow(/Records cannot be looked up by key/);
ds.update(1, submitBug);
expect(ds.records[1]).toBe(submitBug);
});
test('can create a keyed datasource', () => {
const ds = createDataSource<Todo>([eatCookie], 'id');
expect(ds.records).toEqual([eatCookie]);
ds.append(drinkCoffee);
expect(ds.records).toEqual([eatCookie, drinkCoffee]);
expect(ds.recordsById.get('bug')).toBe(undefined);
expect(ds.recordsById.get('cookie')).toBe(eatCookie);
expect(ds.recordsById.get('coffee')).toBe(drinkCoffee);
expect(ds.indexOfKey('bug')).toBe(-1);
expect(ds.indexOfKey('cookie')).toBe(0);
expect(ds.indexOfKey('coffee')).toBe(1);
ds.update(1, submitBug);
expect(ds.records[1]).toBe(submitBug);
expect(ds.recordsById.get('coffee')).toBe(undefined);
expect(ds.recordsById.get('bug')).toBe(submitBug);
expect(ds.indexOfKey('bug')).toBe(1);
expect(ds.indexOfKey('cookie')).toBe(0);
expect(ds.indexOfKey('coffee')).toBe(-1);
// upsert existing
const newBug = {
id: 'bug',
title: 'file a bug',
done: true,
};
ds.upsert(newBug);
expect(ds.records[1]).toBe(newBug);
expect(ds.recordsById.get('bug')).toBe(newBug);
// upsert new
const trash = {
id: 'trash',
title: 'take trash out',
};
ds.upsert(trash);
expect(ds.records[2]).toBe(trash);
expect(ds.recordsById.get('trash')).toBe(trash);
});
test('throws on invalid keys', () => {
const ds = createDataSource<Todo>([eatCookie], 'id');
expect(() => {
ds.append({id: '', title: 'test'});
}).toThrow(`Invalid key value: ''`);
expect(() => {
ds.append({id: 'cookie', title: 'test'});
}).toThrow(`Duplicate key: 'cookie'`);
});

View File

@@ -438,6 +438,10 @@ rows.update(draft => {
console.log(rows.get().length) // 2
```
### createDataSource
Coming soon.
## React Hooks
### usePlugin