Reorganise for easier extraction
Summary: To make the DataSource abstraction reusable for other teams and an upcoming talk, this diff moves all DataSource storage & virtualization logic into one folder. A build process and demo project will be set up in later diffs.

Reviewed By: nikoant

Differential Revision: D28056700

fbshipit-source-id: 7cfe5b40bbbe387da711f765a604a45029d451c7
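For orientation, here is a minimal usage sketch of the DataSource API being moved in this diff, based on the code below; the import path and the Todo type are illustrative assumptions, since the final folder layout and build setup only land in later diffs.

// Minimal sketch (not part of this diff): how the moved DataSource API is typically used.
// The import path is an assumption; the extracted folder/package name is not final here.
import {createDataSource} from '../data-source/DataSource';

type Todo = {id: string; title: string; done: boolean};

// A keyed data source: 'id' is assumed unique and enables getById / upsert / deleteByKey.
const todos = createDataSource<Todo>([], {key: 'id', limit: 10000});

todos.append({id: 'coffee', title: 'drink coffee', done: false});
todos.upsert({id: 'coffee', title: 'drink coffee', done: true}); // replaces the record by key

// The default view applies filtering, sorting and windowing for virtualized rendering.
todos.view.setFilter((t) => !t.done);
todos.view.setSortBy('title');
todos.view.setWindow(0, 100); // only rows 0..100 are materialized
const rows = todos.view.output(); // defensive copy of the visible, sorted window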
committed by Facebook GitHub Bot
parent 5a7d4e2f17
commit 84e2646909
@@ -1,858 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import {
  sortedIndexBy,
  sortedLastIndexBy,
  property,
  sortBy as lodashSort,
} from 'lodash';
import {Persistable, registerStorageAtom} from '../plugin/PluginBase';

// If the dataSource exceeds its limit, this fraction of the records will be dropped to free up space
const dropFactor = 0.1;
// The default maximum number of records before we start shifting the data set
const defaultLimit = 100 * 1000;
// if a shift on a sorted dataset exceeds this threshold, we assume it is faster to re-sort the entire set,
// rather than search and remove the affected individual items
const shiftRebuildTreshold = 0.05;

type ExtractKeyType<T, KEY extends keyof T> = T[KEY] extends string
  ? string
  : T[KEY] extends number
  ? number
  : never;

type AppendEvent<T> = {
  type: 'append';
  entry: Entry<T>;
};
type UpdateEvent<T> = {
|
||||
type: 'update';
|
||||
entry: Entry<T>;
|
||||
oldValue: T;
|
||||
oldVisible: boolean;
|
||||
index: number;
|
||||
};
|
||||
type RemoveEvent<T> = {
|
||||
type: 'remove';
|
||||
entry: Entry<T>;
|
||||
index: number;
|
||||
};
|
||||
type ShiftEvent<T> = {
|
||||
type: 'shift';
|
||||
entries: Entry<T>[];
|
||||
amount: number;
|
||||
};
|
||||
|
||||
type DataEvent<T> =
|
||||
| AppendEvent<T>
|
||||
| UpdateEvent<T>
|
||||
| RemoveEvent<T>
|
||||
| ShiftEvent<T>;
|
||||
|
||||
type Entry<T> = {
|
||||
value: T;
|
||||
id: number; // insertion based
|
||||
visible: boolean; // matches current filter?
|
||||
approxIndex: number; // we could possibly live at this index in the output. No guarantees.
|
||||
};
|
||||
|
||||
type Primitive = number | string | boolean | null | undefined;
|
||||
|
||||
type OutputChange =
|
||||
| {
|
||||
type: 'shift';
|
||||
index: number;
|
||||
location: 'before' | 'in' | 'after'; // relative to current window
|
||||
delta: number;
|
||||
newCount: number;
|
||||
}
|
||||
| {
|
||||
// an item, inside the current window, was changed
|
||||
type: 'update';
|
||||
index: number;
|
||||
}
|
||||
| {
|
||||
// something big and awesome happened. Drop earlier updates to the floor and start again
|
||||
// like: clear, filter or sorting change, etc
|
||||
type: 'reset';
|
||||
newCount: number;
|
||||
};
|
||||
|
||||
export class DataSource<
|
||||
T = any,
|
||||
KEY extends keyof T = any,
|
||||
KEY_TYPE extends string | number | never = ExtractKeyType<T, KEY>
|
||||
> implements Persistable {
|
||||
private nextId = 0;
|
||||
private _records: Entry<T>[] = [];
|
||||
private _recordsById: Map<KEY_TYPE, T> = new Map();
|
||||
/**
|
||||
* @readonly
|
||||
*/
|
||||
public keyAttribute: undefined | keyof T;
|
||||
private idToIndex: Map<KEY_TYPE, number> = new Map();
|
||||
|
||||
// if we shift the window, we increase shiftOffset to correct idToIndex results, rather than remapping all values
|
||||
private shiftOffset = 0;
|
||||
|
||||
/**
|
||||
* The maximum number of records this DataSource can hold
|
||||
*/
|
||||
public limit = defaultLimit;
|
||||
|
||||
/**
|
||||
* The default view on this data source. A view applies
|
||||
* sorting, filtering and windowing to get more constrained output.
|
||||
*
|
||||
* Additional views can be created through the fork method.
|
||||
*/
|
||||
public readonly view: DataSourceView<T>;
|
||||
|
||||
constructor(keyAttribute: KEY | undefined) {
|
||||
this.keyAttribute = keyAttribute;
|
||||
this.view = new DataSourceView<T>(this);
|
||||
}
|
||||
|
||||
public get size() {
|
||||
return this._records.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a defensive copy of the stored records.
|
||||
* This is a O(n) operation! Prefer using .size and .get instead if only a subset is needed.
|
||||
*/
|
||||
public records(): readonly T[] {
|
||||
return this._records.map(unwrap);
|
||||
}
|
||||
|
||||
public get(index: number) {
|
||||
return unwrap(this._records[index]);
|
||||
}
|
||||
|
||||
public has(key: KEY_TYPE) {
|
||||
this.assertKeySet();
|
||||
return this._recordsById.has(key);
|
||||
}
|
||||
|
||||
public getById(key: KEY_TYPE) {
|
||||
this.assertKeySet();
|
||||
return this._recordsById.get(key);
|
||||
}
|
||||
|
||||
public keys(): IterableIterator<KEY_TYPE> {
|
||||
this.assertKeySet();
|
||||
return this._recordsById.keys();
|
||||
}
|
||||
|
||||
public entries(): IterableIterator<[KEY_TYPE, T]> {
|
||||
this.assertKeySet();
|
||||
return this._recordsById.entries();
|
||||
}
|
||||
|
||||
public [Symbol.iterator](): IterableIterator<T> {
|
||||
const self = this;
|
||||
let offset = 0;
|
||||
return {
|
||||
next() {
|
||||
offset++;
|
||||
if (offset > self.size) {
|
||||
return {done: true, value: undefined};
|
||||
} else {
|
||||
return {
|
||||
value: self._records[offset - 1].value,
|
||||
};
|
||||
}
|
||||
},
|
||||
[Symbol.iterator]() {
|
||||
return this;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the index of a specific key in the *records* set.
|
||||
* Returns -1 if the record wasn't found
|
||||
*/
|
||||
public getIndexOfKey(key: KEY_TYPE): number {
|
||||
this.assertKeySet();
|
||||
const stored = this.idToIndex.get(key);
|
||||
return stored === undefined ? -1 : stored + this.shiftOffset;
|
||||
}
|
||||
|
||||
public append(value: T) {
|
||||
if (this._records.length >= this.limit) {
|
||||
// we're full! let's free up some space
|
||||
this.shift(Math.ceil(this.limit * dropFactor));
|
||||
}
|
||||
if (this.keyAttribute) {
|
||||
const key = this.getKey(value);
|
||||
if (this._recordsById.has(key)) {
|
||||
throw new Error(`Duplicate key: '${key}'`);
|
||||
}
|
||||
this._recordsById.set(key, value);
|
||||
this.storeIndexOfKey(key, this._records.length);
|
||||
}
|
||||
const entry = {
|
||||
value,
|
||||
id: ++this.nextId,
|
||||
// once we have multiple views, the following fields should be stored per view
|
||||
visible: true,
|
||||
approxIndex: -1,
|
||||
};
|
||||
this._records.push(entry);
|
||||
this.emitDataEvent({
|
||||
type: 'append',
|
||||
entry,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates or adds a record. Returns `true` if the record already existed.
|
||||
* Can only be used if a key is used.
|
||||
*/
|
||||
public upsert(value: T): boolean {
|
||||
this.assertKeySet();
|
||||
const key = this.getKey(value);
|
||||
if (this.idToIndex.has(key)) {
|
||||
this.update(this.getIndexOfKey(key), value);
|
||||
return true;
|
||||
} else {
|
||||
this.append(value);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Replaces an item in the base data collection.
|
||||
* Note that the index is based on the insertion order, and not based on the current view
|
||||
*/
|
||||
public update(index: number, value: T) {
|
||||
const entry = this._records[index];
|
||||
const oldValue = entry.value;
|
||||
if (value === oldValue) {
|
||||
return;
|
||||
}
|
||||
const oldVisible = entry.visible;
|
||||
entry.value = value;
|
||||
if (this.keyAttribute) {
|
||||
const key = this.getKey(value);
|
||||
const currentKey = this.getKey(oldValue);
|
||||
if (currentKey !== key) {
|
||||
const existingIndex = this.getIndexOfKey(key);
|
||||
if (existingIndex !== -1 && existingIndex !== index) {
|
||||
throw new Error(
|
||||
`Trying to insert duplicate key '${key}', which already exists in the collection`,
|
||||
);
|
||||
}
|
||||
this._recordsById.delete(currentKey);
|
||||
this.idToIndex.delete(currentKey);
|
||||
}
|
||||
this._recordsById.set(key, value);
|
||||
this.storeIndexOfKey(key, index);
|
||||
}
|
||||
this.emitDataEvent({
|
||||
type: 'update',
|
||||
entry,
|
||||
oldValue,
|
||||
oldVisible,
|
||||
index,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param index
|
||||
*
|
||||
* Warning: this operation can be O(n) if a key is set
|
||||
*/
|
||||
public delete(index: number) {
|
||||
if (index < 0 || index >= this._records.length) {
|
||||
throw new Error('Out of bounds: ' + index);
|
||||
}
|
||||
const entry = this._records.splice(index, 1)[0];
|
||||
if (this.keyAttribute) {
|
||||
const key = this.getKey(entry.value);
|
||||
this._recordsById.delete(key);
|
||||
this.idToIndex.delete(key);
|
||||
if (index === 0) {
|
||||
// lucky happy case, this is more efficient
|
||||
this.shiftOffset -= 1;
|
||||
} else {
|
||||
// Optimization: this is O(n)! Should be done as an async job
|
||||
this.idToIndex.forEach((keyIndex, key) => {
|
||||
if (keyIndex + this.shiftOffset > index)
|
||||
this.storeIndexOfKey(key, keyIndex - 1);
|
||||
});
|
||||
}
|
||||
}
|
||||
this.emitDataEvent({
|
||||
type: 'remove',
|
||||
index,
|
||||
entry,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the item with the given key from this dataSource.
|
||||
* Returns false if no record with the given key was found
|
||||
*
|
||||
* Warning: this operation can be O(n) if a key is set
|
||||
*/
|
||||
public deleteByKey(keyValue: KEY_TYPE): boolean {
|
||||
this.assertKeySet();
|
||||
const index = this.getIndexOfKey(keyValue);
|
||||
if (index === -1) {
|
||||
return false;
|
||||
}
|
||||
this.delete(index);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the first N entries.
|
||||
* @param amount
|
||||
*/
|
||||
public shift(amount: number) {
|
||||
amount = Math.min(amount, this._records.length);
|
||||
if (amount === this._records.length) {
|
||||
this.clear();
|
||||
return;
|
||||
}
|
||||
// increase an offset variable with amount, and correct idToIndex reads / writes with that
|
||||
this.shiftOffset -= amount;
|
||||
// removes the affected records for _records, _recordsById and idToIndex
|
||||
const removed = this._records.splice(0, amount);
|
||||
if (this.keyAttribute) {
|
||||
removed.forEach((entry) => {
|
||||
const key = this.getKey(entry.value);
|
||||
this._recordsById.delete(key);
|
||||
this.idToIndex.delete(key);
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
this.view.isSorted &&
|
||||
removed.length > 10 &&
|
||||
removed.length > shiftRebuildTreshold * this._records.length
|
||||
) {
|
||||
// removing a large amount of items is expensive when doing it sorted,
|
||||
// let's fallback to the async processing of all data instead
|
||||
// MWE: there is a risk here that rebuilding is too blocking, as this might happen
|
||||
// in background when new data arrives, and not explicitly on a user interaction
|
||||
this.view.rebuild();
|
||||
} else {
|
||||
this.emitDataEvent({
|
||||
type: 'shift',
|
||||
entries: removed,
|
||||
amount,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The clear operation removes any records stored, but will keep the current view preferences such as sorting and filtering
|
||||
*/
|
||||
public clear() {
|
||||
this._records = [];
|
||||
this._recordsById = new Map();
|
||||
this.shiftOffset = 0;
|
||||
this.idToIndex = new Map();
|
||||
this.view.rebuild();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a fork of this dataSource, that shares the source data with this dataSource,
|
||||
* but has its own FSRW pipeline, to allow multiple views on the same data
|
||||
*/
|
||||
public fork(): DataSourceView<T> {
|
||||
throw new Error(
|
||||
'Not implemented. Please contact oncall if this feature is needed',
|
||||
);
|
||||
}
|
||||
|
||||
private assertKeySet() {
|
||||
if (!this.keyAttribute) {
|
||||
throw new Error(
|
||||
'No key has been set. Records cannot be looked up by key',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private getKey(value: T): KEY_TYPE;
|
||||
private getKey(value: any): any {
|
||||
this.assertKeySet();
|
||||
const key = value[this.keyAttribute!];
|
||||
if ((typeof key === 'string' || typeof key === 'number') && key !== '') {
|
||||
return key;
|
||||
}
|
||||
throw new Error(`Invalid key value: '${key}'`);
|
||||
}
|
||||
|
||||
private storeIndexOfKey(key: KEY_TYPE, index: number) {
|
||||
// de-normalize the index, so that on later lookups it's corrected again
|
||||
this.idToIndex.set(key, index - this.shiftOffset);
|
||||
}
|
||||
|
||||
private emitDataEvent(event: DataEvent<T>) {
|
||||
// Optimization: potentially we could schedule this to happen async,
|
||||
// using a queue,
|
||||
// or only if there is an active view (although that could leak memory)
|
||||
this.view.processEvent(event);
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
serialize(): readonly T[] {
|
||||
return this.records();
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
deserialize(value: any[]) {
|
||||
this.clear();
|
||||
value.forEach((record) => {
|
||||
this.append(record);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
type CreateDataSourceOptions<T, K extends keyof T> = {
|
||||
/**
|
||||
* If a key is set, the given field of the records is assumed to be unique,
|
||||
* and its value can be used to perform lookups and upserts.
|
||||
*/
|
||||
key?: K;
|
||||
/**
|
||||
* The maximum amount of records that this DataSource will store.
|
||||
* If the limit is exceeded, the oldest records will automatically be dropped to make room for the new ones
|
||||
*/
|
||||
limit?: number;
|
||||
/**
|
||||
* Should this state persist when exporting a plugin?
|
||||
* If set, the dataSource will be saved / loaded under the key provided
|
||||
*/
|
||||
persist?: string;
|
||||
};
|
||||
|
||||
export function createDataSource<T, KEY extends keyof T = any>(
|
||||
initialSet: T[],
|
||||
options: CreateDataSourceOptions<T, KEY>,
|
||||
): DataSource<T, KEY, ExtractKeyType<T, KEY>>;
|
||||
export function createDataSource<T>(
|
||||
initialSet?: T[],
|
||||
): DataSource<T, never, never>;
|
||||
export function createDataSource<T, KEY extends keyof T>(
|
||||
initialSet: T[] = [],
|
||||
options?: CreateDataSourceOptions<T, KEY>,
|
||||
): DataSource<T, any, any> {
|
||||
const ds = new DataSource<T, KEY>(options?.key);
|
||||
if (options?.limit !== undefined) {
|
||||
ds.limit = options.limit;
|
||||
}
|
||||
registerStorageAtom(options?.persist, ds);
|
||||
initialSet.forEach((value) => ds.append(value));
|
||||
return ds;
|
||||
}
|
||||
|
||||
function unwrap<T>(entry: Entry<T>): T {
|
||||
return entry?.value;
|
||||
}
|
||||
|
||||
class DataSourceView<T> {
|
||||
public readonly datasource: DataSource<T>;
|
||||
private sortBy: undefined | ((a: T) => Primitive) = undefined;
|
||||
private reverse: boolean = false;
|
||||
private filter?: (value: T) => boolean = undefined;
|
||||
|
||||
/**
|
||||
* @readonly
|
||||
*/
|
||||
public windowStart = 0;
|
||||
/**
|
||||
* @readonly
|
||||
*/
|
||||
public windowEnd = 0;
|
||||
|
||||
private outputChangeListener?: (change: OutputChange) => void;
|
||||
|
||||
/**
|
||||
* This is the base view data, that is filtered and sorted, but not reversed or windowed
|
||||
*/
|
||||
private _output: Entry<T>[] = [];
|
||||
|
||||
constructor(datasource: DataSource<T, any, any>) {
|
||||
this.datasource = datasource;
|
||||
}
|
||||
|
||||
public get size() {
|
||||
return this._output.length;
|
||||
}
|
||||
|
||||
public get isSorted() {
|
||||
return !!this.sortBy;
|
||||
}
|
||||
|
||||
public get isFiltered() {
|
||||
return !!this.filter;
|
||||
}
|
||||
|
||||
public get isReversed() {
|
||||
return this.reverse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a defensive copy of the current output.
|
||||
* Sort, filter and reverse are applied.
|
||||
* Start and end behave like slice, and default to the currently active window.
|
||||
*/
|
||||
public output(start = this.windowStart, end = this.windowEnd): readonly T[] {
|
||||
if (this.reverse) {
|
||||
return this._output
|
||||
.slice(this._output.length - end, this._output.length - start)
|
||||
.reverse()
|
||||
.map((e) => e.value);
|
||||
} else {
|
||||
return this._output.slice(start, end).map((e) => e.value);
|
||||
}
|
||||
}
|
||||
|
||||
public setWindow(start: number, end: number) {
|
||||
this.windowStart = start;
|
||||
this.windowEnd = end;
|
||||
}
|
||||
|
||||
public setListener(
|
||||
listener: typeof DataSourceView['prototype']['outputChangeListener'],
|
||||
) {
|
||||
if (this.outputChangeListener && listener) {
|
||||
console.warn('outputChangeListener already set');
|
||||
}
|
||||
this.outputChangeListener = listener;
|
||||
}
|
||||
|
||||
public setSortBy(sortBy: undefined | keyof T | ((a: T) => Primitive)) {
|
||||
if (this.sortBy === sortBy) {
|
||||
return;
|
||||
}
|
||||
if (
|
||||
typeof sortBy === 'string' &&
|
||||
(!this.sortBy || (this.sortBy as any).sortByKey !== sortBy)
|
||||
) {
|
||||
sortBy = property(sortBy);
|
||||
Object.assign(sortBy, {
|
||||
sortByKey: sortBy,
|
||||
});
|
||||
}
|
||||
this.sortBy = sortBy as any;
|
||||
this.rebuild();
|
||||
}
|
||||
|
||||
public setFilter(filter: undefined | ((value: T) => boolean)) {
|
||||
if (this.filter !== filter) {
|
||||
this.filter = filter;
|
||||
this.rebuild();
|
||||
}
|
||||
}
|
||||
|
||||
public toggleReversed() {
|
||||
this.setReversed(!this.reverse);
|
||||
}
|
||||
|
||||
public setReversed(reverse: boolean) {
|
||||
if (this.reverse !== reverse) {
|
||||
this.reverse = reverse;
|
||||
this.notifyReset(this._output.length);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The reset operation resets any view preferences such as sorting and filtering, but keeps the current set of records.
|
||||
*/
|
||||
reset() {
|
||||
this.sortBy = undefined;
|
||||
this.reverse = false;
|
||||
this.filter = undefined;
|
||||
this.windowStart = 0;
|
||||
this.windowEnd = 0;
|
||||
this.rebuild();
|
||||
}
|
||||
|
||||
private normalizeIndex(viewIndex: number): number {
|
||||
return this.reverse ? this._output.length - 1 - viewIndex : viewIndex;
|
||||
}
|
||||
|
||||
public get(viewIndex: number): T {
|
||||
return this._output[this.normalizeIndex(viewIndex)]?.value;
|
||||
}
|
||||
|
||||
public [Symbol.iterator](): IterableIterator<T> {
|
||||
const self = this;
|
||||
let offset = this.windowStart;
|
||||
return {
|
||||
next() {
|
||||
offset++;
|
||||
if (offset > self.windowEnd || offset > self.size) {
|
||||
return {done: true, value: undefined};
|
||||
} else {
|
||||
return {
|
||||
value: self.get(offset - 1),
|
||||
};
|
||||
}
|
||||
},
|
||||
[Symbol.iterator]() {
|
||||
return this;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
private notifyItemUpdated(viewIndex: number) {
|
||||
viewIndex = this.normalizeIndex(viewIndex);
|
||||
if (
|
||||
!this.outputChangeListener ||
|
||||
viewIndex < this.windowStart ||
|
||||
viewIndex >= this.windowEnd
|
||||
) {
|
||||
return;
|
||||
}
|
||||
this.outputChangeListener({
|
||||
type: 'update',
|
||||
index: viewIndex,
|
||||
});
|
||||
}
|
||||
|
||||
private notifyItemShift(index: number, delta: number) {
|
||||
if (!this.outputChangeListener) {
|
||||
return;
|
||||
}
|
||||
let viewIndex = this.normalizeIndex(index);
|
||||
if (this.reverse && delta < 0) {
|
||||
viewIndex -= delta; // we need to correct for normalize already using the new length after applying this change
|
||||
}
|
||||
// Idea: we could add an option to automatically shift the window for before events.
|
||||
this.outputChangeListener({
|
||||
type: 'shift',
|
||||
delta,
|
||||
index: viewIndex,
|
||||
newCount: this._output.length,
|
||||
location:
|
||||
viewIndex < this.windowStart
|
||||
? 'before'
|
||||
: viewIndex >= this.windowEnd
|
||||
? 'after'
|
||||
: 'in',
|
||||
});
|
||||
}
|
||||
|
||||
private notifyReset(count: number) {
|
||||
this.outputChangeListener?.({
|
||||
type: 'reset',
|
||||
newCount: count,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
processEvent(event: DataEvent<T>) {
|
||||
const {_output: output, sortBy, filter} = this;
|
||||
switch (event.type) {
|
||||
case 'append': {
|
||||
const {entry} = event;
|
||||
entry.visible = filter ? filter(entry.value) : true;
|
||||
if (!entry.visible) {
|
||||
// not in filter? skip this entry
|
||||
return;
|
||||
}
|
||||
if (!sortBy) {
|
||||
// no sorting? insert at the end, or beginning
|
||||
entry.approxIndex = output.length;
|
||||
output.push(entry);
|
||||
this.notifyItemShift(entry.approxIndex, 1);
|
||||
} else {
|
||||
this.insertSorted(entry);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'update': {
|
||||
const {entry} = event;
|
||||
entry.visible = filter ? filter(entry.value) : true;
|
||||
// short circuit; no view active so update straight away
|
||||
if (!filter && !sortBy) {
|
||||
output[event.index].approxIndex = event.index;
|
||||
this.notifyItemUpdated(event.index);
|
||||
} else if (!event.oldVisible) {
|
||||
if (!entry.visible) {
|
||||
// Done!
|
||||
} else {
|
||||
// insertion, not visible before
|
||||
this.insertSorted(entry);
|
||||
}
|
||||
} else {
|
||||
// Entry was visible previously
|
||||
const existingIndex = this.getSortedIndex(entry, event.oldValue);
|
||||
if (!entry.visible) {
|
||||
// Remove from output
|
||||
output.splice(existingIndex, 1);
|
||||
this.notifyItemShift(existingIndex, -1);
|
||||
} else {
|
||||
// Entry was and still is visible
|
||||
if (
|
||||
!this.sortBy ||
|
||||
this.sortBy(event.oldValue) === this.sortBy(entry.value)
|
||||
) {
|
||||
// Still at same position, so done!
|
||||
this.notifyItemUpdated(existingIndex);
|
||||
} else {
|
||||
// item needs to be moved because of sorting
|
||||
// possible optimization: if we discover that old and new index would be the same,
|
||||
// despite different sort values, we could still emit only an update instead of two shifts
|
||||
output.splice(existingIndex, 1);
|
||||
this.notifyItemShift(existingIndex, -1);
|
||||
// find new sort index
|
||||
this.insertSorted(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'remove': {
|
||||
this.processRemoveEvent(event.index, event.entry);
|
||||
break;
|
||||
}
|
||||
case 'shift': {
|
||||
// no sorting? then all items are removed from the start so optimize for that
|
||||
if (!sortBy) {
|
||||
let amount = 0;
|
||||
if (!filter) {
|
||||
amount = event.amount;
|
||||
} else {
|
||||
// if there is a filter, count the visible entries and shift those
|
||||
for (let i = 0; i < event.entries.length; i++)
|
||||
if (event.entries[i].visible) amount++;
|
||||
}
|
||||
output.splice(0, amount);
|
||||
this.notifyItemShift(0, -amount);
|
||||
} else {
|
||||
// we have sorting, so we need to remove item by item
|
||||
// we do this backward, so that approxIndex is more likely to be correct
|
||||
for (let i = event.entries.length - 1; i >= 0; i--) {
|
||||
this.processRemoveEvent(i, event.entries[i]);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error('unknown event type');
|
||||
}
|
||||
}
|
||||
|
||||
private processRemoveEvent(index: number, entry: Entry<T>) {
|
||||
const {_output: output, sortBy, filter} = this;
|
||||
|
||||
// filter active, and not visible? short circuit
|
||||
if (!entry.visible) {
|
||||
return;
|
||||
}
|
||||
// no sorting, no filter?
|
||||
if (!sortBy && !filter) {
|
||||
output.splice(index, 1);
|
||||
this.notifyItemShift(index, -1);
|
||||
} else {
|
||||
// sorting or filter is active, find the actual location
|
||||
const existingIndex = this.getSortedIndex(entry, entry.value);
|
||||
output.splice(existingIndex, 1);
|
||||
this.notifyItemShift(existingIndex, -1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rebuilds the entire view. Typically there should be no need to call this manually
|
||||
* @private
|
||||
*/
|
||||
rebuild() {
|
||||
// Depending on the size, should we batch this in smaller non-blocking steps,
|
||||
// which we update in a double-buffering mechanism, report progress, and swap out when done?
|
||||
//
|
||||
// MWE: 9-3-2020 postponed for now, one massive sort seems fine. It might briefly block,
|
||||
// but that happens only (exception: limit caused shifts) on user interaction at very roughly 1ms per 1000 records.
|
||||
// See also comment below
|
||||
const {sortBy, filter, sortHelper} = this;
|
||||
// copy the base array or run the filter (with a side-effecting update of visible)
|
||||
// @ts-ignore prevent making _record public
|
||||
const records: Entry<T>[] = this.datasource._records;
|
||||
let output = filter
|
||||
? records.filter((entry) => {
|
||||
entry.visible = filter(entry.value);
|
||||
return entry.visible;
|
||||
})
|
||||
: records.slice();
|
||||
if (sortBy) {
|
||||
// Depending on the size, should we batch this in smaller steps?
|
||||
// The following sorting method can be taskified; however,
|
||||
// the implementation is 20x slower than a native sort. So for now we stick to a
|
||||
// blocking sort, until we have some more numbers that this is hanging for anyone
|
||||
// const filtered = output;
|
||||
// output = [];
|
||||
// filtered.forEach((entry) => {
|
||||
// const insertionIndex = sortedLastIndexBy(output, entry, sortHelper);
|
||||
// output.splice(insertionIndex, 0, entry);
|
||||
// });
|
||||
output = lodashSort(output, sortHelper); // uses array.sort under the hood
|
||||
}
|
||||
|
||||
this._output = output;
|
||||
this.notifyReset(output.length);
|
||||
}
|
||||
|
||||
private sortHelper = (a: Entry<T>) =>
|
||||
this.sortBy ? this.sortBy(a.value) : a.id;
|
||||
|
||||
private getSortedIndex(entry: Entry<T>, oldValue: T) {
|
||||
const {_output: output} = this;
|
||||
if (output[entry.approxIndex] === entry) {
|
||||
// yay!
|
||||
return entry.approxIndex;
|
||||
}
|
||||
let index = sortedIndexBy(
|
||||
output,
|
||||
{
|
||||
value: oldValue,
|
||||
id: -1,
|
||||
visible: true,
|
||||
approxIndex: -1,
|
||||
},
|
||||
this.sortHelper,
|
||||
);
|
||||
index--;
|
||||
// the item we are looking for is not necessarily the first one at the insertion index
|
||||
while (output[index] !== entry) {
|
||||
index++;
|
||||
if (index >= output.length) {
|
||||
throw new Error('illegal state: sortedIndex not found'); // sanity check to avoid browser freeze if people mess up with internals
|
||||
}
|
||||
}
|
||||
|
||||
return index;
|
||||
}
|
||||
|
||||
private insertSorted(entry: Entry<T>) {
|
||||
// apply sorting
|
||||
const insertionIndex = sortedLastIndexBy(
|
||||
this._output,
|
||||
entry,
|
||||
this.sortHelper,
|
||||
);
|
||||
entry.approxIndex = insertionIndex;
|
||||
this._output.splice(insertionIndex, 0, entry);
|
||||
this.notifyItemShift(insertionIndex, 1);
|
||||
}
|
||||
}
|
||||
@@ -1,849 +0,0 @@
|
||||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*
|
||||
* @format
|
||||
*/
|
||||
|
||||
import {createDataSource, DataSource} from '../DataSource';
|
||||
|
||||
type Todo = {
|
||||
id: string;
|
||||
title: string;
|
||||
done?: boolean;
|
||||
};
|
||||
|
||||
const drinkCoffee: Todo = {
|
||||
id: 'coffee',
|
||||
title: 'drink coffee',
|
||||
};
|
||||
const eatCookie: Todo = {
|
||||
id: 'cookie',
|
||||
title: 'eat a cookie',
|
||||
done: true,
|
||||
};
|
||||
const submitBug: Todo = {
|
||||
id: 'bug',
|
||||
title: 'submit a bug',
|
||||
done: false,
|
||||
};
|
||||
|
||||
function unwrap<T>(array: readonly {value: T}[]): readonly T[] {
|
||||
return array.map((entry) => entry.value);
|
||||
}
|
||||
|
||||
function rawOutput<T>(ds: DataSource<T>): readonly T[] {
|
||||
// @ts-ignore
|
||||
const output = ds.view._output;
|
||||
return unwrap(output);
|
||||
}
|
||||
|
||||
test('can create a datasource', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie]);
|
||||
expect(ds.records()).toEqual([eatCookie]);
|
||||
|
||||
ds.append(drinkCoffee);
|
||||
expect(ds.records()).toEqual([eatCookie, drinkCoffee]);
|
||||
|
||||
// @ts-ignore
|
||||
expect(() => ds.getById('stuff')).toThrow(
|
||||
/Records cannot be looked up by key/,
|
||||
);
|
||||
|
||||
ds.update(1, submitBug);
|
||||
expect(ds.records()[1]).toBe(submitBug);
|
||||
|
||||
ds.delete(0);
|
||||
expect(ds.records()[0]).toBe(submitBug);
|
||||
});
|
||||
|
||||
test('can create a keyed datasource', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie], {key: 'id'});
|
||||
expect(ds.records()).toEqual([eatCookie]);
|
||||
|
||||
ds.append(drinkCoffee);
|
||||
expect(ds.records()).toEqual([eatCookie, drinkCoffee]);
|
||||
|
||||
expect(ds.getById('bug')).toBe(undefined);
|
||||
expect(ds.getById('cookie')).toBe(eatCookie);
|
||||
expect(ds.getById('coffee')).toBe(drinkCoffee);
|
||||
expect(ds.getIndexOfKey('bug')).toBe(-1);
|
||||
expect(ds.getIndexOfKey('cookie')).toBe(0);
|
||||
expect(ds.getIndexOfKey('coffee')).toBe(1);
|
||||
|
||||
ds.update(1, submitBug);
|
||||
expect(ds.records()[1]).toBe(submitBug);
|
||||
expect(ds.getById('coffee')).toBe(undefined);
|
||||
expect(ds.getById('bug')).toBe(submitBug);
|
||||
expect(ds.getIndexOfKey('bug')).toBe(1);
|
||||
expect(ds.getIndexOfKey('cookie')).toBe(0);
|
||||
expect(ds.getIndexOfKey('coffee')).toBe(-1);
|
||||
|
||||
// upsert existing
|
||||
const newBug = {
|
||||
id: 'bug',
|
||||
title: 'file a bug',
|
||||
done: true,
|
||||
};
|
||||
ds.upsert(newBug);
|
||||
expect(ds.records()[1]).toBe(newBug);
|
||||
expect(ds.getById('bug')).toBe(newBug);
|
||||
|
||||
// upsert new
|
||||
const trash = {
|
||||
id: 'trash',
|
||||
title: 'take trash out',
|
||||
};
|
||||
ds.upsert(trash);
|
||||
expect(ds.records()[2]).toBe(trash);
|
||||
expect(ds.getById('trash')).toBe(trash);
|
||||
|
||||
// delete by key
|
||||
expect(ds.records()).toEqual([eatCookie, newBug, trash]);
|
||||
expect(ds.deleteByKey('bug')).toBe(true);
|
||||
expect(ds.records()).toEqual([eatCookie, trash]);
|
||||
expect(ds.getIndexOfKey('bug')).toBe(-1);
|
||||
expect(ds.getIndexOfKey('cookie')).toBe(0);
|
||||
expect(ds.getIndexOfKey('trash')).toBe(1);
|
||||
});
|
||||
|
||||
test('throws on invalid keys', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie], {key: 'id'});
|
||||
expect(() => {
|
||||
ds.append({id: '', title: 'test'});
|
||||
}).toThrow(`Invalid key value: ''`);
|
||||
expect(() => {
|
||||
ds.append({id: 'cookie', title: 'test'});
|
||||
}).toThrow(`Duplicate key: 'cookie'`);
|
||||
});
|
||||
|
||||
test('throws on update causing duplicate key', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, submitBug], {key: 'id'});
|
||||
expect(() => {
|
||||
ds.update(0, {id: 'bug', title: 'oops'});
|
||||
}).toThrow(
|
||||
`Trying to insert duplicate key 'bug', which already exists in the collection`,
|
||||
);
|
||||
});
|
||||
|
||||
test('removing invalid keys', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie], {key: 'id'});
|
||||
expect(ds.deleteByKey('trash')).toBe(false);
|
||||
expect(() => {
|
||||
ds.delete(1);
|
||||
}).toThrowError('Out of bounds');
|
||||
});
|
||||
|
||||
test('sorting works', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, drinkCoffee]);
|
||||
ds.view.setSortBy((todo) => todo.title);
|
||||
expect(rawOutput(ds)).toEqual([drinkCoffee, eatCookie]);
|
||||
|
||||
ds.view.setSortBy(undefined);
|
||||
ds.view.setSortBy(undefined);
|
||||
expect(rawOutput(ds)).toEqual([eatCookie, drinkCoffee]);
|
||||
ds.view.setSortBy((todo) => todo.title);
|
||||
expect(rawOutput(ds)).toEqual([drinkCoffee, eatCookie]);
|
||||
|
||||
const aleph = {
|
||||
id: 'd',
|
||||
title: 'aleph',
|
||||
};
|
||||
ds.append(aleph);
|
||||
expect(ds.records()).toEqual([eatCookie, drinkCoffee, aleph]);
|
||||
expect(rawOutput(ds)).toEqual([aleph, drinkCoffee, eatCookie]);
|
||||
});
|
||||
|
||||
test('sorting preserves insertion order with equal keys', () => {
|
||||
type N = {
|
||||
$: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
const a = {$: 'a', name: 'a'};
|
||||
const b1 = {$: 'b', name: 'b1'};
|
||||
const b2 = {$: 'b', name: 'b2'};
|
||||
const b3 = {$: 'b', name: 'b3'};
|
||||
const c = {$: 'c', name: 'c'};
|
||||
|
||||
const ds = createDataSource<N>([]);
|
||||
ds.view.setSortBy('$');
|
||||
ds.append(b1);
|
||||
ds.append(c);
|
||||
ds.append(b2);
|
||||
ds.append(a);
|
||||
ds.append(b3);
|
||||
|
||||
expect(ds.records()).toEqual([b1, c, b2, a, b3]);
|
||||
expect(rawOutput(ds)).toEqual([a, b1, b2, b3, c]);
|
||||
|
||||
// if we append a new item with an existing sort key, it should end up at the end
|
||||
const b4 = {
|
||||
$: 'b',
|
||||
name: 'b4',
|
||||
};
|
||||
ds.append(b4);
|
||||
expect(ds.records()).toEqual([b1, c, b2, a, b3, b4]);
|
||||
expect(rawOutput(ds)).toEqual([a, b1, b2, b3, b4, c]);
|
||||
|
||||
// if we replace the middle item, it should end up in the middle
|
||||
const b2r = {
|
||||
$: 'b',
|
||||
name: 'b2replacement',
|
||||
};
|
||||
ds.update(2, b2r);
|
||||
expect(ds.records()).toEqual([b1, c, b2r, a, b3, b4]);
|
||||
expect(rawOutput(ds)).toEqual([a, b1, b2r, b3, b4, c]);
|
||||
|
||||
// if we replace something with a different sort value, it should be sorted properly, and the old should disappear
|
||||
const b3r = {
|
||||
$: 'aa',
|
||||
name: 'b3replacement',
|
||||
};
|
||||
ds.update(4, b3r);
|
||||
expect(ds.records()).toEqual([b1, c, b2r, a, b3r, b4]);
|
||||
expect(rawOutput(ds)).toEqual([a, b3r, b1, b2r, b4, c]);
|
||||
|
||||
ds.delete(3);
|
||||
expect(ds.records()).toEqual([b1, c, b2r, b3r, b4]);
|
||||
expect(rawOutput(ds)).toEqual([b3r, b1, b2r, b4, c]);
|
||||
});
|
||||
|
||||
test('filter + sort', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, drinkCoffee, submitBug]);
|
||||
|
||||
ds.view.setFilter((t) => t.title.indexOf('c') === -1);
|
||||
ds.view.setSortBy('title');
|
||||
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
|
||||
// append with and without filter
|
||||
const a = {id: 'a', title: 'does have that letter: c'};
|
||||
const b = {id: 'b', title: 'doesnt have that letter'};
|
||||
ds.append(a);
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
ds.append(b);
|
||||
expect(rawOutput(ds)).toEqual([b, submitBug]);
|
||||
|
||||
// filter in
|
||||
const newCookie = {
|
||||
id: 'cookie',
|
||||
title: 'eat a ookie',
|
||||
};
|
||||
ds.update(0, newCookie);
|
||||
expect(rawOutput(ds)).toEqual([b, newCookie, submitBug]);
|
||||
|
||||
// update -> filter in
|
||||
const newCoffee = {
|
||||
id: 'coffee',
|
||||
title: 'better drink tea',
|
||||
};
|
||||
ds.append(newCoffee);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie, submitBug]);
|
||||
|
||||
// update -> filter out
|
||||
ds.update(2, {id: 'bug', title: 'bug has c!'});
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie]);
|
||||
|
||||
ds.update(2, submitBug);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie, submitBug]);
|
||||
|
||||
ds.delete(3); // a
|
||||
ds.delete(3); // b
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, newCookie, submitBug]);
|
||||
|
||||
ds.view.setFilter(undefined);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, drinkCoffee, newCookie, submitBug]);
|
||||
|
||||
ds.view.setSortBy(undefined);
|
||||
// key insertion order
|
||||
expect(rawOutput(ds)).toEqual([newCookie, drinkCoffee, submitBug, newCoffee]);
|
||||
});
|
||||
|
||||
test('filter + sort + index', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, drinkCoffee, submitBug], {
|
||||
key: 'id',
|
||||
});
|
||||
|
||||
ds.view.setFilter((t) => t.title.indexOf('c') === -1);
|
||||
ds.view.setSortBy('title');
|
||||
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
|
||||
// append with and without filter
|
||||
const a = {id: 'a', title: 'does have that letter: c'};
|
||||
const b = {id: 'b', title: 'doesnt have that letter'};
|
||||
ds.append(a);
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
ds.append(b);
|
||||
expect(rawOutput(ds)).toEqual([b, submitBug]);
|
||||
|
||||
// filter in
|
||||
const newCookie = {
|
||||
id: 'cookie',
|
||||
title: 'eat a ookie',
|
||||
};
|
||||
ds.update(0, newCookie);
|
||||
expect(rawOutput(ds)).toEqual([b, newCookie, submitBug]);
|
||||
|
||||
// update -> filter in
|
||||
const newCoffee = {
|
||||
id: 'coffee',
|
||||
title: 'better drink tea',
|
||||
};
|
||||
ds.upsert(newCoffee);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie, submitBug]);
|
||||
|
||||
// update -> filter out
|
||||
ds.update(2, {id: 'bug', title: 'bug has c!'});
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie]);
|
||||
|
||||
ds.update(2, submitBug);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, b, newCookie, submitBug]);
|
||||
|
||||
ds.view.setFilter(undefined);
|
||||
expect(rawOutput(ds)).toEqual([newCoffee, a, b, newCookie, submitBug]);
|
||||
|
||||
ds.view.setSortBy(undefined);
|
||||
// key insertion order
|
||||
expect(rawOutput(ds)).toEqual([newCookie, newCoffee, submitBug, a, b]);
|
||||
// verify output()
|
||||
expect(rawOutput(ds).slice(1, 3)).toEqual([newCoffee, submitBug]);
|
||||
expect(ds.view.output(1, 3)).toEqual([newCoffee, submitBug]);
|
||||
});
|
||||
|
||||
test('filter', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, drinkCoffee, submitBug], {
|
||||
key: 'id',
|
||||
});
|
||||
|
||||
ds.view.setFilter((t) => t.title.indexOf('c') === -1);
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
|
||||
// append with and without filter
|
||||
const a = {id: 'a', title: 'does have that letter: c'};
|
||||
const b = {id: 'b', title: 'doesnt have that letter'};
|
||||
ds.append(a);
|
||||
expect(rawOutput(ds)).toEqual([submitBug]);
|
||||
ds.append(b);
|
||||
expect(rawOutput(ds)).toEqual([submitBug, b]);
|
||||
|
||||
// filter in
|
||||
const newCookie = {
|
||||
id: 'cookie',
|
||||
title: 'eat a ookie',
|
||||
};
|
||||
ds.update(0, newCookie);
|
||||
expect(rawOutput(ds)).toEqual([newCookie, submitBug, b]);
|
||||
|
||||
// update -> filter in
|
||||
const newCoffee = {
|
||||
id: 'coffee',
|
||||
title: 'better drink tea',
|
||||
};
|
||||
ds.upsert(newCoffee);
|
||||
expect(rawOutput(ds)).toEqual([newCookie, newCoffee, submitBug, b]);
|
||||
|
||||
// update -> filter out
|
||||
ds.update(2, {id: 'bug', title: 'bug has c!'});
|
||||
expect(rawOutput(ds)).toEqual([newCookie, newCoffee, b]);
|
||||
|
||||
ds.update(2, submitBug);
|
||||
|
||||
ds.view.setFilter(undefined);
|
||||
expect(rawOutput(ds)).toEqual([newCookie, newCoffee, submitBug, a, b]);
|
||||
});
|
||||
|
||||
test('reverse without sorting', () => {
|
||||
const ds = createDataSource<Todo>([eatCookie, drinkCoffee]);
|
||||
ds.view.setWindow(0, 100);
|
||||
expect(ds.view.output()).toEqual([eatCookie, drinkCoffee]);
|
||||
|
||||
ds.view.toggleReversed();
|
||||
expect(ds.view.output(1, 2)).toEqual([eatCookie]);
|
||||
expect(ds.view.output(0, 1)).toEqual([drinkCoffee]);
|
||||
expect(ds.view.output(0, 2)).toEqual([drinkCoffee, eatCookie]);
|
||||
|
||||
expect(ds.view.output()).toEqual([drinkCoffee, eatCookie]);
|
||||
|
||||
ds.append(submitBug);
|
||||
expect(ds.records()).toEqual([eatCookie, drinkCoffee, submitBug]);
|
||||
expect(ds.view.output()).toEqual([submitBug, drinkCoffee, eatCookie]);
|
||||
|
||||
const x = {id: 'x', title: 'x'};
|
||||
ds.update(0, x);
|
||||
expect(ds.records()).toEqual([x, drinkCoffee, submitBug]);
|
||||
expect(ds.view.output()).toEqual([submitBug, drinkCoffee, x]);
|
||||
const y = {id: 'y', title: 'y'};
|
||||
const z = {id: 'z', title: 'z'};
|
||||
ds.update(2, z);
|
||||
ds.update(1, y);
|
||||
|
||||
expect(ds.records()).toEqual([x, y, z]);
|
||||
expect(ds.view.output()).toEqual([z, y, x]);
|
||||
|
||||
ds.view.setReversed(false);
|
||||
expect(ds.view.output()).toEqual([x, y, z]);
|
||||
});
|
||||
|
||||
test('reverse with sorting', () => {
|
||||
type N = {
|
||||
$: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
const a = {$: 'a', name: 'a'};
|
||||
const b1 = {$: 'b', name: 'b1'};
|
||||
const b2 = {$: 'b', name: 'b2'};
|
||||
const b3 = {$: 'b', name: 'b3'};
|
||||
const c = {$: 'c', name: 'c'};
|
||||
|
||||
const ds = createDataSource<N>([]);
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.view.setReversed(true);
|
||||
ds.append(b1);
|
||||
ds.append(c);
|
||||
expect(ds.view.output()).toEqual([c, b1]);
|
||||
|
||||
ds.view.setSortBy('$');
|
||||
expect(ds.view.output()).toEqual([c, b1]);
|
||||
|
||||
ds.append(b2);
|
||||
expect(ds.view.output()).toEqual([c, b2, b1]);
|
||||
|
||||
ds.append(a);
|
||||
expect(ds.view.output()).toEqual([c, b2, b1, a]);
|
||||
|
||||
ds.append(b3);
|
||||
expect(ds.view.output()).toEqual([c, b3, b2, b1, a]);
|
||||
|
||||
// if we append a new item with an existing sort key, it should end up at the end
|
||||
const b4 = {
|
||||
$: 'b',
|
||||
name: 'b4',
|
||||
};
|
||||
ds.append(b4);
|
||||
expect(ds.view.output()).toEqual([c, b4, b3, b2, b1, a]);
|
||||
|
||||
// if we replace the middle item, it should end up in the middle
|
||||
const b2r = {
|
||||
$: 'b',
|
||||
name: 'b2replacement',
|
||||
};
|
||||
ds.update(2, b2r);
|
||||
expect(ds.view.output()).toEqual([c, b4, b3, b2r, b1, a]);
|
||||
|
||||
// if we replace something with a different sort value, it should be sorted properly, and the old should disappear
|
||||
const b3r = {
|
||||
$: 'aa',
|
||||
name: 'b3replacement',
|
||||
};
|
||||
ds.update(4, b3r);
|
||||
expect(ds.view.output()).toEqual([c, b4, b2r, b1, b3r, a]);
|
||||
|
||||
ds.delete(4);
|
||||
expect(ds.view.output()).toEqual([c, b4, b2r, b1, a]);
|
||||
});
|
||||
|
||||
test('reset', () => {
|
||||
const ds = createDataSource<Todo>([submitBug, drinkCoffee, eatCookie], {
|
||||
key: 'id',
|
||||
});
|
||||
ds.view.setSortBy('title');
|
||||
ds.view.setFilter((v) => v.id !== 'cookie');
|
||||
expect(rawOutput(ds)).toEqual([drinkCoffee, submitBug]);
|
||||
expect([...ds.keys()]).toEqual(['bug', 'coffee', 'cookie']);
|
||||
|
||||
ds.view.reset();
|
||||
expect(rawOutput(ds)).toEqual([submitBug, drinkCoffee, eatCookie]);
|
||||
expect([...ds.keys()]).toEqual(['bug', 'coffee', 'cookie']);
|
||||
});
|
||||
|
||||
test('clear', () => {
|
||||
const ds = createDataSource<Todo>([submitBug, drinkCoffee, eatCookie], {
|
||||
key: 'id',
|
||||
});
|
||||
ds.view.setSortBy('title');
|
||||
ds.view.setFilter((v) => v.id !== 'cookie');
|
||||
expect(rawOutput(ds)).toEqual([drinkCoffee, submitBug]);
|
||||
expect([...ds.keys()]).toEqual(['bug', 'coffee', 'cookie']);
|
||||
|
||||
ds.clear();
|
||||
expect(rawOutput(ds)).toEqual([]);
|
||||
expect([...ds.keys()]).toEqual([]);
|
||||
|
||||
ds.append(eatCookie);
|
||||
ds.append(drinkCoffee);
|
||||
ds.append(submitBug);
|
||||
expect([...ds.keys()]).toEqual(['cookie', 'coffee', 'bug']);
|
||||
// the view preferences were preserved, so the output is filtered and sorted again
|
||||
expect(rawOutput(ds)).toEqual([drinkCoffee, submitBug]);
|
||||
});
|
||||
|
||||
function testEvents<T>(
|
||||
initial: T[],
|
||||
op: (ds: DataSource<T, any, any>) => void,
|
||||
key?: keyof T,
|
||||
): any[] {
|
||||
const ds = createDataSource<T>(initial, {key});
|
||||
const events: any[] = [];
|
||||
ds.view.setListener((e) => events.push(e));
|
||||
op(ds);
|
||||
ds.view.setListener(undefined);
|
||||
return events;
|
||||
}
|
||||
|
||||
test('it emits the right events - zero window', () => {
|
||||
expect(
|
||||
testEvents(['a', 'b'], (ds) => {
|
||||
ds.append('c');
|
||||
ds.update(1, 'x');
|
||||
}),
|
||||
).toEqual([
|
||||
{
|
||||
delta: 1,
|
||||
index: 2,
|
||||
location: 'after',
|
||||
newCount: 3,
|
||||
type: 'shift',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('it emits the right events - small window', () => {
|
||||
expect(
|
||||
testEvents(['a', 'b'], (ds) => {
|
||||
ds.view.setWindow(0, 3);
|
||||
ds.append('c');
|
||||
ds.update(1, 'x');
|
||||
}),
|
||||
).toEqual([
|
||||
{delta: 1, location: 'in', newCount: 3, type: 'shift', index: 2},
|
||||
{index: 1, type: 'update'},
|
||||
]);
|
||||
});
|
||||
|
||||
test('it emits the right events - view change', () => {
|
||||
expect(
|
||||
testEvents(['a', 'b'], (ds) => {
|
||||
ds.view.setWindow(1, 2);
|
||||
ds.view.setSortBy((x) => x);
|
||||
// a, [b]
|
||||
ds.update(0, 'x');
|
||||
// b, [x]
|
||||
expect(ds.view.get(0)).toEqual('b');
|
||||
expect(ds.view.get(1)).toEqual('x');
|
||||
ds.append('y');
|
||||
// b, [x], y
|
||||
ds.append('c');
|
||||
// b, [c], x, y
|
||||
}),
|
||||
).toEqual([
|
||||
{newCount: 2, type: 'reset'},
|
||||
{index: 0, delta: -1, location: 'before', newCount: 1, type: 'shift'}, // remove a
|
||||
{index: 1, delta: 1, location: 'in', newCount: 2, type: 'shift'}, // pre-insert x
|
||||
{index: 2, delta: 1, location: 'after', newCount: 3, type: 'shift'}, // y happened after
|
||||
{index: 1, delta: 1, location: 'in', newCount: 4, type: 'shift'}, // c becomes the new 'in' window
|
||||
]);
|
||||
});
|
||||
|
||||
test('it emits the right events - reversed view change', () => {
|
||||
expect(
|
||||
testEvents(['a', 'b'], (ds) => {
|
||||
ds.view.setWindow(1, 2);
|
||||
ds.view.setSortBy((x) => x);
|
||||
ds.view.setReversed(true);
|
||||
// b, [a]
|
||||
ds.update(0, 'x');
|
||||
// x, [b]
|
||||
expect(ds.view.get(0)).toEqual('x');
|
||||
expect(ds.view.get(1)).toEqual('b');
|
||||
ds.append('y');
|
||||
// y, [x], b
|
||||
ds.append('c');
|
||||
// y, [x], c, b
|
||||
ds.append('a');
|
||||
// y, [x], c, b, a
|
||||
}),
|
||||
).toEqual([
|
||||
{newCount: 2, type: 'reset'},
|
||||
{newCount: 2, type: 'reset'}, // FIXME: ideally dedupe these, but due to scheduling will do little harm
|
||||
{index: 1, delta: -1, location: 'in', newCount: 1, type: 'shift'}, // remove a
|
||||
{index: 0, delta: 1, location: 'before', newCount: 2, type: 'shift'}, // pre-insert x
|
||||
{index: 0, delta: 1, location: 'before', newCount: 3, type: 'shift'},
|
||||
{index: 2, delta: 1, location: 'after', newCount: 4, type: 'shift'},
|
||||
{index: 4, delta: 1, location: 'after', newCount: 5, type: 'shift'},
|
||||
]);
|
||||
});
|
||||
|
||||
test('it emits the right events - reversed view change with filter', () => {
|
||||
expect(
|
||||
testEvents(['a', 'b'], (ds) => {
|
||||
ds.view.setWindow(0, 2);
|
||||
ds.view.setSortBy((x) => x);
|
||||
ds.view.setReversed(true);
|
||||
ds.view.setFilter((x) => ['a', 'b'].includes(x));
|
||||
// [b, a]
|
||||
ds.update(0, 'x'); // x b
|
||||
// [b, ]
|
||||
expect(ds.view.get(0)).toEqual('b');
|
||||
expect(rawOutput(ds).length).toBe(1);
|
||||
ds.append('y'); // x b y
|
||||
// [b, ]
|
||||
ds.append('c'); // x b y c
|
||||
// [b, ]
|
||||
ds.append('a'); // x b y c a
|
||||
// [b, a]
|
||||
ds.append('a'); // x b y c a a
|
||||
// [b, a, a] // N.b. the new a is in the *middle*
|
||||
ds.delete(2); // x b c a a
|
||||
// no effect
|
||||
ds.delete(4); // this removes the second a in the input, so the first a in the output!
|
||||
// [b, a]
|
||||
}),
|
||||
).toEqual([
|
||||
{newCount: 2, type: 'reset'},
|
||||
{newCount: 2, type: 'reset'}, // FIXME: ideally dedupe these, but due to scheduling will do little harm
|
||||
{newCount: 2, type: 'reset'}, // FIXME: ideally dedupe these, but due to scheduling will do little harm
|
||||
{index: 1, delta: -1, location: 'in', newCount: 1, type: 'shift'}, // remove a
|
||||
{index: 1, delta: 1, location: 'in', newCount: 2, type: 'shift'},
|
||||
{index: 1, delta: 1, location: 'in', newCount: 3, type: 'shift'},
|
||||
{index: 1, delta: -1, location: 'in', newCount: 2, type: 'shift'},
|
||||
]);
|
||||
});
|
||||
|
||||
test('basic remove', () => {
|
||||
const completedBug = {id: 'bug', title: 'fixed bug', done: true};
|
||||
expect(
|
||||
testEvents(
|
||||
[drinkCoffee, eatCookie, submitBug],
|
||||
(ds) => {
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.delete(0);
|
||||
expect(ds.view.output()).toEqual([eatCookie, submitBug]);
|
||||
expect(ds.getById('bug')).toBe(submitBug);
|
||||
expect(ds.getById('coffee')).toBeUndefined();
|
||||
expect(ds.getById('cookie')).toBe(eatCookie);
|
||||
ds.upsert(completedBug);
|
||||
ds.deleteByKey('cookie');
|
||||
expect(ds.view.output()).toEqual([completedBug]);
|
||||
expect(ds.getById('bug')).toBe(completedBug);
|
||||
},
|
||||
'id',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
type: 'shift',
|
||||
newCount: 2,
|
||||
location: 'in',
|
||||
index: 0,
|
||||
delta: -1,
|
||||
},
|
||||
{
|
||||
type: 'update',
|
||||
index: 1,
|
||||
},
|
||||
{
|
||||
type: 'shift',
|
||||
index: 0,
|
||||
location: 'in',
|
||||
newCount: 1,
|
||||
delta: -1,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('basic shift', () => {
|
||||
const completedBug = {id: 'bug', title: 'fixed bug', done: true};
|
||||
expect(
|
||||
testEvents(
|
||||
[drinkCoffee, eatCookie, submitBug],
|
||||
(ds) => {
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.shift(2);
|
||||
expect(ds.view.output()).toEqual([submitBug]);
|
||||
expect(ds.getById('bug')).toBe(submitBug);
|
||||
expect(ds.getById('coffee')).toBeUndefined();
|
||||
expect(ds.getIndexOfKey('bug')).toBe(0);
|
||||
expect(ds.getIndexOfKey('coffee')).toBe(-1);
|
||||
ds.upsert(completedBug);
|
||||
expect(ds.view.output()).toEqual([completedBug]);
|
||||
expect(ds.getById('bug')).toBe(completedBug);
|
||||
},
|
||||
'id',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
type: 'shift',
|
||||
newCount: 1,
|
||||
location: 'in',
|
||||
index: 0,
|
||||
delta: -2,
|
||||
},
|
||||
{
|
||||
type: 'update',
|
||||
index: 0,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('sorted shift', () => {
|
||||
expect(
|
||||
testEvents(['c', 'b', 'a', 'e', 'd'], (ds) => {
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.view.setSortBy((v) => v);
|
||||
expect(ds.view.output()).toEqual(['a', 'b', 'c', 'd', 'e']);
|
||||
ds.shift(4);
|
||||
expect(ds.view.output()).toEqual(['d']);
|
||||
ds.shift(1); // optimizes to reset
|
||||
}),
|
||||
).toEqual([
|
||||
{newCount: 5, type: 'reset'}, // sort
|
||||
{delta: -1, index: 4, location: 'in', newCount: 4, type: 'shift'}, // e
|
||||
{delta: -1, index: 0, location: 'in', newCount: 3, type: 'shift'}, // a
|
||||
{delta: -1, index: 0, location: 'in', newCount: 2, type: 'shift'}, // b
|
||||
{delta: -1, index: 0, location: 'in', newCount: 1, type: 'shift'}, // c
|
||||
{newCount: 0, type: 'reset'}, // shift that clears
|
||||
]);
|
||||
});
|
||||
|
||||
test('filtered shift', () => {
|
||||
expect(
|
||||
testEvents(['c', 'b', 'a', 'e', 'd'], (ds) => {
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.view.setFilter((v) => v !== 'b' && v !== 'e');
|
||||
expect(ds.view.output()).toEqual(['c', 'a', 'd']);
|
||||
ds.shift(4);
|
||||
expect(ds.view.output()).toEqual(['d']);
|
||||
}),
|
||||
).toEqual([
|
||||
{newCount: 3, type: 'reset'}, // filter
|
||||
{type: 'shift', location: 'in', newCount: 1, index: 0, delta: -2}, // optimized shift
|
||||
]);
|
||||
});
|
||||
|
||||
test('remove after shift works correctly', () => {
|
||||
const a: Todo = {id: 'a', title: 'a', done: false};
|
||||
const b: Todo = {id: 'b', title: 'b', done: false};
|
||||
|
||||
expect(
|
||||
testEvents(
|
||||
[eatCookie, drinkCoffee, submitBug, a, b],
|
||||
(ds) => {
|
||||
ds.view.setWindow(0, 100);
|
||||
ds.shift(2);
|
||||
ds.deleteByKey('b');
|
||||
ds.deleteByKey('bug');
|
||||
expect(ds.view.output()).toEqual([a]);
|
||||
expect(ds.getIndexOfKey('cookie')).toBe(-1);
|
||||
expect(ds.getIndexOfKey('coffee')).toBe(-1);
|
||||
expect(ds.getIndexOfKey('bug')).toBe(-1);
|
||||
expect(ds.getIndexOfKey('a')).toBe(0);
|
||||
expect(ds.getIndexOfKey('b')).toBe(-1);
|
||||
},
|
||||
'id',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
type: 'shift',
|
||||
newCount: 3,
|
||||
location: 'in',
|
||||
index: 0,
|
||||
delta: -2,
|
||||
},
|
||||
{
|
||||
type: 'shift',
|
||||
newCount: 2,
|
||||
location: 'in',
|
||||
index: 2,
|
||||
delta: -1,
|
||||
},
|
||||
{
|
||||
type: 'shift',
|
||||
newCount: 1,
|
||||
location: 'in',
|
||||
index: 0,
|
||||
delta: -1,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('respects limit', () => {
|
||||
const grab = (): [length: number, first: number, last: number] => {
|
||||
const output = ds.view.output();
|
||||
return [output.length, output[0], output[output.length - 1]];
|
||||
};
|
||||
|
||||
const ds = createDataSource(
|
||||
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18],
|
||||
{limit: 20},
|
||||
);
|
||||
ds.view.setWindow(0, 100);
|
||||
|
||||
ds.append(19);
|
||||
ds.append(20);
|
||||
expect(grab()).toEqual([20, 1, 20]);
|
||||
|
||||
ds.append(21);
|
||||
expect(grab()).toEqual([19, 3, 21]);
|
||||
ds.append(22);
|
||||
expect(grab()).toEqual([20, 3, 22]);
|
||||
|
||||
ds.delete(0);
|
||||
expect(grab()).toEqual([19, 4, 22]);
|
||||
|
||||
ds.append(23);
|
||||
expect(grab()).toEqual([20, 4, 23]);
|
||||
ds.append(24);
|
||||
expect(grab()).toEqual([19, 6, 24]);
|
||||
});
|
||||
|
||||
test('DataSource can iterate', () => {
|
||||
const ds = createDataSource([eatCookie, drinkCoffee], {key: 'id'});
|
||||
|
||||
expect([...ds]).toEqual([eatCookie, drinkCoffee]);
|
||||
expect(Array.from(ds.keys())).toEqual(['cookie', 'coffee']);
|
||||
expect(Array.from(ds.entries())).toEqual([
|
||||
['cookie', eatCookie],
|
||||
['coffee', drinkCoffee],
|
||||
]);
|
||||
|
||||
const seen: Todo[] = [];
|
||||
for (const todo of ds) {
|
||||
seen.push(todo);
|
||||
}
|
||||
expect(seen).toEqual([eatCookie, drinkCoffee]);
|
||||
|
||||
ds.append(submitBug);
|
||||
expect([...ds]).toEqual([eatCookie, drinkCoffee, submitBug]);
|
||||
|
||||
ds.clear();
|
||||
expect([...ds]).toEqual([]);
|
||||
|
||||
ds.append(submitBug);
|
||||
expect([...ds]).toEqual([submitBug]);
|
||||
});
|
||||
|
||||
test('DataSource.view can iterate', () => {
|
||||
const ds = createDataSource([eatCookie, drinkCoffee, submitBug, eatCookie]);
|
||||
ds.view.setSortBy('id');
|
||||
// bug coffee cookie cookie
|
||||
ds.view.toggleReversed();
|
||||
// cookie cookie coffee bug
|
||||
ds.view.setWindow(1, 3);
|
||||
// cookie coffee
|
||||
|
||||
expect(ds.view.output()).toEqual([eatCookie, drinkCoffee]);
|
||||
expect([...ds.view]).toEqual([eatCookie, drinkCoffee]);
|
||||
|
||||
ds.view.reset();
|
||||
// default window is empty!
|
||||
expect([...ds.view]).toEqual([]);
|
||||
ds.view.setWindow(0, 100);
|
||||
expect([...ds.view]).toEqual([eatCookie, drinkCoffee, submitBug, eatCookie]);
|
||||
|
||||
ds.clear();
|
||||
expect([...ds.view]).toEqual([]);
|
||||
});
|
||||
@@ -1,213 +0,0 @@
|
||||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*
|
||||
* @format
|
||||
*/
|
||||
|
||||
import {createDataSource, DataSource} from '../DataSource';
|
||||
|
||||
type Todo = {
|
||||
id: string;
|
||||
title: string;
|
||||
done: boolean;
|
||||
};
|
||||
|
||||
function generateTodos(amount: number): Todo[] {
|
||||
const res = new Array<Todo>(amount);
|
||||
for (let i = 0; i < amount; i++) {
|
||||
res[i] = {
|
||||
id: 'todo_' + i,
|
||||
title:
|
||||
'' +
|
||||
((i % 20) * 1000000 + (amount - i)) +
|
||||
GKChesterton.replace(/Chesterton/g, '' + i),
|
||||
done: i % 3 === 0,
|
||||
};
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
const defaultFilter = (t: Todo) => !t.done;
|
||||
|
||||
type DataSourceish = DataSource<Todo> | FakeDataSource<Todo>;
|
||||
|
||||
// NOTE: this runs in jest, which is not optimal for perf, but should give some idea
|
||||
// make sure to use the `yarn watch` script in desktop root, so that the garbage collector is exposed
|
||||
|
||||
// By default skipped to not slow down each and every test run
test.skip('run perf test', () => {
  if (!global.gc) {
    console.warn(
      'Warning: garbage collector not available, skipping this test. Make sure to start the test suite using `yarn watch`',
    );
    return;
  }

  const measurements: any = {};

  const smallSize = 50000;
  const largeSize = 100000;
  const smallset = generateTodos(smallSize);
  const largeset = generateTodos(largeSize);

  const opts = {limit: largeSize * 2};
  const datasources = {
    unkeyed: createDataSource(smallset, opts),
    unkeyed_large: createDataSource(largeset, opts),
    keyed: createDataSource(smallset, {key: 'id', ...opts}),
    keyed_large: createDataSource(largeset, {key: 'id', ...opts}),
    unkeyed_sorted: createDataSource(smallset, opts),
    unkeyed_sorted_large: createDataSource(largeset, opts),
    keyed_sorted: createDataSource(smallset, {key: 'id', ...opts}),
    keyed_sorted_large: createDataSource(largeset, {
      key: 'id',
      ...opts,
    }),
    fake_small: new FakeDataSource(smallset),
    fake_large: new FakeDataSource(largeset),
    fake_small_sorted: new FakeDataSource(smallset),
    fake_large_sorted: new FakeDataSource(largeset),
  };

  Object.entries(datasources).forEach(([name, ds]) => {
    ds.view.setWindow(0, 1000000);
    if (name.includes('sorted')) {
      ds.view.setFilter(defaultFilter);
      ds.view.setSortBy('title');
    }
  });

  function measure(title: string, task: (ds: DataSourceish) => void) {
    measurements[title] = {};
    Object.entries(datasources).forEach(([name, ds]) => {
      global.gc?.();
      const start = Date.now();
      task(ds as any);
      if (ds instanceof FakeDataSource) {
        // to 'render' we need to know the end result (this mimics a lazy evaluation of filter / sort)
        // note that this skews the test a bit in favor of the fake data source,
        // as DataSource would *always* keep things sorted / filtered, but doing that would explode the test for append / update :)
        ds.view.buildOutput();
      }
      // global.gc?.(); // to clean up the mess we created as part of the measurement
      const duration = Date.now() - start;
      measurements[title][name] = duration;
    });
  }

  measure('append', (ds) => {
    for (let i = 0; i < 1000; i++) {
      ds.append({
        id: 'test_' + i,
        title: i + 'read some more chesterton!',
        done: false,
      });
    }
  });

  measure('update', (ds) => {
    for (let i = 0; i < 1000; i++) {
      ds.update(i, {
        id: 'test_update_' + i,
        title: i + 'read some more chesterton!',
        done: true,
      });
    }
  });

  measure('remove', (ds) => {
    ds.delete(99);
  });

  measure('shift', (ds) => {
    ds.shift(0.1 * smallSize);
  });

  measure('change sorting', (ds) => {
    ds.view.setSortBy('id');
  });

  measure('change filter', (ds) => {
    ds.view.setFilter((t) => t.title.includes('23')); // 23 does not occur in the original text
  });

  const sum: any = {};
  Object.entries(measurements).forEach(([_test, entries]: any) => {
    Object.entries(entries).forEach(([ds, duration]) => {
      if (!sum[ds]) sum[ds] = 0;
      sum[ds] += duration;
    });
  });
  measurements.sum = sum;
  console.table(measurements);
});

const GKChesterton = `Gilbert Keith Chesterton KC*SG (29 May 1874 – 14 June 1936) was an English writer,[2] philosopher, lay theologian, and literary and art critic. He has been referred to as the "prince of paradox".[3] Time magazine observed of his writing style: "Whenever possible Chesterton made his points with popular sayings, proverbs, allegories—first carefully turning them inside out."[4]

Chesterton created the fictional priest-detective Father Brown,[5] and wrote on apologetics. Even some of those who disagree with him have recognised the wide appeal of such works as Orthodoxy and The Everlasting Man.[4][6] Chesterton routinely referred to himself as an "orthodox" Christian, and came to identify this position more and more with Catholicism, eventually converting to Catholicism from High Church Anglicanism. Biographers have identified him as a successor to such Victorian authors as Matthew Arnold, Thomas Carlyle, John Henry Newman, and John Ruskin.[7] On his contributions, T. S. Eliot wrote:

He was importantly and consistently on the side of the angels. Behind the Johnsonian fancy-dress, so reassuring to the British public, he concealed the most serious and revolutionary designs—concealing them by exposure ... Chesterton's social and economic ideas...were fundamentally Christian and Catholic. He did more, I think, than any man of his time—and was able to do more than anyone else, because of his particular background, development and abilities as a public performer—to maintain the existence of the important minority in the modern world. He leaves behind a permanent claim upon our loyalty, to see that the work that he did in his time is continued in ours.[8]`;

class FakeDataSource<T> {
  data: ReadonlyArray<T>;
  output!: ReadonlyArray<T>;
  filterFn?: (t: T) => boolean;
  private sortAttr?: keyof T;

  constructor(initial: T[]) {
    this.data = initial;
    this.view.buildOutput();
  }

  view = {
    setWindow: (_start: number, _end: number) => {
      // noop
    },

    setFilter: (filter: (t: T) => boolean) => {
      this.filterFn = filter;
    },

    setSortBy: (k: keyof T) => {
      this.sortAttr = k;
    },

    buildOutput: () => {
      const filtered = this.filterFn
        ? this.data.filter(this.filterFn)
        : this.data;
      const sorted = this.sortAttr
        ? filtered
            .slice()
            .sort((a: any, b: any) =>
              String.prototype.localeCompare.call(
                a[this.sortAttr!],
                b[this.sortAttr!],
              ),
            )
        : filtered;
      this.output = sorted;
    },
  };

  append(v: T) {
    this.data = [...this.data, v];
  }

  update(index: number, v: T) {
    this.data = this.data.slice();
    (this.data as any)[index] = v;
  }

  delete(index: number) {
    this.data = this.data.slice();
    (this.data as any).splice(index, 1);
  }

  shift(amount: number) {
    this.data = this.data.slice(amount);
  }
}
desktop/flipper-plugin/src/state/createDataSource.tsx (new file, 49 lines)
@@ -0,0 +1,49 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import {DataSource, ExtractKeyType} from '../data-source/DataSource';
import {registerStorageAtom} from '../plugin/PluginBase';

type CreateDataSourceOptions<T, K extends keyof T> = {
  /**
   * If a key is set, the given field of the records is assumed to be unique,
   * and its value can be used to perform lookups and upserts.
   */
  key?: K;
  /**
   * The maximum amount of records that this DataSource will store.
   * If the limit is exceeded, the oldest records will automatically be dropped to make room for the new ones.
   */
  limit?: number;
  /**
   * Should this state persist when exporting a plugin?
   * If set, the dataSource will be saved / loaded under the key provided.
   */
  persist?: string;
};

export function createDataSource<T, KEY extends keyof T = any>(
  initialSet: T[],
  options: CreateDataSourceOptions<T, KEY>,
): DataSource<T, KEY, ExtractKeyType<T, KEY>>;
export function createDataSource<T>(
  initialSet?: T[],
): DataSource<T, never, never>;
export function createDataSource<T, KEY extends keyof T>(
  initialSet: T[] = [],
  options?: CreateDataSourceOptions<T, KEY>,
): DataSource<T, any, any> {
  const ds = new DataSource<T, KEY>(options?.key);
  if (options?.limit !== undefined) {
    ds.limit = options.limit;
  }
  registerStorageAtom(options?.persist, ds);
  initialSet.forEach((value) => ds.append(value));
  return ds;
}
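For reference, a minimal usage sketch of the createDataSource options described above. The Todo type, its field names, and the `flipper-plugin` import path are illustrative assumptions, not part of this diff:

import {createDataSource} from 'flipper-plugin'; // assuming the usual public re-export; adjust the path if importing directly

type Todo = {id: string; title: string; done: boolean};

// `id` is treated as a unique key, at most 10 * 1000 records are kept
// (older ones are dropped first), and the state is persisted under 'todos' when exporting the plugin.
const todos = createDataSource<Todo, 'id'>([], {
  key: 'id',
  limit: 10 * 1000,
  persist: 'todos',
});

todos.append({id: 'todo_1', title: 'try out the reorganised DataSource', done: false});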