diff --git a/gulp/config.js b/gulp/config.js index 18e78120e89..1945b879037 100644 --- a/gulp/config.js +++ b/gulp/config.js @@ -45,7 +45,6 @@ module.exports = { babel: { plugins: [ require('babel-plugin-add-module-exports'), - require('babel-plugin-minify-dead-code-elimination') ], presets: [ [require('babel-preset-env'), { diff --git a/gulp/tasks/build.js b/gulp/tasks/build.js index daf8b606a94..bf2eb7ca784 100644 --- a/gulp/tasks/build.js +++ b/gulp/tasks/build.js @@ -134,6 +134,7 @@ function compileIndvES2015ModulesToBrowser() { 'firebase-app': './src/app.ts', 'firebase-storage': './src/storage.ts', 'firebase-messaging': './src/messaging.ts', + 'firebase-database': './src/database.ts', }, output: { path: path.resolve(__dirname, './dist/browser'), @@ -192,27 +193,6 @@ function compileIndvES2015ModulesToBrowser() { .pipe(gulp.dest(`${config.paths.outDir}/browser`)); } -function compileSDKES2015ToBrowser() { - return gulp.src('./dist/es2015/firebase.js') - .pipe(webpackStream({ - plugins: [ - new webpack.DefinePlugin({ - TARGET_ENVIRONMENT: JSON.stringify('browser') - }) - ] - }, webpack)) - .pipe(sourcemaps.init({ loadMaps: true })) - .pipe(through.obj(function(file, enc, cb) { - // Dont pipe through any source map files as it will be handled - // by gulp-sourcemaps - var isSourceMap = /\.map$/.test(file.path); - if (!isSourceMap) this.push(file); - cb(); - })) - .pipe(sourcemaps.write('.')) - .pipe(gulp.dest(`${config.paths.outDir}/browser`)); -} - function buildBrowserFirebaseJs() { return gulp.src('./dist/browser/*.js') .pipe(sourcemaps.init({ loadMaps: true })) @@ -222,32 +202,18 @@ function buildBrowserFirebaseJs() { } function buildAltEnvFirebaseJs() { - const envs = [ - 'browser', - 'node', - 'react-native' - ]; - - const streams = envs.map(env => { - const babelConfig = Object.assign({}, config.babel, { - plugins: [ - ['inline-replace-variables', { - 'TARGET_ENVIRONMENT': env - }], - ...config.babel.plugins - ] - }); - return gulp.src('./dist/es2015/firebase.js') - .pipe(sourcemaps.init({ loadMaps: true })) - .pipe(babel(babelConfig)) - .pipe(rename({ - suffix: `-${env}` - })) - .pipe(sourcemaps.write('.')) - .pipe(gulp.dest(`${config.paths.outDir}/cjs`)); + const babelConfig = Object.assign({}, config.babel, { + plugins: config.babel.plugins }); - - return merge(streams); + return gulp.src([ + './dist/es2015/firebase-browser.js', + './dist/es2015/firebase-node.js', + './dist/es2015/firebase-react-native.js', + ]) + .pipe(sourcemaps.init({ loadMaps: true })) + .pipe(babel(babelConfig)) + .pipe(sourcemaps.write('.')) + .pipe(gulp.dest(`${config.paths.outDir}/cjs`)); } function copyPackageContents() { diff --git a/src/database.ts b/src/database.ts new file mode 100644 index 00000000000..11edeb11740 --- /dev/null +++ b/src/database.ts @@ -0,0 +1,65 @@ +/** +* Copyright 2017 Google Inc. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +import firebase from './app'; +import { FirebaseApp, FirebaseNamespace } from "./app/firebase_app"; +import { Database } from "./database/api/Database"; +import { Query } from "./database/api/Query"; +import { Reference } from "./database/api/Reference"; +import { enableLogging } from "./database/core/util/util"; +import { RepoManager } from "./database/core/RepoManager"; +import * as INTERNAL from './database/api/internal'; +import * as TEST_ACCESS from './database/api/test_access'; +import { isNodeSdk } from "./utils/environment"; + +export function registerDatabase(instance) { + // Register the Database Service with the 'firebase' namespace. + const namespace = instance.INTERNAL.registerService( + 'database', + app => RepoManager.getInstance().databaseFromApp(app), + // firebase.database namespace properties + { + Reference, + Query, + Database, + enableLogging, + INTERNAL, + ServerValue: Database.ServerValue, + TEST_ACCESS + } + ); + + if (isNodeSdk()) { + module.exports = namespace; + } +} + +/** + * Extensions to the FirebaseApp and FirebaseNamespaces interfaces + */ +declare module './app/firebase_app' { + interface FirebaseApp { + database?(): Database + } +} + +declare module './app/firebase_app' { + interface FirebaseNamespace { + database?(app: FirebaseApp): Database + } +} + +registerDatabase(firebase); diff --git a/src/database/api/DataSnapshot.ts b/src/database/api/DataSnapshot.ts new file mode 100644 index 00000000000..472393fff3e --- /dev/null +++ b/src/database/api/DataSnapshot.ts @@ -0,0 +1,168 @@ +import { validateArgCount, validateCallback } from '../../utils/validation'; +import { validatePathString } from '../core/util/validation'; +import { Path } from '../core/util/Path'; +import { PRIORITY_INDEX } from '../core/snap/indexes/PriorityIndex'; +import { Node } from '../core/snap/Node'; +import { Reference } from './Reference'; +import { Index } from '../core/snap/indexes/Index'; +import { ChildrenNode } from '../core/snap/ChildrenNode'; + +/** + * Class representing a firebase data snapshot. It wraps a SnapshotNode and + * surfaces the public methods (val, forEach, etc.) we want to expose. + */ +export class DataSnapshot { + /** + * @param {!Node} node_ A SnapshotNode to wrap. + * @param {!Reference} ref_ The ref of the location this snapshot came from. + * @param {!Index} index_ The iteration order for this snapshot + */ + constructor(private readonly node_: Node, + private readonly ref_: Reference, + private readonly index_: Index) { + } + + /** + * Retrieves the snapshot contents as JSON. Returns null if the snapshot is + * empty. + * + * @return {*} JSON representation of the DataSnapshot contents, or null if empty. + */ + val(): any { + validateArgCount('DataSnapshot.val', 0, 0, arguments.length); + return this.node_.val(); + } + + /** + * Returns the snapshot contents as JSON, including priorities of node. Suitable for exporting + * the entire node contents. + * @return {*} JSON representation of the DataSnapshot contents, or null if empty. + */ + exportVal(): any { + validateArgCount('DataSnapshot.exportVal', 0, 0, arguments.length); + return this.node_.val(true); + } + + // Do not create public documentation. 
This is intended to make JSON serialization work but is otherwise unnecessary + // for end-users + toJSON(): any { + // Optional spacer argument is unnecessary because we're depending on recursion rather than stringifying the content + validateArgCount('DataSnapshot.toJSON', 0, 1, arguments.length); + return this.exportVal(); + } + + /** + * Returns whether the snapshot contains a non-null value. + * + * @return {boolean} Whether the snapshot contains a non-null value, or is empty. + */ + exists(): boolean { + validateArgCount('DataSnapshot.exists', 0, 0, arguments.length); + return !this.node_.isEmpty(); + } + + /** + * Returns a DataSnapshot of the specified child node's contents. + * + * @param {!string} childPathString Path to a child. + * @return {!DataSnapshot} DataSnapshot for child node. + */ + child(childPathString: string): DataSnapshot { + validateArgCount('DataSnapshot.child', 0, 1, arguments.length); + // Ensure the childPath is a string (can be a number) + childPathString = String(childPathString); + validatePathString('DataSnapshot.child', 1, childPathString, false); + + const childPath = new Path(childPathString); + const childRef = this.ref_.child(childPath); + return new DataSnapshot(this.node_.getChild(childPath), childRef, PRIORITY_INDEX); + } + + /** + * Returns whether the snapshot contains a child at the specified path. + * + * @param {!string} childPathString Path to a child. + * @return {boolean} Whether the child exists. + */ + hasChild(childPathString: string): boolean { + validateArgCount('DataSnapshot.hasChild', 1, 1, arguments.length); + validatePathString('DataSnapshot.hasChild', 1, childPathString, false); + + const childPath = new Path(childPathString); + return !this.node_.getChild(childPath).isEmpty(); + } + + /** + * Returns the priority of the object, or null if no priority was set. + * + * @return {string|number|null} The priority. + */ + getPriority(): string | number | null { + validateArgCount('DataSnapshot.getPriority', 0, 0, arguments.length); + + // typecast here because we never return deferred values or internal priorities (MAX_PRIORITY) + return /**@type {string|number|null} */ (this.node_.getPriority().val()); + } + + /** + * Iterates through child nodes and calls the specified action for each one. + * + * @param {function(!DataSnapshot)} action Callback function to be called + * for each child. + * @return {boolean} True if forEach was canceled by action returning true for + * one of the child nodes. + */ + forEach(action: (d: DataSnapshot) => any): boolean { + validateArgCount('DataSnapshot.forEach', 1, 1, arguments.length); + validateCallback('DataSnapshot.forEach', 1, action, false); + + if (this.node_.isLeafNode()) + return false; + + const childrenNode = /**@type {ChildrenNode} */ (this.node_); + // Sanitize the return value to a boolean. ChildrenNode.forEachChild has a weird return type... + return !!childrenNode.forEachChild(this.index_, (key, node) => { + return action(new DataSnapshot(node, this.ref_.child(key), PRIORITY_INDEX)); + }); + } + + /** + * Returns whether this DataSnapshot has children. + * @return {boolean} True if the DataSnapshot contains 1 or more child nodes. + */ + hasChildren(): boolean { + validateArgCount('DataSnapshot.hasChildren', 0, 0, arguments.length); + + if (this.node_.isLeafNode()) + return false; + else + return !this.node_.isEmpty(); + } + + get key() { + return this.ref_.getKey(); + } + + /** + * Returns the number of children for this DataSnapshot. 
+ * @return {number} The number of children that this DataSnapshot contains. + */ + numChildren(): number { + validateArgCount('DataSnapshot.numChildren', 0, 0, arguments.length); + + return this.node_.numChildren(); + } + + /** + * @return {Reference} The Firebase reference for the location this snapshot's data came from. + */ + getRef(): Reference { + validateArgCount('DataSnapshot.ref', 0, 0, arguments.length); + + return this.ref_; + } + + get ref() { + return this.getRef(); + } +} diff --git a/src/database/api/Database.ts b/src/database/api/Database.ts new file mode 100644 index 00000000000..fcb2aadf46e --- /dev/null +++ b/src/database/api/Database.ts @@ -0,0 +1,133 @@ +import { fatal } from "../core/util/util"; +import { parseRepoInfo } from "../core/util/libs/parser"; +import { Path } from "../core/util/Path"; +import { PromiseImpl } from "../../utils/promise"; +import { Reference } from "./Reference"; +import { Repo } from "../core/Repo"; +import { RepoManager } from "../core/RepoManager"; +import { validateArgCount } from "../../utils/validation"; +import { FirebaseApp } from "../../app/firebase_app"; +import { validateUrl } from "../core/util/validation"; + +/** + * Class representing a firebase database. + * @implements {firebase.Service} + */ +export class Database { + repo_: Repo; + root_: Reference; + INTERNAL; + + static ServerValue = { + 'TIMESTAMP': { + '.sv' : 'timestamp' + } + } + + /** + * The constructor should not be called by users of our public API. + * @param {!Repo} repo + */ + constructor(repo) { + if (!(repo instanceof Repo)) { + fatal("Don't call new Database() directly - please use firebase.database()."); + } + + /** @type {Repo} */ + this.repo_ = repo; + + /** @type {Firebase} */ + this.root_ = new Reference(repo, Path.Empty); + + this.INTERNAL = new DatabaseInternals(this); + } + + get app(): FirebaseApp { + return this.repo_.app; + } + + /** + * Returns a reference to the root or the path specified in opt_pathString. + * @param {string=} pathString + * @return {!Firebase} Firebase reference. + */ + ref(pathString?): Reference { + this.checkDeleted_('ref'); + validateArgCount('database.ref', 0, 1, arguments.length); + + return pathString !== undefined ? this.root_.child(pathString) : this.root_; + } + + /** + * Returns a reference to the root or the path specified in url. + * We throw a exception if the url is not in the same domain as the + * current repo. + * @param {string} url + * @return {!Firebase} Firebase reference. + */ + refFromURL(url) { + /** @const {string} */ + var apiName = 'database.refFromURL'; + this.checkDeleted_(apiName); + validateArgCount(apiName, 1, 1, arguments.length); + var parsedURL = parseRepoInfo(url); + validateUrl(apiName, 1, parsedURL); + + var repoInfo = parsedURL.repoInfo; + if (repoInfo.host !== this.repo_.repoInfo_.host) { + fatal(apiName + ": Host name does not match the current database: " + + "(found " + repoInfo.host + " but expected " + this.repo_.repoInfo_.host + ")"); + } + + return this.ref(parsedURL.path.toString()); + } + + /** + * @param {string} apiName + */ + private checkDeleted_(apiName) { + if (this.repo_ === null) { + fatal("Cannot call " + apiName + " on a deleted database."); + } + } + + // Make individual repo go offline. 
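  // A minimal usage sketch (assuming an already-initialized default app); the two
  // methods below only affect this Database instance's underlying repo connection:
  //
  //   const db = firebase.database();
  //   db.goOffline();   // disconnect from the Firebase backend
  //   db.goOnline();    // re-establish the connection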
+ goOffline() { + validateArgCount('database.goOffline', 0, 0, arguments.length); + this.checkDeleted_('goOffline'); + this.repo_.interrupt(); + } + + goOnline () { + validateArgCount('database.goOnline', 0, 0, arguments.length); + this.checkDeleted_('goOnline'); + this.repo_.resume(); + } +}; + +Object.defineProperty(Repo.prototype, 'database', { + get() { + return this.__database || (this.__database = new Database(this)); + } +}); + +class DatabaseInternals { + database + /** @param {!Database} database */ + constructor(database) { + this.database = database; + } + + /** @return {firebase.Promise} */ + delete() { + this.database.checkDeleted_('delete'); + RepoManager.getInstance().deleteRepo(/** @type {!Repo} */ (this.database.repo_)); + + this.database.repo_ = null; + this.database.root_ = null; + this.database.INTERNAL = null; + this.database = null; + return PromiseImpl.resolve(); + } +}; + diff --git a/src/database/api/Query.ts b/src/database/api/Query.ts new file mode 100644 index 00000000000..be0620a3824 --- /dev/null +++ b/src/database/api/Query.ts @@ -0,0 +1,519 @@ +import { assert } from '../../utils/assert'; +import { KEY_INDEX } from '../core/snap/indexes/KeyIndex'; +import { PRIORITY_INDEX } from '../core/snap/indexes/PriorityIndex'; +import { VALUE_INDEX } from '../core/snap/indexes/ValueIndex'; +import { PathIndex } from '../core/snap/indexes/PathIndex'; +import { MIN_NAME, MAX_NAME, ObjectToUniqueKey } from '../core/util/util'; +import { Path } from '../core/util/Path'; +import { + isValidPriority, + validateEventType, + validatePathString, + validateFirebaseDataArg, + validateKey, +} from '../core/util/validation'; +import { errorPrefix, validateArgCount, validateCallback, validateContextObject } from '../../utils/validation'; +import { ValueEventRegistration, ChildEventRegistration } from '../core/view/EventRegistration'; +import { Deferred, attachDummyErrorHandler } from '../../utils/promise'; +import { Repo } from '../core/Repo'; +import { QueryParams } from '../core/view/QueryParams'; +import { Reference } from './Reference'; +import { DataSnapshot } from './DataSnapshot'; + +let __referenceConstructor: new(repo: Repo, path: Path) => Query; + +export interface SnapshotCallback { + (a: DataSnapshot, b?: string): any +} + +/** + * A Query represents a filter to be applied to a firebase location. This object purely represents the + * query expression (and exposes our public API to build the query). The actual query logic is in ViewBase.js. + * + * Since every Firebase reference is a query, Firebase inherits from this object. + */ +export class Query { + static set __referenceConstructor(val) { + __referenceConstructor = val; + } + + static get __referenceConstructor() { + assert(__referenceConstructor, 'Reference.ts has not been loaded'); + return __referenceConstructor; + } + + constructor(public repo: Repo, public path: Path, private queryParams_: QueryParams, private orderByCalled_: boolean) {} + + /** + * Validates start/end values for queries. 
+ * @param {!QueryParams} params + * @private + */ + private static validateQueryEndpoints_(params: QueryParams) { + let startNode = null; + let endNode = null; + if (params.hasStart()) { + startNode = params.getIndexStartValue(); + } + if (params.hasEnd()) { + endNode = params.getIndexEndValue(); + } + + if (params.getIndex() === KEY_INDEX) { + const tooManyArgsError = 'Query: When ordering by key, you may only pass one argument to ' + + 'startAt(), endAt(), or equalTo().'; + const wrongArgTypeError = 'Query: When ordering by key, the argument passed to startAt(), endAt(),' + + 'or equalTo() must be a string.'; + if (params.hasStart()) { + const startName = params.getIndexStartName(); + if (startName != MIN_NAME) { + throw new Error(tooManyArgsError); + } else if (typeof(startNode) !== 'string') { + throw new Error(wrongArgTypeError); + } + } + if (params.hasEnd()) { + const endName = params.getIndexEndName(); + if (endName != MAX_NAME) { + throw new Error(tooManyArgsError); + } else if (typeof(endNode) !== 'string') { + throw new Error(wrongArgTypeError); + } + } + } + else if (params.getIndex() === PRIORITY_INDEX) { + if ((startNode != null && !isValidPriority(startNode)) || + (endNode != null && !isValidPriority(endNode))) { + throw new Error('Query: When ordering by priority, the first argument passed to startAt(), ' + + 'endAt(), or equalTo() must be a valid priority value (null, a number, or a string).'); + } + } else { + assert((params.getIndex() instanceof PathIndex) || + (params.getIndex() === VALUE_INDEX), 'unknown index type.'); + if ((startNode != null && typeof startNode === 'object') || + (endNode != null && typeof endNode === 'object')) { + throw new Error('Query: First argument passed to startAt(), endAt(), or equalTo() cannot be ' + + 'an object.'); + } + } + } + + /** + * Validates that limit* has been called with the correct combination of parameters + * @param {!QueryParams} params + * @private + */ + private static validateLimit_(params: QueryParams) { + if (params.hasStart() && params.hasEnd() && params.hasLimit() && !params.hasAnchoredLimit()) { + throw new Error( + 'Query: Can\'t combine startAt(), endAt(), and limit(). Use limitToFirst() or limitToLast() instead.' + ); + } + } + + /** + * Validates that no other order by call has been made + * @param {!string} fnName + * @private + */ + private validateNoPreviousOrderByCall_(fnName: string) { + if (this.orderByCalled_ === true) { + throw new Error(fnName + ': You can\'t combine multiple orderBy calls.'); + } + } + + /** + * @return {!QueryParams} + */ + getQueryParams(): QueryParams { + return this.queryParams_; + } + + /** + * @return {!Reference} + */ + getRef(): Reference { + validateArgCount('Query.ref', 0, 0, arguments.length); + // This is a slight hack. We cannot goog.require('fb.api.Firebase'), since Firebase requires fb.api.Query. + // However, we will always export 'Firebase' to the global namespace, so it's guaranteed to exist by the time this + // method gets called. 
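    // (Reference assigns Query.__referenceConstructor at load time, see the bottom of
    // Reference.ts, which breaks the circular Query/Reference import. An illustrative
    // use of the resulting getter, assuming an existing `ref`:
    //   const q = ref.orderByKey().limitToFirst(10);
    //   q.ref;   // a plain Reference back at the same path, without the query params)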
+ return (new Query.__referenceConstructor(this.repo, this.path)); + } + + /** + * @param {!string} eventType + * @param {!function(DataSnapshot, string=)} callback + * @param {(function(Error)|Object)=} cancelCallbackOrContext + * @param {Object=} context + * @return {!function(DataSnapshot, string=)} + */ + on(eventType: string, callback: SnapshotCallback, + cancelCallbackOrContext?: ((a: Error) => any) | Object, context?: Object): SnapshotCallback { + validateArgCount('Query.on', 2, 4, arguments.length); + validateEventType('Query.on', 1, eventType, false); + validateCallback('Query.on', 2, callback, false); + + const ret = Query.getCancelAndContextArgs_('Query.on', cancelCallbackOrContext, context); + + if (eventType === 'value') { + this.onValueEvent(callback, ret.cancel, ret.context); + } else { + const callbacks = {}; + callbacks[eventType] = callback; + this.onChildEvent(callbacks, ret.cancel, ret.context); + } + return callback; + } + + /** + * @param {!function(!DataSnapshot)} callback + * @param {?function(Error)} cancelCallback + * @param {?Object} context + * @protected + */ + onValueEvent(callback: (a: DataSnapshot) => any, cancelCallback: ((a: Error) => any) | null, context: Object | null) { + const container = new ValueEventRegistration(callback, cancelCallback || null, context || null); + this.repo.addEventCallbackForQuery(this, container); + } + + /** + * @param {!Object.} callbacks + * @param {?function(Error)} cancelCallback + * @param {?Object} context + */ + onChildEvent(callbacks: { [k: string]: SnapshotCallback }, + cancelCallback: ((a: Error) => any) | null, context: Object | null) { + const container = new ChildEventRegistration(callbacks, cancelCallback, context); + this.repo.addEventCallbackForQuery(this, container); + } + + /** + * @param {string=} eventType + * @param {(function(!DataSnapshot, ?string=))=} callback + * @param {Object=} context + */ + off(eventType?: string, callback?: SnapshotCallback, context?: Object) { + validateArgCount('Query.off', 0, 3, arguments.length); + validateEventType('Query.off', 1, eventType, true); + validateCallback('Query.off', 2, callback, true); + validateContextObject('Query.off', 3, context, true); + + let container = null; + let callbacks = null; + if (eventType === 'value') { + const valueCallback = /** @type {function(!DataSnapshot)} */ (callback) || null; + container = new ValueEventRegistration(valueCallback, null, context || null); + } else if (eventType) { + if (callback) { + callbacks = {}; + callbacks[eventType] = callback; + } + container = new ChildEventRegistration(callbacks, null, context || null); + } + this.repo.removeEventCallbackForQuery(this, container); + } + + /** + * Attaches a listener, waits for the first event, and then removes the listener + * @param {!string} eventType + * @param {!function(!DataSnapshot, string=)} userCallback + * @param cancelOrContext + * @param context + * @return {!firebase.Promise} + */ + once(eventType: string, userCallback: SnapshotCallback, + cancelOrContext?, context?: Object) { + validateArgCount('Query.once', 1, 4, arguments.length); + validateEventType('Query.once', 1, eventType, false); + validateCallback('Query.once', 2, userCallback, true); + + const ret = Query.getCancelAndContextArgs_('Query.once', cancelOrContext, context); + + // TODO: Implement this more efficiently (in particular, use 'get' wire protocol for 'value' event) + // TODO: consider actually wiring the callbacks into the promise. 
We cannot do this without a breaking change + // because the API currently expects callbacks will be called synchronously if the data is cached, but this is + // against the Promise specification. + let firstCall = true; + const deferred = new Deferred(); + attachDummyErrorHandler(deferred.promise); + + const onceCallback = (snapshot) => { + // NOTE: Even though we unsubscribe, we may get called multiple times if a single action (e.g. set() with JSON) + // triggers multiple events (e.g. child_added or child_changed). + if (firstCall) { + firstCall = false; + this.off(eventType, onceCallback); + + if (userCallback) { + userCallback.bind(ret.context)(snapshot); + } + deferred.resolve(snapshot); + } + }; + + this.on(eventType, onceCallback, /*cancel=*/ (err) => { + this.off(eventType, onceCallback); + + if (ret.cancel) + ret.cancel.bind(ret.context)(err); + deferred.reject(err); + }); + return deferred.promise; + } + + /** + * Set a limit and anchor it to the start of the window. + * @param {!number} limit + * @return {!Query} + */ + limitToFirst(limit: number): Query { + validateArgCount('Query.limitToFirst', 1, 1, arguments.length); + if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) { + throw new Error('Query.limitToFirst: First argument must be a positive integer.'); + } + if (this.queryParams_.hasLimit()) { + throw new Error('Query.limitToFirst: Limit was already set (by another call to limit, ' + + 'limitToFirst, or limitToLast).'); + } + + return new Query(this.repo, this.path, this.queryParams_.limitToFirst(limit), this.orderByCalled_); + } + + /** + * Set a limit and anchor it to the end of the window. + * @param {!number} limit + * @return {!Query} + */ + limitToLast(limit: number): Query { + validateArgCount('Query.limitToLast', 1, 1, arguments.length); + if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) { + throw new Error('Query.limitToLast: First argument must be a positive integer.'); + } + if (this.queryParams_.hasLimit()) { + throw new Error('Query.limitToLast: Limit was already set (by another call to limit, ' + + 'limitToFirst, or limitToLast).'); + } + + return new Query(this.repo, this.path, this.queryParams_.limitToLast(limit), + this.orderByCalled_); + } + + /** + * Given a child path, return a new query ordered by the specified grandchild path. + * @param {!string} path + * @return {!Query} + */ + orderByChild(path: string): Query { + validateArgCount('Query.orderByChild', 1, 1, arguments.length); + if (path === '$key') { + throw new Error('Query.orderByChild: "$key" is invalid. Use Query.orderByKey() instead.'); + } else if (path === '$priority') { + throw new Error('Query.orderByChild: "$priority" is invalid. Use Query.orderByPriority() instead.'); + } else if (path === '$value') { + throw new Error('Query.orderByChild: "$value" is invalid. Use Query.orderByValue() instead.'); + } + validatePathString('Query.orderByChild', 1, path, false); + this.validateNoPreviousOrderByCall_('Query.orderByChild'); + const parsedPath = new Path(path); + if (parsedPath.isEmpty()) { + throw new Error('Query.orderByChild: cannot pass in empty path. 
Use Query.orderByValue() instead.'); + } + const index = new PathIndex(parsedPath); + const newParams = this.queryParams_.orderBy(index); + Query.validateQueryEndpoints_(newParams); + + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + } + + /** + * Return a new query ordered by the KeyIndex + * @return {!Query} + */ + orderByKey(): Query { + validateArgCount('Query.orderByKey', 0, 0, arguments.length); + this.validateNoPreviousOrderByCall_('Query.orderByKey'); + const newParams = this.queryParams_.orderBy(KEY_INDEX); + Query.validateQueryEndpoints_(newParams); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + } + + /** + * Return a new query ordered by the PriorityIndex + * @return {!Query} + */ + orderByPriority(): Query { + validateArgCount('Query.orderByPriority', 0, 0, arguments.length); + this.validateNoPreviousOrderByCall_('Query.orderByPriority'); + const newParams = this.queryParams_.orderBy(PRIORITY_INDEX); + Query.validateQueryEndpoints_(newParams); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + } + + /** + * Return a new query ordered by the ValueIndex + * @return {!Query} + */ + orderByValue(): Query { + validateArgCount('Query.orderByValue', 0, 0, arguments.length); + this.validateNoPreviousOrderByCall_('Query.orderByValue'); + const newParams = this.queryParams_.orderBy(VALUE_INDEX); + Query.validateQueryEndpoints_(newParams); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + } + + /** + * @param {number|string|boolean|null} value + * @param {?string=} name + * @return {!Query} + */ + startAt(value: number | string | boolean | null = null, name?: string | null): Query { + validateArgCount('Query.startAt', 0, 2, arguments.length); + validateFirebaseDataArg('Query.startAt', 1, value, this.path, true); + validateKey('Query.startAt', 2, name, true); + + const newParams = this.queryParams_.startAt(value, name); + Query.validateLimit_(newParams); + Query.validateQueryEndpoints_(newParams); + if (this.queryParams_.hasStart()) { + throw new Error('Query.startAt: Starting point was already set (by another call to startAt ' + + 'or equalTo).'); + } + + // Calling with no params tells us to start at the beginning. + if (value === undefined) { + value = null; + name = null; + } + return new Query(this.repo, this.path, newParams, this.orderByCalled_); + } + + /** + * @param {number|string|boolean|null} value + * @param {?string=} name + * @return {!Query} + */ + endAt(value: number | string | boolean | null = null, name?: string | null): Query { + validateArgCount('Query.endAt', 0, 2, arguments.length); + validateFirebaseDataArg('Query.endAt', 1, value, this.path, true); + validateKey('Query.endAt', 2, name, true); + + const newParams = this.queryParams_.endAt(value, name); + Query.validateLimit_(newParams); + Query.validateQueryEndpoints_(newParams); + if (this.queryParams_.hasEnd()) { + throw new Error('Query.endAt: Ending point was already set (by another call to endAt or ' + + 'equalTo).'); + } + + return new Query(this.repo, this.path, newParams, this.orderByCalled_); + } + + /** + * Load the selection of children with exactly the specified value, and, optionally, + * the specified name. 
+ * @param {number|string|boolean|null} value + * @param {string=} name + * @return {!Query} + */ + equalTo(value: number | string | boolean | null, name?: string) { + validateArgCount('Query.equalTo', 1, 2, arguments.length); + validateFirebaseDataArg('Query.equalTo', 1, value, this.path, false); + validateKey('Query.equalTo', 2, name, true); + if (this.queryParams_.hasStart()) { + throw new Error('Query.equalTo: Starting point was already set (by another call to startAt or ' + + 'equalTo).'); + } + if (this.queryParams_.hasEnd()) { + throw new Error('Query.equalTo: Ending point was already set (by another call to endAt or ' + + 'equalTo).'); + } + return this.startAt(value, name).endAt(value, name); + } + + /** + * @return {!string} URL for this location. + */ + toString(): string { + validateArgCount('Query.toString', 0, 0, arguments.length); + + return this.repo.toString() + this.path.toUrlEncodedString(); + } + + // Do not create public documentation. This is intended to make JSON serialization work but is otherwise unnecessary + // for end-users. + toJSON() { + // An optional spacer argument is unnecessary for a string. + validateArgCount('Query.toJSON', 0, 1, arguments.length); + return this.toString(); + } + + /** + * An object representation of the query parameters used by this Query. + * @return {!Object} + */ + queryObject(): Object { + return this.queryParams_.getQueryObject(); + } + + /** + * @return {!string} + */ + queryIdentifier(): string { + const obj = this.queryObject(); + const id = ObjectToUniqueKey(obj); + return (id === '{}') ? 'default' : id; + } + + /** + * Return true if this query and the provided query are equivalent; otherwise, return false. + * @param {Query} other + * @return {boolean} + */ + isEqual(other: Query): boolean { + validateArgCount('Query.isEqual', 1, 1, arguments.length); + if (!(other instanceof Query)) { + const error = 'Query.isEqual failed: First argument must be an instance of firebase.database.Query.'; + throw new Error(error); + } + + const sameRepo = (this.repo === other.repo); + const samePath = this.path.equals(other.path); + const sameQueryIdentifier = (this.queryIdentifier() === other.queryIdentifier()); + + return (sameRepo && samePath && sameQueryIdentifier); + } + + /** + * Helper used by .on and .once to extract the context and or cancel arguments. + * @param {!string} fnName The function name (on or once) + * @param {(function(Error)|Object)=} cancelOrContext + * @param {Object=} context + * @return {{cancel: ?function(Error), context: ?Object}} + * @private + */ + private static getCancelAndContextArgs_(fnName: string, cancelOrContext?: ((a: Error) => any) | Object, + context?: Object): { cancel: ((a: Error) => any) | null, context: Object | null } { + const ret = {cancel: null, context: null}; + if (cancelOrContext && context) { + ret.cancel = /** @type {function(Error)} */ (cancelOrContext); + validateCallback(fnName, 3, ret.cancel, true); + + ret.context = context; + validateContextObject(fnName, 4, ret.context, true); + } else if (cancelOrContext) { // we have either a cancel callback or a context. + if (typeof cancelOrContext === 'object' && cancelOrContext !== null) { // it's a context! 
+ ret.context = cancelOrContext; + } else if (typeof cancelOrContext === 'function') { + ret.cancel = cancelOrContext; + } else { + throw new Error(errorPrefix(fnName, 3, true) + + ' must either be a cancel callback or a context object.'); + } + } + return ret; + } + + get ref(): Reference { + return this.getRef(); + } +} diff --git a/src/database/api/Reference.ts b/src/database/api/Reference.ts new file mode 100644 index 00000000000..5654eaf9b7b --- /dev/null +++ b/src/database/api/Reference.ts @@ -0,0 +1,311 @@ +import { OnDisconnect } from './onDisconnect'; +import { TransactionResult } from './TransactionResult'; +import { warn } from '../core/util/util'; +import { nextPushId } from '../core/util/NextPushId'; +import { Query } from './Query'; +import { Repo } from '../core/Repo'; +import { Path } from '../core/util/Path'; +import { QueryParams } from '../core/view/QueryParams'; +import { + validateRootPathString, + validatePathString, + validateFirebaseMergeDataArg, + validateBoolean, + validatePriority, + validateFirebaseDataArg, + validateWritablePath, +} from '../core/util/validation'; +import { + validateArgCount, + validateCallback, +} from '../../utils/validation'; +import { Deferred, attachDummyErrorHandler, PromiseImpl } from '../../utils/promise'; +import { SyncPoint } from '../core/SyncPoint'; +import { Database } from './Database'; +import { DataSnapshot } from './DataSnapshot'; + +export class Reference extends Query { + public then; + public catch; + + /** + * Call options: + * new Reference(Repo, Path) or + * new Reference(url: string, string|RepoManager) + * + * Externally - this is the firebase.database.Reference type. + * + * @param {!Repo} repo + * @param {(!Path)} path + * @extends {Query} + */ + constructor(repo: Repo, path: Path) { + if (!(repo instanceof Repo)) { + throw new Error('new Reference() no longer supported - use app.database().'); + } + + // call Query's constructor, passing in the repo and path. + super(repo, path, QueryParams.DEFAULT, false); + } + + /** @return {?string} */ + getKey(): string | null { + validateArgCount('Reference.key', 0, 0, arguments.length); + + if (this.path.isEmpty()) + return null; + else + return this.path.getBack(); + } + + /** + * @param {!(string|Path)} pathString + * @return {!Reference} + */ + child(pathString: string | Path): Reference { + validateArgCount('Reference.child', 1, 1, arguments.length); + if (typeof pathString === 'number') { + pathString = String(pathString); + } else if (!(pathString instanceof Path)) { + if (this.path.getFront() === null) + validateRootPathString('Reference.child', 1, pathString, false); + else + validatePathString('Reference.child', 1, pathString, false); + } + + return new Reference(this.repo, this.path.child(pathString)); + } + + /** @return {?Reference} */ + getParent(): Reference | null { + validateArgCount('Reference.parent', 0, 0, arguments.length); + + const parentPath = this.path.parent(); + return parentPath === null ? 
null : new Reference(this.repo, parentPath); + } + + /** @return {!Reference} */ + getRoot(): Reference { + validateArgCount('Reference.root', 0, 0, arguments.length); + + let ref = this; + while (ref.getParent() !== null) { + ref = ref.getParent(); + } + return ref; + } + + /** @return {!Database} */ + databaseProp(): Database { + return this.repo.database; + } + + /** + * @param {*} newVal + * @param {function(?Error)=} onComplete + * @return {!Promise} + */ + set(newVal: any, onComplete?: (a: Error | null) => any): Promise { + validateArgCount('Reference.set', 1, 2, arguments.length); + validateWritablePath('Reference.set', this.path); + validateFirebaseDataArg('Reference.set', 1, newVal, this.path, false); + validateCallback('Reference.set', 2, onComplete, true); + + const deferred = new Deferred(); + this.repo.setWithPriority(this.path, newVal, /*priority=*/ null, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {!Object} objectToMerge + * @param {function(?Error)=} onComplete + * @return {!Promise} + */ + update(objectToMerge: Object, onComplete?: (a: Error | null) => any): Promise { + validateArgCount('Reference.update', 1, 2, arguments.length); + validateWritablePath('Reference.update', this.path); + + if (Array.isArray(objectToMerge)) { + const newObjectToMerge = {}; + for (let i = 0; i < objectToMerge.length; ++i) { + newObjectToMerge['' + i] = objectToMerge[i]; + } + objectToMerge = newObjectToMerge; + warn('Passing an Array to Firebase.update() is deprecated. ' + + 'Use set() if you want to overwrite the existing data, or ' + + 'an Object with integer keys if you really do want to ' + + 'only update some of the children.' + ); + } + validateFirebaseMergeDataArg('Reference.update', 1, objectToMerge, this.path, false); + validateCallback('Reference.update', 2, onComplete, true); + const deferred = new Deferred(); + this.repo.update(this.path, objectToMerge, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {*} newVal + * @param {string|number|null} newPriority + * @param {function(?Error)=} onComplete + * @return {!Promise} + */ + setWithPriority(newVal: any, newPriority: string | number | null, + onComplete?: (a: Error | null) => any): Promise { + validateArgCount('Reference.setWithPriority', 2, 3, arguments.length); + validateWritablePath('Reference.setWithPriority', this.path); + validateFirebaseDataArg('Reference.setWithPriority', 1, newVal, this.path, false); + validatePriority('Reference.setWithPriority', 2, newPriority, false); + validateCallback('Reference.setWithPriority', 3, onComplete, true); + + if (this.getKey() === '.length' || this.getKey() === '.keys') + throw 'Reference.setWithPriority failed: ' + this.getKey() + ' is a read-only object.'; + + const deferred = new Deferred(); + this.repo.setWithPriority(this.path, newVal, newPriority, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {function(?Error)=} onComplete + * @return {!Promise} + */ + remove(onComplete?: (a: Error | null) => any): Promise { + validateArgCount('Reference.remove', 0, 1, arguments.length); + validateWritablePath('Reference.remove', this.path); + validateCallback('Reference.remove', 1, onComplete, true); + + return this.set(null, onComplete); + } + + /** + * @param {function(*):*} transactionUpdate + * @param {(function(?Error, boolean, ?DataSnapshot))=} onComplete + * @param {boolean=} applyLocally + * @return {!Promise} + */ + transaction(transactionUpdate: (a: any) => any, + 
onComplete?: (a: Error | null, b: boolean, c: DataSnapshot | null) => any, + applyLocally?: boolean): Promise { + validateArgCount('Reference.transaction', 1, 3, arguments.length); + validateWritablePath('Reference.transaction', this.path); + validateCallback('Reference.transaction', 1, transactionUpdate, false); + validateCallback('Reference.transaction', 2, onComplete, true); + // NOTE: applyLocally is an internal-only option for now. We need to decide if we want to keep it and how + // to expose it. + validateBoolean('Reference.transaction', 3, applyLocally, true); + + if (this.getKey() === '.length' || this.getKey() === '.keys') + throw 'Reference.transaction failed: ' + this.getKey() + ' is a read-only object.'; + + if (applyLocally === undefined) + applyLocally = true; + + const deferred = new Deferred(); + if (typeof onComplete === 'function') { + attachDummyErrorHandler(deferred.promise); + } + + const promiseComplete = function (error, committed, snapshot) { + if (error) { + deferred.reject(error); + } else { + deferred.resolve(new TransactionResult(committed, snapshot)); + } + if (typeof onComplete === 'function') { + onComplete(error, committed, snapshot); + } + }; + this.repo.startTransaction(this.path, transactionUpdate, promiseComplete, applyLocally); + + return deferred.promise; + } + + /** + * @param {string|number|null} priority + * @param {function(?Error)=} onComplete + * @return {!Promise} + */ + setPriority(priority: string | number | null, onComplete?: (a: Error | null) => any): Promise { + validateArgCount('Reference.setPriority', 1, 2, arguments.length); + validateWritablePath('Reference.setPriority', this.path); + validatePriority('Reference.setPriority', 1, priority, false); + validateCallback('Reference.setPriority', 2, onComplete, true); + + const deferred = new Deferred(); + this.repo.setWithPriority(this.path.child('.priority'), priority, null, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {*=} value + * @param {function(?Error)=} onComplete + * @return {!Reference} + */ + push(value?: any, onComplete?: (a: Error | null) => any): Reference { + validateArgCount('Reference.push', 0, 2, arguments.length); + validateWritablePath('Reference.push', this.path); + validateFirebaseDataArg('Reference.push', 1, value, this.path, true); + validateCallback('Reference.push', 2, onComplete, true); + + const now = this.repo.serverTime(); + const name = nextPushId(now); + + // push() returns a ThennableReference whose promise is fulfilled with a regular Reference. + // We use child() to create handles to two different references. The first is turned into a + // ThennableReference below by adding then() and catch() methods and is used as the + // return value of push(). The second remains a regular Reference and is used as the fulfilled + // value of the first ThennableReference. 
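      // Usage sketch (illustrative only, assuming an existing `ref`): the returned
      // reference can be used synchronously or awaited via its promise:
      //   const newRef = ref.push({ name: 'ada' });        // Reference, usable immediately
      //   newRef.then(r => console.log('wrote', r.key));   // resolves with the plain Reference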
+ const thennablePushRef = this.child(name); + const pushRef = this.child(name); + + let promise; + if (value != null) { + promise = thennablePushRef.set(value, onComplete).then(() => pushRef); + } else { + promise = PromiseImpl.resolve(pushRef); + } + + thennablePushRef.then = promise.then.bind(promise); + thennablePushRef.catch = promise.then.bind(promise, undefined); + + if (typeof onComplete === 'function') { + attachDummyErrorHandler(promise); + } + + return thennablePushRef; + } + + /** + * @return {!OnDisconnect} + */ + onDisconnect(): OnDisconnect { + validateWritablePath('Reference.onDisconnect', this.path); + return new OnDisconnect(this.repo, this.path); + } + + get database(): Database { + return this.databaseProp(); + } + + get key(): string | null { + return this.getKey(); + } + + get parent(): Reference | null { + return this.getParent(); + } + + get root(): Reference { + return this.getRoot(); + } +} + +/** + * Define reference constructor in various modules + * + * We are doing this here to avoid several circular + * dependency issues + */ +Query.__referenceConstructor = Reference; +SyncPoint.__referenceConstructor = Reference; \ No newline at end of file diff --git a/src/database/api/TransactionResult.ts b/src/database/api/TransactionResult.ts new file mode 100644 index 00000000000..d089b2ed47b --- /dev/null +++ b/src/database/api/TransactionResult.ts @@ -0,0 +1,10 @@ +export class TransactionResult { + /** + * A type for the resolve value of Firebase.transaction. + * @constructor + * @dict + * @param {boolean} committed + * @param {fb.api.DataSnapshot} snapshot + */ + constructor(public committed, public snapshot) {} +} \ No newline at end of file diff --git a/src/database/api/internal.ts b/src/database/api/internal.ts new file mode 100644 index 00000000000..b52d60f1d28 --- /dev/null +++ b/src/database/api/internal.ts @@ -0,0 +1,44 @@ +import { WebSocketConnection } from "../realtime/WebSocketConnection"; +import { BrowserPollConnection } from "../realtime/BrowserPollConnection"; + +/** + * INTERNAL methods for internal-use only (tests, etc.). + * + * Customers shouldn't use these or else should be aware that they could break at any time. 
+ * + * @const + */ + +export const forceLongPolling = function() { + WebSocketConnection.forceDisallow(); + BrowserPollConnection.forceAllow(); +}; + +export const forceWebSockets = function() { + BrowserPollConnection.forceDisallow(); +}; + +/* Used by App Manager */ +export const isWebSocketsAvailable = function() { + return WebSocketConnection['isAvailable'](); +}; + +export const setSecurityDebugCallback = function(ref, callback) { + ref.repo.persistentConnection_.securityDebugCallback_ = callback; +}; + +export const stats = function(ref, showDelta) { + ref.repo.stats(showDelta); +}; + +export const statsIncrementCounter = function(ref, metric) { + ref.repo.statsIncrementCounter(metric); +}; + +export const dataUpdateCount = function(ref) { + return ref.repo.dataUpdateCount; +}; + +export const interceptServerData = function(ref, callback) { + return ref.repo.interceptServerData_(callback); +}; diff --git a/src/database/api/onDisconnect.ts b/src/database/api/onDisconnect.ts new file mode 100644 index 00000000000..ead95424416 --- /dev/null +++ b/src/database/api/onDisconnect.ts @@ -0,0 +1,113 @@ +import { + validateArgCount, + validateCallback +} from "../../utils/validation"; +import { + validateWritablePath, + validateFirebaseDataArg, + validatePriority, + validateFirebaseMergeDataArg, +} from "../core/util/validation"; +import { warn } from "../core/util/util"; +import { Deferred } from "../../utils/promise"; +import { Repo } from '../core/Repo'; +import { Path } from '../core/util/Path'; + +/** + * @constructor + */ +export class OnDisconnect { + /** + * @param {!Repo} repo_ + * @param {!Path} path_ + */ + constructor(private repo_: Repo, + private path_: Path) { + } + + /** + * @param {function(?Error)=} opt_onComplete + * @return {!firebase.Promise} + */ + cancel(onComplete?) { + validateArgCount('OnDisconnect.cancel', 0, 1, arguments.length); + validateCallback('OnDisconnect.cancel', 1, onComplete, true); + const deferred = new Deferred(); + this.repo_.onDisconnectCancel(this.path_, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {function(?Error)=} opt_onComplete + * @return {!firebase.Promise} + */ + remove(onComplete?) { + validateArgCount('OnDisconnect.remove', 0, 1, arguments.length); + validateWritablePath('OnDisconnect.remove', this.path_); + validateCallback('OnDisconnect.remove', 1, onComplete, true); + const deferred = new Deferred(); + this.repo_.onDisconnectSet(this.path_, null, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {*} value + * @param {function(?Error)=} opt_onComplete + * @return {!firebase.Promise} + */ + set(value, onComplete?) { + validateArgCount('OnDisconnect.set', 1, 2, arguments.length); + validateWritablePath('OnDisconnect.set', this.path_); + validateFirebaseDataArg('OnDisconnect.set', 1, value, this.path_, false); + validateCallback('OnDisconnect.set', 2, onComplete, true); + const deferred = new Deferred(); + this.repo_.onDisconnectSet(this.path_, value, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {*} value + * @param {number|string|null} priority + * @param {function(?Error)=} opt_onComplete + * @return {!firebase.Promise} + */ + setWithPriority(value, priority, onComplete?) 
{ + validateArgCount('OnDisconnect.setWithPriority', 2, 3, arguments.length); + validateWritablePath('OnDisconnect.setWithPriority', this.path_); + validateFirebaseDataArg('OnDisconnect.setWithPriority', + 1, value, this.path_, false); + validatePriority('OnDisconnect.setWithPriority', 2, priority, false); + validateCallback('OnDisconnect.setWithPriority', 3, onComplete, true); + + const deferred = new Deferred(); + this.repo_.onDisconnectSetWithPriority(this.path_, value, priority, deferred.wrapCallback(onComplete)); + return deferred.promise; + } + + /** + * @param {!Object} objectToMerge + * @param {function(?Error)=} opt_onComplete + * @return {!firebase.Promise} + */ + update(objectToMerge, onComplete?) { + validateArgCount('OnDisconnect.update', 1, 2, arguments.length); + validateWritablePath('OnDisconnect.update', this.path_); + if (Array.isArray(objectToMerge)) { + const newObjectToMerge = {}; + for (let i = 0; i < objectToMerge.length; ++i) { + newObjectToMerge['' + i] = objectToMerge[i]; + } + objectToMerge = newObjectToMerge; + warn( + 'Passing an Array to firebase.database.onDisconnect().update() is deprecated. Use set() if you want to overwrite the ' + + 'existing data, or an Object with integer keys if you really do want to only update some of the children.' + ); + } + validateFirebaseMergeDataArg('OnDisconnect.update', 1, objectToMerge, + this.path_, false); + validateCallback('OnDisconnect.update', 2, onComplete, true); + const deferred = new Deferred(); + this.repo_.onDisconnectUpdate(this.path_, objectToMerge, deferred.wrapCallback(onComplete)); + return deferred.promise; + } +} \ No newline at end of file diff --git a/src/database/api/test_access.ts b/src/database/api/test_access.ts new file mode 100644 index 00000000000..fbed5b74862 --- /dev/null +++ b/src/database/api/test_access.ts @@ -0,0 +1,72 @@ +import { RepoInfo } from "../core/RepoInfo"; +import { PersistentConnection } from "../core/PersistentConnection"; +import { RepoManager } from "../core/RepoManager"; +import { Connection } from "../realtime/Connection"; + +export const DataConnection = PersistentConnection; + +/** + * @param {!string} pathString + * @param {function(*)} onComplete + */ +(PersistentConnection.prototype as any).simpleListen = function(pathString, onComplete) { + this.sendRequest('q', {'p': pathString}, onComplete); +}; + +/** + * @param {*} data + * @param {function(*)} onEcho + */ +(PersistentConnection.prototype as any).echo = function(data, onEcho) { + this.sendRequest('echo', {'d': data}, onEcho); +}; + +// RealTimeConnection properties that we use in tests. 
+export const RealTimeConnection = Connection; + +/** + * @param {function(): string} newHash + * @return {function()} + */ +export const hijackHash = function(newHash) { + var oldPut = PersistentConnection.prototype.put; + PersistentConnection.prototype.put = function(pathString, data, opt_onComplete, opt_hash) { + if (opt_hash !== undefined) { + opt_hash = newHash(); + } + oldPut.call(this, pathString, data, opt_onComplete, opt_hash); + }; + return function() { + PersistentConnection.prototype.put = oldPut; + } +}; + +/** + * @type {function(new:fb.core.RepoInfo, !string, boolean, !string, boolean): undefined} + */ +export const ConnectionTarget = RepoInfo; + +/** + * @param {!fb.api.Query} query + * @return {!string} + */ +export const queryIdentifier = function(query) { + return query.queryIdentifier(); +}; + +/** + * @param {!fb.api.Query} firebaseRef + * @return {!Object} + */ +export const listens = function(firebaseRef) { + return firebaseRef.repo.persistentConnection_.listens_; +}; + +/** + * Forces the RepoManager to create Repos that use ReadonlyRestClient instead of PersistentConnection. + * + * @param {boolean} forceRestClient + */ +export const forceRestClient = function(forceRestClient) { + RepoManager.getInstance().forceRestClient(forceRestClient); +}; diff --git a/src/database/core/AuthTokenProvider.ts b/src/database/core/AuthTokenProvider.ts new file mode 100644 index 00000000000..1dd486c9517 --- /dev/null +++ b/src/database/core/AuthTokenProvider.ts @@ -0,0 +1,67 @@ +import { log, warn } from "./util/util"; + +/** + * Abstraction around FirebaseApp's token fetching capabilities. + */ +export class AuthTokenProvider { + private app_; + + /** + * @param {!firebase.app.App} app + */ + constructor(app) { + /** @private {!firebase.app.App} */ + this.app_ = app; + } + + /** + * @param {boolean} forceRefresh + * @return {!Promise} + */ + getToken(forceRefresh) { + return this.app_['INTERNAL']['getToken'](forceRefresh) + .then( + null, + // .catch + function(error) { + // TODO: Need to figure out all the cases this is raised and whether + // this makes sense. + if (error && error.code === 'auth/token-not-initialized') { + log('Got auth/token-not-initialized error. Treating as null token.'); + return null; + } else { + return Promise.reject(error); + } + }); + } + + addTokenChangeListener(listener) { + // TODO: We might want to wrap the listener and call it with no args to + // avoid a leaky abstraction, but that makes removing the listener harder. + this.app_['INTERNAL']['addAuthTokenListener'](listener); + } + + removeTokenChangeListener(listener) { + this.app_['INTERNAL']['removeAuthTokenListener'](listener); + } + + notifyForInvalidToken() { + var errorMessage = 'Provided authentication credentials for the app named "' + + this.app_.name + '" are invalid. This usually indicates your app was not ' + + 'initialized correctly. 
'; + if ('credential' in this.app_.options) { + errorMessage += 'Make sure the "credential" property provided to initializeApp() ' + + 'is authorized to access the specified "databaseURL" and is from the correct ' + + 'project.'; + } else if ('serviceAccount' in this.app_.options) { + errorMessage += 'Make sure the "serviceAccount" property provided to initializeApp() ' + + 'is authorized to access the specified "databaseURL" and is from the correct ' + + 'project.'; + } else { + errorMessage += 'Make sure the "apiKey" and "databaseURL" properties provided to ' + + 'initializeApp() match the values provided for your app at ' + + 'https://console.firebase.google.com/.'; + } + warn(errorMessage); + } +}; diff --git a/src/database/core/CompoundWrite.ts b/src/database/core/CompoundWrite.ts new file mode 100644 index 00000000000..1843b7429a2 --- /dev/null +++ b/src/database/core/CompoundWrite.ts @@ -0,0 +1,196 @@ +import { ImmutableTree } from "./util/ImmutableTree"; +import { Path } from "./util/Path"; +import { forEach } from "../../utils/obj"; +import { Node, NamedNode } from "./snap/Node"; +import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; +import { assert } from "../../utils/assert"; + +/** + * This class holds a collection of writes that can be applied to nodes in unison. It abstracts away the logic with + * dealing with priority writes and multiple nested writes. At any given path there is only allowed to be one write + * modifying that path. Any write to an existing path or shadowing an existing path will modify that existing write + * to reflect the write added. + * + * @constructor + * @param {!ImmutableTree.} writeTree + */ +export class CompoundWrite { + constructor(private writeTree_: ImmutableTree) {}; + /** + * @type {!CompoundWrite} + */ + static Empty = new CompoundWrite(new ImmutableTree(null)); + + /** + * @param {!Path} path + * @param {!Node} node + * @return {!CompoundWrite} + */ + addWrite(path: Path, node: Node): CompoundWrite { + if (path.isEmpty()) { + return new CompoundWrite(new ImmutableTree(node)); + } else { + var rootmost = this.writeTree_.findRootMostValueAndPath(path); + if (rootmost != null) { + var rootMostPath = rootmost.path + var value = rootmost.value; + var relativePath = Path.relativePath(rootMostPath, path); + value = value.updateChild(relativePath, node); + return new CompoundWrite(this.writeTree_.set(rootMostPath, value)); + } else { + var subtree = new ImmutableTree(node); + var newWriteTree = this.writeTree_.setTree(path, subtree); + return new CompoundWrite(newWriteTree); + } + } + }; + + /** + * @param {!Path} path + * @param {!Object.} updates + * @return {!CompoundWrite} + */ + addWrites(path: Path, updates: { [name: string]: Node }): CompoundWrite { + var newWrite = this; + forEach(updates, function(childKey, node) { + newWrite = newWrite.addWrite(path.child(childKey), node); + }); + return newWrite; + }; + + /** + * Will remove a write at the given path and deeper paths. This will not modify a write at a higher + * location, which must be removed by calling this method with that path. 
+ * + * @param {!Path} path The path at which a write and all deeper writes should be removed + * @return {!CompoundWrite} The new CompoundWrite with the removed path + */ + removeWrite(path: Path): CompoundWrite { + if (path.isEmpty()) { + return CompoundWrite.Empty; + } else { + var newWriteTree = this.writeTree_.setTree(path, ImmutableTree.Empty); + return new CompoundWrite(newWriteTree); + } + }; + + /** + * Returns whether this CompoundWrite will fully overwrite a node at a given location and can therefore be + * considered "complete". + * + * @param {!Path} path The path to check for + * @return {boolean} Whether there is a complete write at that path + */ + hasCompleteWrite(path: Path): boolean { + return this.getCompleteNode(path) != null; + }; + + /** + * Returns a node for a path if and only if the node is a "complete" overwrite at that path. This will not aggregate + * writes from deeper paths, but will return child nodes from a more shallow path. + * + * @param {!Path} path The path to get a complete write + * @return {?Node} The node if complete at that path, or null otherwise. + */ + getCompleteNode(path: Path): Node { + var rootmost = this.writeTree_.findRootMostValueAndPath(path); + if (rootmost != null) { + return this.writeTree_.get(rootmost.path).getChild(Path.relativePath(rootmost.path, path)); + } else { + return null; + } + }; + + /** + * Returns all children that are guaranteed to be a complete overwrite. + * + * @return {!Array.} A list of all complete children. + */ + getCompleteChildren(): Array { + var children = []; + var node = this.writeTree_.value; + if (node != null) { + // If it's a leaf node, it has no children; so nothing to do. + if (!node.isLeafNode()) { + node = /** @type {!ChildrenNode} */ (node); + node.forEachChild(PRIORITY_INDEX, function(childName, childNode) { + children.push(new NamedNode(childName, childNode)); + }); + } + } else { + this.writeTree_.children.inorderTraversal(function(childName, childTree) { + if (childTree.value != null) { + children.push(new NamedNode(childName, childTree.value)); + } + }); + } + return children; + }; + + /** + * @param {!Path} path + * @return {!CompoundWrite} + */ + childCompoundWrite(path: Path) { + if (path.isEmpty()) { + return this; + } else { + var shadowingNode = this.getCompleteNode(path); + if (shadowingNode != null) { + return new CompoundWrite(new ImmutableTree(shadowingNode)); + } else { + return new CompoundWrite(this.writeTree_.subtree(path)); + } + } + }; + + /** + * Returns true if this CompoundWrite is empty and therefore does not modify any nodes. + * @return {boolean} Whether this CompoundWrite is empty + */ + isEmpty() { + return this.writeTree_.isEmpty(); + }; + + /** + * Applies this CompoundWrite to a node. 
The node is returned with all writes from this CompoundWrite applied to the + * node + * @param {!Node} node The node to apply this CompoundWrite to + * @return {!Node} The node with all writes applied + */ + apply(node: Node) { + return CompoundWrite.applySubtreeWrite_(Path.Empty, this.writeTree_, node); + }; + + /** + * @param {!Path} relativePath + * @param {!ImmutableTree.} writeTree + * @param {!Node} node + * @return {!Node} + * @private + */ + static applySubtreeWrite_ = function(relativePath: Path, writeTree: ImmutableTree, node: Node) { + if (writeTree.value != null) { + // Since there a write is always a leaf, we're done here + return node.updateChild(relativePath, writeTree.value); + } else { + var priorityWrite = null; + writeTree.children.inorderTraversal(function(childKey, childTree) { + if (childKey === '.priority') { + // Apply priorities at the end so we don't update priorities for either empty nodes or forget + // to apply priorities to empty nodes that are later filled + assert(childTree.value !== null, 'Priority writes must always be leaf nodes'); + priorityWrite = childTree.value; + } else { + node = CompoundWrite.applySubtreeWrite_(relativePath.child(childKey), childTree, node); + } + }); + // If there was a priority write, we only apply it if the node is not empty + if (!node.getChild(relativePath).isEmpty() && priorityWrite !== null) { + node = node.updateChild(relativePath.child('.priority'), priorityWrite); + } + return node; + } + }; +} + diff --git a/src/database/core/PersistentConnection.ts b/src/database/core/PersistentConnection.ts new file mode 100644 index 00000000000..34741d4bfc4 --- /dev/null +++ b/src/database/core/PersistentConnection.ts @@ -0,0 +1,888 @@ +import firebase from "../../app"; +import { forEach, contains, isEmpty, getCount, safeGet } from "../../utils/obj"; +import { stringify } from "../../utils/json"; +import { assert } from '../../utils/assert'; +import { error, log, logWrapper, warn, ObjectToUniqueKey } from "./util/util"; +import { Path } from "./util/Path"; +import { VisibilityMonitor } from "./util/VisibilityMonitor"; +import { OnlineMonitor } from "./util/OnlineMonitor"; +import { isAdmin, isValidFormat } from "../../utils/jwt"; +import { Connection } from "../realtime/Connection"; +import { CONSTANTS } from "../../utils/constants"; +import { + isMobileCordova, + isReactNative, + isNodeSdk +} from "../../utils/environment"; + +var RECONNECT_MIN_DELAY = 1000; +var RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858) +var RECONNECT_MAX_DELAY_FOR_ADMINS = 30 * 1000; // 30 seconds for admin clients (likely to be a backend server) +var RECONNECT_DELAY_MULTIPLIER = 1.3; +var RECONNECT_DELAY_RESET_TIMEOUT = 30000; // Reset delay back to MIN_DELAY after being connected for 30sec. +var SERVER_KILL_INTERRUPT_REASON = "server_kill"; + +// If auth fails repeatedly, we'll assume something is wrong and log a warning / back off. +var INVALID_AUTH_TOKEN_THRESHOLD = 3; + +/** + * Firebase connection. Abstracts wire protocol and handles reconnecting. + * + * NOTE: All JSON objects sent to the realtime connection must have property names enclosed + * in quotes to make sure the closure compiler does not minify them. + */ +export class PersistentConnection { + // Used for diagnostic logging. 
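+ // (Unique per instance, taken from nextPersistentConnectionId_ in the constructor; it also prefixes log output as 'p:<id>:'.)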
+ id; + log_; + /** @private {Object} */ + interruptReasons_; + listens_; + outstandingPuts_; + outstandingPutCount_; + onDisconnectRequestQueue_; + connected_; + reconnectDelay_; + maxReconnectDelay_; + onDataUpdate_; + onConnectStatus_; + onServerInfoUpdate_; + repoInfo_; + securityDebugCallback_; + lastSessionId; + /** @private {?{ + * sendRequest(Object), + * close() + * }} */ + private realtime_; + /** @private {string|null} */ + authToken_; + authTokenProvider_; + forceTokenRefresh_; + invalidAuthTokenCount_; + /** @private {Object|null|undefined} */ + private authOverride_; + /** @private {number|null} */ + private establishConnectionTimer_; + /** @private {boolean} */ + private visible_; + + // Before we get connected, we keep a queue of pending messages to send. + requestCBHash_; + requestNumber_; + + firstConnection_; + lastConnectionAttemptTime_; + lastConnectionEstablishedTime_; + + /** + * @private + */ + static nextPersistentConnectionId_ = 0; + + /** + * Counter for number of connections created. Mainly used for tagging in the logs + * @type {number} + * @private + */ + static nextConnectionId_ = 0; + /** + * @implements {ServerActions} + * @param {!RepoInfo} repoInfo Data about the namespace we are connecting to + * @param {function(string, *, boolean, ?number)} onDataUpdate A callback for new data from the server + */ + constructor(repoInfo, onDataUpdate, onConnectStatus, + onServerInfoUpdate, authTokenProvider, authOverride) { + // Used for diagnostic logging. + this.id = PersistentConnection.nextPersistentConnectionId_++; + this.log_ = logWrapper('p:' + this.id + ':'); + /** @private {Object} */ + this.interruptReasons_ = { }; + this.listens_ = {}; + this.outstandingPuts_ = []; + this.outstandingPutCount_ = 0; + this.onDisconnectRequestQueue_ = []; + this.connected_ = false; + this.reconnectDelay_ = RECONNECT_MIN_DELAY; + this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_DEFAULT; + this.onDataUpdate_ = onDataUpdate; + this.onConnectStatus_ = onConnectStatus; + this.onServerInfoUpdate_ = onServerInfoUpdate; + this.repoInfo_ = repoInfo; + this.securityDebugCallback_ = null; + this.lastSessionId = null; + /** @private {?{ + * sendRequest(Object), + * close() + * }} */ + this.realtime_ = null; + /** @private {string|null} */ + this.authToken_ = null; + this.authTokenProvider_ = authTokenProvider; + this.forceTokenRefresh_ = false; + this.invalidAuthTokenCount_ = 0; + if (authOverride && !isNodeSdk()) { + throw new Error('Auth override specified in options, but not supported on non Node.js platforms'); + } + /** private {Object|null|undefined} */ + this.authOverride_ = authOverride; + /** @private {number|null} */ + this.establishConnectionTimer_ = null; + /** @private {boolean} */ + this.visible_ = false; + + // Before we get connected, we keep a queue of pending messages to send. + this.requestCBHash_ = {}; + this.requestNumber_ = 0; + + this.firstConnection_ = true; + this.lastConnectionAttemptTime_ = null; + this.lastConnectionEstablishedTime_ = null; + this.scheduleConnect_(0); + + VisibilityMonitor.getInstance().on('visible', this.onVisible_, this); + + if (repoInfo.host.indexOf('fblocal') === -1) { + OnlineMonitor.getInstance().on('online', this.onOnline_, this); + } + } + + /** + * @param {!string} action + * @param {*} body + * @param {function(*)=} onResponse + * @protected + */ + sendRequest(action, body, onResponse?) 
{ + var curReqNum = ++this.requestNumber_; + + var msg = {'r': curReqNum, 'a': action, 'b': body}; + this.log_(stringify(msg)); + assert(this.connected_, "sendRequest call when we're not connected not allowed."); + this.realtime_.sendRequest(msg); + if (onResponse) { + this.requestCBHash_[curReqNum] = onResponse; + } + } + + /** + * @inheritDoc + */ + listen(query, currentHashFn, tag, onComplete) { + var queryId = query.queryIdentifier(); + var pathString = query.path.toString(); + this.log_('Listen called for ' + pathString + ' ' + queryId); + this.listens_[pathString] = this.listens_[pathString] || {}; + assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), + 'listen() called for non-default but complete query'); + assert(!this.listens_[pathString][queryId], 'listen() called twice for same path/queryId.'); + var listenSpec = { + onComplete: onComplete, + hashFn: currentHashFn, + query: query, + tag: tag + }; + this.listens_[pathString][queryId] = listenSpec; + + if (this.connected_) { + this.sendListen_(listenSpec); + } + } + + /** + * @param {!{onComplete(), + * hashFn():!string, + * query: !Query, + * tag: ?number}} listenSpec + * @private + */ + sendListen_(listenSpec) { + var query = listenSpec.query; + var pathString = query.path.toString(); + var queryId = query.queryIdentifier(); + var self = this; + this.log_('Listen on ' + pathString + ' for ' + queryId); + var req = {/*path*/ 'p': pathString}; + + var action = 'q'; + + // Only bother to send query if it's non-default. + if (listenSpec.tag) { + req['q'] = query.queryObject(); + req['t'] = listenSpec.tag; + } + + req[/*hash*/'h'] = listenSpec.hashFn(); + + this.sendRequest(action, req, function(message) { + var payload = message[/*data*/ 'd']; + var status = message[/*status*/ 's']; + + // print warnings in any case... + self.warnOnListenWarnings_(payload, query); + + var currentListenSpec = self.listens_[pathString] && self.listens_[pathString][queryId]; + // only trigger actions if the listen hasn't been removed and readded + if (currentListenSpec === listenSpec) { + self.log_('listen response', message); + + if (status !== 'ok') { + self.removeListen_(pathString, queryId); + } + + if (listenSpec.onComplete) { + listenSpec.onComplete(status, payload); + } + } + }); + } + + /** + * @param {*} payload + * @param {!Query} query + * @private + */ + warnOnListenWarnings_(payload, query) { + if (payload && typeof payload === 'object' && contains(payload, 'w')) { + var warnings = safeGet(payload, 'w'); + if (Array.isArray(warnings) && ~warnings.indexOf('no_index')) { + var indexSpec = '".indexOn": "' + query.getQueryParams().getIndex().toString() + '"'; + var indexPath = query.path.toString(); + warn('Using an unspecified index. Consider adding ' + indexSpec + ' at ' + indexPath + + ' to your security rules for better performance'); + } + } + } + + /** + * @inheritDoc + */ + refreshAuthToken(token) { + this.authToken_ = token; + this.log_('Auth token refreshed'); + if (this.authToken_) { + this.tryAuth(); + } else { + //If we're connected we want to let the server know to unauthenticate us. If we're not connected, simply delete + //the credential so we dont become authenticated next time we connect. 
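+ //(No 'unauth' request is needed while disconnected; the next connection only authenticates if an auth token is present.)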
+ if (this.connected_) { + this.sendRequest('unauth', {}, function() { }); + } + } + + this.reduceReconnectDelayIfAdminCredential_(token); + } + + /** + * @param {!string} credential + * @private + */ + reduceReconnectDelayIfAdminCredential_(credential) { + // NOTE: This isn't intended to be bulletproof (a malicious developer can always just modify the client). + // Additionally, we don't bother resetting the max delay back to the default if auth fails / expires. + var isFirebaseSecret = credential && credential.length === 40; + if (isFirebaseSecret || isAdmin(credential)) { + this.log_('Admin auth credential detected. Reducing max reconnect time.'); + this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS; + } + } + + /** + * Attempts to authenticate with the given credentials. If the authentication attempt fails, it's triggered like + * a auth revoked (the connection is closed). + */ + tryAuth() { + var self = this; + if (this.connected_ && this.authToken_) { + var token = this.authToken_; + var authMethod = isValidFormat(token) ? 'auth' : 'gauth'; + var requestData = {'cred': token}; + if (this.authOverride_ === null) { + requestData['noauth'] = true; + } else if (typeof this.authOverride_ === 'object') { + requestData['authvar'] = this.authOverride_; + } + this.sendRequest(authMethod, requestData, function(res) { + var status = res[/*status*/ 's']; + var data = res[/*data*/ 'd'] || 'error'; + + if (self.authToken_ === token) { + if (status === 'ok') { + self.invalidAuthTokenCount_ = 0; + } else { + // Triggers reconnect and force refresh for auth token + self.onAuthRevoked_(status, data); + } + } + }); + } + } + + /** + * @inheritDoc + */ + unlisten(query, tag) { + var pathString = query.path.toString(); + var queryId = query.queryIdentifier(); + + this.log_("Unlisten called for " + pathString + " " + queryId); + + assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), + 'unlisten() called for non-default but complete query'); + var listen = this.removeListen_(pathString, queryId); + if (listen && this.connected_) { + this.sendUnlisten_(pathString, queryId, query.queryObject(), tag); + } + } + + sendUnlisten_(pathString, queryId, queryObj, tag) { + this.log_('Unlisten on ' + pathString + ' for ' + queryId); + var self = this; + + var req = {/*path*/ 'p': pathString}; + var action = 'n'; + // Only bother send queryId if it's non-default. 
+ if (tag) { + req['q'] = queryObj; + req['t'] = tag; + } + + this.sendRequest(action, req); + } + + /** + * @inheritDoc + */ + onDisconnectPut(pathString, data, opt_onComplete) { + if (this.connected_) { + this.sendOnDisconnect_('o', pathString, data, opt_onComplete); + } else { + this.onDisconnectRequestQueue_.push({ + pathString: pathString, + action: 'o', + data: data, + onComplete: opt_onComplete + }); + } + } + + /** + * @inheritDoc + */ + onDisconnectMerge(pathString, data, opt_onComplete) { + if (this.connected_) { + this.sendOnDisconnect_('om', pathString, data, opt_onComplete); + } else { + this.onDisconnectRequestQueue_.push({ + pathString: pathString, + action: 'om', + data: data, + onComplete: opt_onComplete + }); + } + } + + /** + * @inheritDoc + */ + onDisconnectCancel(pathString, opt_onComplete) { + if (this.connected_) { + this.sendOnDisconnect_('oc', pathString, null, opt_onComplete); + } else { + this.onDisconnectRequestQueue_.push({ + pathString: pathString, + action: 'oc', + data: null, + onComplete: opt_onComplete + }); + } + } + + sendOnDisconnect_(action, pathString, data, opt_onComplete) { + var self = this; + var request = {/*path*/ 'p': pathString, /*data*/ 'd': data}; + self.log_('onDisconnect ' + action, request); + this.sendRequest(action, request, function(response) { + if (opt_onComplete) { + setTimeout(function() { + opt_onComplete(response[/*status*/ 's'], response[/* data */'d']); + }, Math.floor(0)); + } + }); + } + + /** + * @inheritDoc + */ + put(pathString, data, opt_onComplete, opt_hash) { + this.putInternal('p', pathString, data, opt_onComplete, opt_hash); + } + + /** + * @inheritDoc + */ + merge(pathString, data, onComplete, opt_hash) { + this.putInternal('m', pathString, data, onComplete, opt_hash); + } + + putInternal(action, pathString, data, opt_onComplete, opt_hash) { + var request = {/*path*/ 'p': pathString, /*data*/ 'd': data }; + + if (opt_hash !== undefined) + request[/*hash*/ 'h'] = opt_hash; + + // TODO: Only keep track of the most recent put for a given path? + this.outstandingPuts_.push({ + action: action, + request: request, + onComplete: opt_onComplete + }); + + this.outstandingPutCount_++; + var index = this.outstandingPuts_.length - 1; + + if (this.connected_) { + this.sendPut_(index); + } else { + this.log_('Buffering put: ' + pathString); + } + } + + sendPut_(index) { + var self = this; + var action = this.outstandingPuts_[index].action; + var request = this.outstandingPuts_[index].request; + var onComplete = this.outstandingPuts_[index].onComplete; + this.outstandingPuts_[index].queued = this.connected_; + + this.sendRequest(action, request, function(message) { + self.log_(action + ' response', message); + + delete self.outstandingPuts_[index]; + self.outstandingPutCount_--; + + // Clean up array occasionally. + if (self.outstandingPutCount_ === 0) { + self.outstandingPuts_ = []; + } + + if (onComplete) + onComplete(message[/*status*/ 's'], message[/* data */ 'd']); + }); + } + + /** + * @inheritDoc + */ + reportStats(stats) { + // If we're not connected, we just drop the stats. 
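+ // (Unlike puts, stats are not queued and re-sent once the connection comes back.)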
+ if (this.connected_) { + var request = { /*counters*/ 'c': stats }; + this.log_('reportStats', request); + + this.sendRequest(/*stats*/ 's', request, function(result) { + var status = result[/*status*/ 's']; + if (status !== 'ok') { + var errorReason = result[/* data */ 'd']; + this.log_('reportStats', 'Error sending stats: ' + errorReason); + } + }); + } + } + + /** + * @param {*} message + * @private + */ + onDataMessage_(message) { + if ('r' in message) { + // this is a response + this.log_('from server: ' + stringify(message)); + var reqNum = message['r']; + var onResponse = this.requestCBHash_[reqNum]; + if (onResponse) { + delete this.requestCBHash_[reqNum]; + onResponse(message[/*body*/ 'b']); + } + } else if ('error' in message) { + throw 'A server-side error has occurred: ' + message['error']; + } else if ('a' in message) { + // a and b are action and body, respectively + this.onDataPush_(message['a'], message['b']); + } + } + + onDataPush_(action, body) { + this.log_('handleServerMessage', action, body); + if (action === 'd') + this.onDataUpdate_(body[/*path*/ 'p'], body[/*data*/ 'd'], /*isMerge*/false, body['t']); + else if (action === 'm') + this.onDataUpdate_(body[/*path*/ 'p'], body[/*data*/ 'd'], /*isMerge=*/true, body['t']); + else if (action === 'c') + this.onListenRevoked_(body[/*path*/ 'p'], body[/*query*/ 'q']); + else if (action === 'ac') + this.onAuthRevoked_(body[/*status code*/ 's'], body[/* explanation */ 'd']); + else if (action === 'sd') + this.onSecurityDebugPacket_(body); + else + error('Unrecognized action received from server: ' + stringify(action) + + '\nAre you using the latest client?'); + } + + onReady_(timestamp, sessionId) { + this.log_('connection ready'); + this.connected_ = true; + this.lastConnectionEstablishedTime_ = new Date().getTime(); + this.handleTimestamp_(timestamp); + this.lastSessionId = sessionId; + if (this.firstConnection_) { + this.sendConnectStats_(); + } + this.restoreState_(); + this.firstConnection_ = false; + this.onConnectStatus_(true); + } + + scheduleConnect_(timeout) { + assert(!this.realtime_, "Scheduling a connect when we're already connected/ing?"); + + if (this.establishConnectionTimer_) { + clearTimeout(this.establishConnectionTimer_); + } + + // NOTE: Even when timeout is 0, it's important to do a setTimeout to work around an infuriating "Security Error" in + // Firefox when trying to write to our long-polling iframe in some scenarios (e.g. Forge or our unit tests). + + var self = this; + this.establishConnectionTimer_ = setTimeout(function() { + self.establishConnectionTimer_ = null; + self.establishConnection_(); + }, Math.floor(timeout)); + } + + /** + * @param {boolean} visible + * @private + */ + onVisible_(visible) { + // NOTE: Tabbing away and back to a window will defeat our reconnect backoff, but I think that's fine. + if (visible && !this.visible_ && this.reconnectDelay_ === this.maxReconnectDelay_) { + this.log_('Window became visible. Reducing delay.'); + this.reconnectDelay_ = RECONNECT_MIN_DELAY; + + if (!this.realtime_) { + this.scheduleConnect_(0); + } + } + this.visible_ = visible; + } + + onOnline_(online) { + if (online) { + this.log_('Browser went online.'); + this.reconnectDelay_ = RECONNECT_MIN_DELAY; + if (!this.realtime_) { + this.scheduleConnect_(0); + } + } else { + this.log_("Browser went offline. 
Killing connection."); + if (this.realtime_) { + this.realtime_.close(); + } + } + } + + onRealtimeDisconnect_() { + this.log_('data client disconnected'); + this.connected_ = false; + this.realtime_ = null; + + // Since we don't know if our sent transactions succeeded or not, we need to cancel them. + this.cancelSentTransactions_(); + + // Clear out the pending requests. + this.requestCBHash_ = {}; + + if (this.shouldReconnect_()) { + if (!this.visible_) { + this.log_("Window isn't visible. Delaying reconnect."); + this.reconnectDelay_ = this.maxReconnectDelay_; + this.lastConnectionAttemptTime_ = new Date().getTime(); + } else if (this.lastConnectionEstablishedTime_) { + // If we've been connected long enough, reset reconnect delay to minimum. + var timeSinceLastConnectSucceeded = new Date().getTime() - this.lastConnectionEstablishedTime_; + if (timeSinceLastConnectSucceeded > RECONNECT_DELAY_RESET_TIMEOUT) + this.reconnectDelay_ = RECONNECT_MIN_DELAY; + this.lastConnectionEstablishedTime_ = null; + } + + var timeSinceLastConnectAttempt = new Date().getTime() - this.lastConnectionAttemptTime_; + var reconnectDelay = Math.max(0, this.reconnectDelay_ - timeSinceLastConnectAttempt); + reconnectDelay = Math.random() * reconnectDelay; + + this.log_('Trying to reconnect in ' + reconnectDelay + 'ms'); + this.scheduleConnect_(reconnectDelay); + + // Adjust reconnect delay for next time. + this.reconnectDelay_ = Math.min(this.maxReconnectDelay_, this.reconnectDelay_ * RECONNECT_DELAY_MULTIPLIER); + } + this.onConnectStatus_(false); + } + + establishConnection_() { + if (this.shouldReconnect_()) { + this.log_('Making a connection attempt'); + this.lastConnectionAttemptTime_ = new Date().getTime(); + this.lastConnectionEstablishedTime_ = null; + var onDataMessage = this.onDataMessage_.bind(this); + var onReady = this.onReady_.bind(this); + var onDisconnect = this.onRealtimeDisconnect_.bind(this); + var connId = this.id + ':' + PersistentConnection.nextConnectionId_++; + var self = this; + var lastSessionId = this.lastSessionId; + var canceled = false; + var connection = null; + var closeFn = function() { + if (connection) { + connection.close(); + } else { + canceled = true; + onDisconnect(); + } + }; + var sendRequestFn = function(msg) { + assert(connection, "sendRequest call when we're not connected not allowed."); + connection.sendRequest(msg); + }; + + this.realtime_ = { + close: closeFn, + sendRequest: sendRequestFn + }; + + var forceRefresh = this.forceTokenRefresh_; + this.forceTokenRefresh_ = false; + + // First fetch auth token, and establish connection after fetching the token was successful + this.authTokenProvider_.getToken(forceRefresh).then(function(result) { + if (!canceled) { + log('getToken() completed. Creating connection.'); + self.authToken_ = result && result.accessToken; + connection = new Connection(connId, self.repoInfo_, + onDataMessage, + onReady, + onDisconnect, /* onKill= */ function (reason) { + warn(reason + ' (' + self.repoInfo_.toString() + ')'); + self.interrupt(SERVER_KILL_INTERRUPT_REASON); + }, + lastSessionId); + } else { + log('getToken() completed but was canceled'); + } + }).then(null, function(error) { + self.log_('Failed to get token: ' + error); + if (!canceled) { + if (CONSTANTS.NODE_ADMIN) { + // This may be a critical error for the Admin Node.js SDK, so log a warning. + // But getToken() may also just have temporarily failed, so we still want to + // continue retrying. 
+ warn(error); + } + closeFn(); + } + }); + } + } + + /** + * @param {string} reason + */ + interrupt(reason) { + log('Interrupting connection for reason: ' + reason); + this.interruptReasons_[reason] = true; + if (this.realtime_) { + this.realtime_.close(); + } else { + if (this.establishConnectionTimer_) { + clearTimeout(this.establishConnectionTimer_); + this.establishConnectionTimer_ = null; + } + if (this.connected_) { + this.onRealtimeDisconnect_(); + } + } + } + + /** + * @param {string} reason + */ + resume(reason) { + log('Resuming connection for reason: ' + reason); + delete this.interruptReasons_[reason]; + if (isEmpty(this.interruptReasons_)) { + this.reconnectDelay_ = RECONNECT_MIN_DELAY; + if (!this.realtime_) { + this.scheduleConnect_(0); + } + } + } + + /** + * @param reason + * @return {boolean} + */ + isInterrupted(reason) { + return this.interruptReasons_[reason] || false; + } + + handleTimestamp_(timestamp) { + var delta = timestamp - new Date().getTime(); + this.onServerInfoUpdate_({'serverTimeOffset': delta}); + } + + cancelSentTransactions_() { + for (var i = 0; i < this.outstandingPuts_.length; i++) { + var put = this.outstandingPuts_[i]; + if (put && /*hash*/'h' in put.request && put.queued) { + if (put.onComplete) + put.onComplete('disconnect'); + + delete this.outstandingPuts_[i]; + this.outstandingPutCount_--; + } + } + + // Clean up array occasionally. + if (this.outstandingPutCount_ === 0) + this.outstandingPuts_ = []; + } + + /** + * @param {!string} pathString + * @param {Array.<*>=} opt_query + * @private + */ + onListenRevoked_(pathString, opt_query) { + // Remove the listen and manufacture a "permission_denied" error for the failed listen. + var queryId; + if (!opt_query) { + queryId = 'default'; + } else { + queryId = opt_query.map(function(q) { return ObjectToUniqueKey(q); }).join('$'); + } + var listen = this.removeListen_(pathString, queryId); + if (listen && listen.onComplete) + listen.onComplete('permission_denied'); + } + + /** + * @param {!string} pathString + * @param {!string} queryId + * @return {{queries:Array., onComplete:function(string)}} + * @private + */ + removeListen_(pathString, queryId) { + var normalizedPathString = new Path(pathString).toString(); // normalize path. + var listen; + if (this.listens_[normalizedPathString] !== undefined) { + listen = this.listens_[normalizedPathString][queryId]; + delete this.listens_[normalizedPathString][queryId]; + if (getCount(this.listens_[normalizedPathString]) === 0) { + delete this.listens_[normalizedPathString]; + } + } else { + // all listens for this path has already been removed + listen = undefined; + } + return listen; + } + + onAuthRevoked_(statusCode, explanation) { + log('Auth token revoked: ' + statusCode + '/' + explanation); + this.authToken_ = null; + this.forceTokenRefresh_ = true; + this.realtime_.close(); + if (statusCode === 'invalid_token' || statusCode === 'permission_denied') { + // We'll wait a couple times before logging the warning / increasing the + // retry period since oauth tokens will report as "invalid" if they're + // just expired. Plus there may be transient issues that resolve themselves. 
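+ // (The back-off and warning below only trigger once invalidAuthTokenCount_ reaches INVALID_AUTH_TOKEN_THRESHOLD.)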
+ this.invalidAuthTokenCount_++; + if (this.invalidAuthTokenCount_ >= INVALID_AUTH_TOKEN_THRESHOLD) { + // Set a long reconnect delay because recovery is unlikely + this.reconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS; + + // Notify the auth token provider that the token is invalid, which will log + // a warning + this.authTokenProvider_.notifyForInvalidToken(); + } + } + } + + onSecurityDebugPacket_(body) { + if (this.securityDebugCallback_) { + this.securityDebugCallback_(body); + } else { + if ('msg' in body && typeof console !== 'undefined') { + console.log('FIREBASE: ' + body['msg'].replace('\n', '\nFIREBASE: ')); + } + } + } + + restoreState_() { + //Re-authenticate ourselves if we have a credential stored. + this.tryAuth(); + + // Puts depend on having received the corresponding data update from the server before they complete, so we must + // make sure to send listens before puts. + var self = this; + forEach(this.listens_, function(pathString, queries) { + forEach(queries, function(key, listenSpec) { + self.sendListen_(listenSpec); + }); + }); + + for (var i = 0; i < this.outstandingPuts_.length; i++) { + if (this.outstandingPuts_[i]) + this.sendPut_(i); + } + + while (this.onDisconnectRequestQueue_.length) { + var request = this.onDisconnectRequestQueue_.shift(); + this.sendOnDisconnect_(request.action, request.pathString, request.data, request.onComplete); + } + } + + /** + * Sends client stats for first connection + * @private + */ + sendConnectStats_() { + var stats = {}; + + var clientName = 'js'; + if (CONSTANTS.NODE_ADMIN) { + clientName = 'admin_node'; + } else if (CONSTANTS.NODE_CLIENT) { + clientName = 'node'; + } + + stats['sdk.' + clientName + '.' + firebase.SDK_VERSION.replace(/\./g, '-')] = 1; + + if (isMobileCordova()) { + stats['framework.cordova'] = 1; + } + else if (isReactNative()) { + stats['framework.reactnative'] = 1; + } + this.reportStats(stats); + } + + /** + * @return {boolean} + * @private + */ + shouldReconnect_() { + var online = OnlineMonitor.getInstance().currentlyOnline(); + return isEmpty(this.interruptReasons_) && online; + } +}; // end PersistentConnection diff --git a/src/database/core/ReadonlyRestClient.ts b/src/database/core/ReadonlyRestClient.ts new file mode 100644 index 00000000000..8228a613ca9 --- /dev/null +++ b/src/database/core/ReadonlyRestClient.ts @@ -0,0 +1,175 @@ +import { assert } from '../../utils/assert'; +import { logWrapper, warn } from './util/util'; +import { jsonEval } from '../../utils/json'; +import { safeGet } from '../../utils/obj'; +import { querystring } from '../../utils/util'; +import { ServerActions } from './ServerActions'; +import { RepoInfo } from './RepoInfo'; +import { AuthTokenProvider } from './AuthTokenProvider'; +import { Query } from '../api/Query'; + +/** + * An implementation of ServerActions that communicates with the server via REST requests. + * This is mostly useful for compatibility with crawlers, where we don't want to spin up a full + * persistent connection (using WebSockets or long-polling) + */ +export class ReadonlyRestClient implements ServerActions { + /** @private {function(...[*])} */ + private log_: (...args: any[]) => any = logWrapper('p:rest:'); + + /** + * We don't actually need to track listens, except to prevent us calling an onComplete for a listen + * that's been removed. 
:-/ + * + * @private {!Object.} + */ + private listens_: { [k: string]: Object } = {}; + + /** + * @param {!Query} query + * @param {?number=} tag + * @return {string} + * @private + */ + static getListenId_(query: Query, tag?: number | null): string { + if (tag !== undefined) { + return 'tag$' + tag; + } else { + assert(query.getQueryParams().isDefault(), 'should have a tag if it\'s not a default query.'); + return query.path.toString(); + } + } + + /** + * @param {!RepoInfo} repoInfo_ Data about the namespace we are connecting to + * @param {function(string, *, boolean, ?number)} onDataUpdate_ A callback for new data from the server + * @param {AuthTokenProvider} authTokenProvider_ + * @implements {ServerActions} + */ + constructor(private repoInfo_: RepoInfo, + private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => any, + private authTokenProvider_: AuthTokenProvider) { + } + + /** @inheritDoc */ + listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => any) { + const pathString = query.path.toString(); + this.log_('Listen called for ' + pathString + ' ' + query.queryIdentifier()); + + // Mark this listener so we can tell if it's removed. + const listenId = ReadonlyRestClient.getListenId_(query, tag); + const thisListen = {}; + this.listens_[listenId] = thisListen; + + const queryStringParamaters = query.getQueryParams().toRestQueryStringParameters(); + + this.restRequest_(pathString + '.json', queryStringParamaters, (error, result) => { + let data = result; + + if (error === 404) { + data = null; + error = null; + } + + if (error === null) { + this.onDataUpdate_(pathString, data, /*isMerge=*/false, tag); + } + + if (safeGet(this.listens_, listenId) === thisListen) { + let status; + if (!error) { + status = 'ok'; + } else if (error == 401) { + status = 'permission_denied'; + } else { + status = 'rest_error:' + error; + } + + onComplete(status, null); + } + }); + } + + /** @inheritDoc */ + unlisten(query: Query, tag: number | null) { + const listenId = ReadonlyRestClient.getListenId_(query, tag); + delete this.listens_[listenId]; + } + + /** @inheritDoc */ + refreshAuthToken(token: string) { + // no-op since we just always call getToken. + } + + /** @inheritDoc */ + onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => any) { } + + /** @inheritDoc */ + onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => any) { } + + /** @inheritDoc */ + onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => any) { } + + /** @inheritDoc */ + put(pathString: string, data: any, onComplete?: (a: string, b: string) => any, hash?: string) { } + + /** @inheritDoc */ + merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => any, hash?: string) { } + + /** @inheritDoc */ + reportStats(stats: { [k: string]: any }) { } + + /** + * Performs a REST request to the given path, with the provided query string parameters, + * and any auth credentials we have. 
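+ *
+ * For example, listening on /users/123 with an access token available produces a GET request to
+ * a URL of roughly this shape (illustrative values only):
+ *   https://<host>/users/123.json?format=export&auth=<accessToken>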
+ * + * @param {!string} pathString + * @param {!Object.} queryStringParameters + * @param {?function(?number, *=)} callback + * @private + */ + private restRequest_(pathString: string, queryStringParameters: {[k: string]: any} = {}, + callback: ((a: number | null, b?: any) => any) | null) { + queryStringParameters['format'] = 'export'; + + this.authTokenProvider_.getToken(/*forceRefresh=*/false).then((authTokenData) => { + const authToken = authTokenData && authTokenData.accessToken; + if (authToken) { + queryStringParameters['auth'] = authToken; + } + + const url = (this.repoInfo_.secure ? 'https://' : 'http://') + + this.repoInfo_.host + + pathString + + '?' + + querystring(queryStringParameters); + + this.log_('Sending REST request for ' + url); + const xhr = new XMLHttpRequest(); + xhr.onreadystatechange = () => { + if (callback && xhr.readyState === 4) { + this.log_('REST Response for ' + url + ' received. status:', xhr.status, 'response:', xhr.responseText); + let res = null; + if (xhr.status >= 200 && xhr.status < 300) { + try { + res = jsonEval(xhr.responseText); + } catch (e) { + warn('Failed to parse JSON response for ' + url + ': ' + xhr.responseText); + } + callback(null, res); + } else { + // 401 and 404 are expected. + if (xhr.status !== 401 && xhr.status !== 404) { + warn('Got unsuccessful REST response for ' + url + ' Status: ' + xhr.status); + } + callback(xhr.status); + } + callback = null; + } + }; + + xhr.open('GET', url, /*asynchronous=*/true); + xhr.send(); + }); + } +} diff --git a/src/database/core/Repo.ts b/src/database/core/Repo.ts new file mode 100644 index 00000000000..8c9eb2f55c4 --- /dev/null +++ b/src/database/core/Repo.ts @@ -0,0 +1,565 @@ +import { + generateWithValues, + resolveDeferredValueSnapshot, + resolveDeferredValueTree +} from './util/ServerValues'; +import { nodeFromJSON } from './snap/nodeFromJSON'; +import { Path } from './util/Path'; +import { SparseSnapshotTree } from './SparseSnapshotTree'; +import { SyncTree } from './SyncTree'; +import { SnapshotHolder } from './SnapshotHolder'; +import { stringify } from '../../utils/json'; +import { beingCrawled, each, exceptionGuard, warn, log } from './util/util'; +import { map, forEach, isEmpty } from '../../utils/obj'; +import { AuthTokenProvider } from './AuthTokenProvider'; +import { StatsManager } from './stats/StatsManager'; +import { StatsReporter } from './stats/StatsReporter'; +import { StatsListener } from './stats/StatsListener'; +import { EventQueue } from './view/EventQueue'; +import { PersistentConnection } from './PersistentConnection'; +import { ReadonlyRestClient } from './ReadonlyRestClient'; +import { FirebaseApp } from '../../app/firebase_app'; +import { RepoInfo } from './RepoInfo'; +import { Database } from '../api/Database'; +import { ServerActions } from './ServerActions'; +import { Query } from '../api/Query'; +import { EventRegistration } from './view/EventRegistration'; + +const INTERRUPT_REASON = 'repo_interrupt'; + +/** + * A connection to a single data repository. 
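+ *
+ * It owns two SyncTrees -- one for '.info' metadata and one for server data -- and talks to the
+ * backend through a PersistentConnection, or a ReadonlyRestClient when the page is being crawled
+ * or forceRestClient is set.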
+ */ +export class Repo { + /** @type {!Database} */ + database: Database; + infoSyncTree_: SyncTree; + dataUpdateCount; + serverSyncTree_: SyncTree; + + public repoInfo_; + private stats_; + private statsListener_; + private eventQueue_; + private nextWriteId_; + private server_: ServerActions; + private statsReporter_; + private transactions_init_; + private infoData_; + private onDisconnect_; + private abortTransactions_; + private rerunTransactions_; + private interceptServerDataCallback_; + + /** + * TODO: This should be @private but it's used by test_access.js and internal.js + * @type {?PersistentConnection} + */ + persistentConnection_: PersistentConnection | null = null; + + /** + * @param {!RepoInfo} repoInfo + * @param {boolean} forceRestClient + * @param {!FirebaseApp} app + */ + constructor(repoInfo: RepoInfo, forceRestClient: boolean, public app: FirebaseApp) { + /** @type {!AuthTokenProvider} */ + const authTokenProvider = new AuthTokenProvider(app); + + this.repoInfo_ = repoInfo; + this.stats_ = StatsManager.getCollection(repoInfo); + /** @type {StatsListener} */ + this.statsListener_ = null; + this.eventQueue_ = new EventQueue(); + this.nextWriteId_ = 1; + + if (forceRestClient || beingCrawled()) { + this.server_ = new ReadonlyRestClient(this.repoInfo_, + this.onDataUpdate_.bind(this), + authTokenProvider); + + // Minor hack: Fire onConnect immediately, since there's no actual connection. + setTimeout(this.onConnectStatus_.bind(this, true), 0); + } else { + const authOverride = app.options['databaseAuthVariableOverride']; + // Validate authOverride + if (typeof authOverride !== 'undefined' && authOverride !== null) { + if (typeof authOverride !== 'object') { + throw new Error('Only objects are supported for option databaseAuthVariableOverride'); + } + try { + stringify(authOverride); + } catch (e) { + throw new Error('Invalid authOverride provided: ' + e); + } + } + + this.persistentConnection_ = new PersistentConnection(this.repoInfo_, + this.onDataUpdate_.bind(this), + this.onConnectStatus_.bind(this), + this.onServerInfoUpdate_.bind(this), + authTokenProvider, + authOverride); + + this.server_ = this.persistentConnection_; + } + + authTokenProvider.addTokenChangeListener((token) => { + this.server_.refreshAuthToken(token); + }); + + // In the case of multiple Repos for the same repoInfo (i.e. there are multiple Firebase.Contexts being used), + // we only want to create one StatsReporter. As such, we'll report stats over the first Repo created. + this.statsReporter_ = StatsManager.getOrCreateReporter(repoInfo, + () => new StatsReporter(this.stats_, this.server_)); + + this.transactions_init_(); + + // Used for .info. + this.infoData_ = new SnapshotHolder(); + this.infoSyncTree_ = new SyncTree({ + startListening: (query, tag, currentHashFn, onComplete) => { + let infoEvents = []; + const node = this.infoData_.getNode(query.path); + // This is possibly a hack, but we have different semantics for .info endpoints. We don't raise null events + // on initial data... + if (!node.isEmpty()) { + infoEvents = this.infoSyncTree_.applyServerOverwrite(query.path, node); + setTimeout(() => { + onComplete('ok'); + }, 0); + } + return infoEvents; + }, + stopListening: () => {} + }); + this.updateInfo_('connected', false); + + // A list of data pieces and paths to be set when this client disconnects. 
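+ // (Populated by the onDisconnect*() methods below; runOnDisconnectEvents_ applies them locally when the connection drops.)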
+ this.onDisconnect_ = new SparseSnapshotTree(); + + this.dataUpdateCount = 0; + + this.interceptServerDataCallback_ = null; + + this.serverSyncTree_ = new SyncTree({ + startListening: (query, tag, currentHashFn, onComplete) => { + this.server_.listen(query, currentHashFn, tag, (status, data) => { + const events = onComplete(status, data); + this.eventQueue_.raiseEventsForChangedPath(query.path, events); + }); + // No synchronous events for network-backed sync trees + return []; + }, + stopListening: (query, tag) => { + this.server_.unlisten(query, tag); + } + }); + } + + /** + * @return {string} The URL corresponding to the root of this Firebase. + */ + toString(): string { + return (this.repoInfo_.secure ? 'https://' : 'http://') + this.repoInfo_.host; + } + + /** + * @return {!string} The namespace represented by the repo. + */ + name(): string { + return this.repoInfo_.namespace; + } + + /** + * @return {!number} The time in milliseconds, taking the server offset into account if we have one. + */ + serverTime(): number { + const offsetNode = this.infoData_.getNode(new Path('.info/serverTimeOffset')); + const offset = /** @type {number} */ (offsetNode.val()) || 0; + return new Date().getTime() + offset; + } + + /** + * Generate ServerValues using some variables from the repo object. + * @return {!Object} + */ + generateServerValues(): Object { + return generateWithValues({ + 'timestamp': this.serverTime() + }); + } + + /** + * Called by realtime when we get new messages from the server. + * + * @private + * @param {string} pathString + * @param {*} data + * @param {boolean} isMerge + * @param {?number} tag + */ + private onDataUpdate_(pathString: string, data: any, isMerge: boolean, tag: number | null) { + // For testing. + this.dataUpdateCount++; + const path = new Path(pathString); + data = this.interceptServerDataCallback_ ? this.interceptServerDataCallback_(pathString, data) : data; + let events = []; + if (tag) { + if (isMerge) { + const taggedChildren = map(/**@type {!Object.} */ (data), (raw) => nodeFromJSON(raw)); + events = this.serverSyncTree_.applyTaggedQueryMerge(path, taggedChildren, tag); + } else { + const taggedSnap = nodeFromJSON(data); + events = this.serverSyncTree_.applyTaggedQueryOverwrite(path, taggedSnap, tag); + } + } else if (isMerge) { + const changedChildren = map(/**@type {!Object.} */ (data), (raw) => nodeFromJSON(raw)); + events = this.serverSyncTree_.applyServerMerge(path, changedChildren); + } else { + const snap = nodeFromJSON(data); + events = this.serverSyncTree_.applyServerOverwrite(path, snap); + } + let affectedPath = path; + if (events.length > 0) { + // Since we have a listener outstanding for each transaction, receiving any events + // is a proxy for some change having occurred. 
+ affectedPath = this.rerunTransactions_(path); + } + this.eventQueue_.raiseEventsForChangedPath(affectedPath, events); + } + + /** + * @param {?function(!string, *):*} callback + * @private + */ + private interceptServerData_(callback: (a: string, b: any) => any) { + this.interceptServerDataCallback_ = callback; + } + + /** + * @param {!boolean} connectStatus + * @private + */ + private onConnectStatus_(connectStatus: boolean) { + this.updateInfo_('connected', connectStatus); + if (connectStatus === false) { + this.runOnDisconnectEvents_(); + } + } + + /** + * @param {!Object} updates + * @private + */ + private onServerInfoUpdate_(updates: Object) { + each(updates, (value: any, key: string) => { + this.updateInfo_(key, value); + }); + } + + /** + * + * @param {!string} pathString + * @param {*} value + * @private + */ + private updateInfo_(pathString: string, value: any) { + const path = new Path('/.info/' + pathString); + const newNode = nodeFromJSON(value); + this.infoData_.updateSnapshot(path, newNode); + const events = this.infoSyncTree_.applyServerOverwrite(path, newNode); + this.eventQueue_.raiseEventsForChangedPath(path, events); + } + + /** + * @return {!number} + * @private + */ + private getNextWriteId_(): number { + return this.nextWriteId_++; + } + + /** + * @param {!Path} path + * @param {*} newVal + * @param {number|string|null} newPriority + * @param {?function(?Error, *=)} onComplete + */ + setWithPriority(path: Path, newVal: any, newPriority: number | string | null, + onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + this.log_('set', {path: path.toString(), value: newVal, priority: newPriority}); + + // TODO: Optimize this behavior to either (a) store flag to skip resolving where possible and / or + // (b) store unresolved paths on JSON parse + const serverValues = this.generateServerValues(); + const newNodeUnresolved = nodeFromJSON(newVal, newPriority); + const newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + + const writeId = this.getNextWriteId_(); + const events = this.serverSyncTree_.applyUserOverwrite(path, newNode, writeId, true); + this.eventQueue_.queueEvents(events); + this.server_.put(path.toString(), newNodeUnresolved.val(/*export=*/true), (status, errorReason) => { + const success = status === 'ok'; + if (!success) { + warn('set at ' + path + ' failed: ' + status); + } + + const clearEvents = this.serverSyncTree_.ackUserWrite(writeId, !success); + this.eventQueue_.raiseEventsForChangedPath(path, clearEvents); + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + const affectedPath = this.abortTransactions_(path); + this.rerunTransactions_(affectedPath); + // We queued the events above, so just flush the queue here + this.eventQueue_.raiseEventsForChangedPath(affectedPath, []); + } + + /** + * @param {!Path} path + * @param {!Object} childrenToMerge + * @param {?function(?Error, *=)} onComplete + */ + update(path: Path, childrenToMerge: Object, + onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + this.log_('update', {path: path.toString(), value: childrenToMerge}); + + // Start with our existing data and merge each child into it. 
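+ // Deferred values (e.g. ServerValue.TIMESTAMP) in each child are resolved locally via resolveDeferredValueSnapshot.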
+ let empty = true; + const serverValues = this.generateServerValues(); + const changedChildren = {}; + forEach(childrenToMerge, function (changedKey, changedValue) { + empty = false; + const newNodeUnresolved = nodeFromJSON(changedValue); + changedChildren[changedKey] = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + }); + + if (!empty) { + const writeId = this.getNextWriteId_(); + const events = this.serverSyncTree_.applyUserMerge(path, changedChildren, writeId); + this.eventQueue_.queueEvents(events); + this.server_.merge(path.toString(), childrenToMerge, (status, errorReason) => { + const success = status === 'ok'; + if (!success) { + warn('update at ' + path + ' failed: ' + status); + } + + const clearEvents = this.serverSyncTree_.ackUserWrite(writeId, !success); + const affectedPath = (clearEvents.length > 0) ? this.rerunTransactions_(path) : path; + this.eventQueue_.raiseEventsForChangedPath(affectedPath, clearEvents); + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + + forEach(childrenToMerge, (changedPath, changedValue) => { + const affectedPath = this.abortTransactions_(path.child(changedPath)); + this.rerunTransactions_(affectedPath); + }); + + // We queued the events above, so just flush the queue here + this.eventQueue_.raiseEventsForChangedPath(path, []); + } else { + log('update() called with empty data. Don\'t do anything.'); + this.callOnCompleteCallback(onComplete, 'ok'); + } + } + + /** + * Applies all of the changes stored up in the onDisconnect_ tree. + * @private + */ + private runOnDisconnectEvents_() { + this.log_('onDisconnectEvents'); + + const serverValues = this.generateServerValues(); + const resolvedOnDisconnectTree = resolveDeferredValueTree(this.onDisconnect_, serverValues); + let events = []; + + resolvedOnDisconnectTree.forEachTree(Path.Empty, (path, snap) => { + events = events.concat(this.serverSyncTree_.applyServerOverwrite(path, snap)); + const affectedPath = this.abortTransactions_(path); + this.rerunTransactions_(affectedPath); + }); + + this.onDisconnect_ = new SparseSnapshotTree(); + this.eventQueue_.raiseEventsForChangedPath(Path.Empty, events); + } + + /** + * @param {!Path} path + * @param {?function(?Error, *=)} onComplete + */ + onDisconnectCancel(path: Path, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + this.server_.onDisconnectCancel(path.toString(), (status, errorReason) => { + if (status === 'ok') { + this.onDisconnect_.forget(path); + } + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + } + + /** + * @param {!Path} path + * @param {*} value + * @param {?function(?Error, *=)} onComplete + */ + onDisconnectSet(path: Path, value: any, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + const newNode = nodeFromJSON(value); + this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { + if (status === 'ok') { + this.onDisconnect_.remember(path, newNode); + } + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + } + + /** + * @param {!Path} path + * @param {*} value + * @param {*} priority + * @param {?function(?Error, *=)} onComplete + */ + onDisconnectSetWithPriority(path, value, priority, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + const newNode = nodeFromJSON(value, priority); + this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { + if (status === 'ok') { + 
this.onDisconnect_.remember(path, newNode); + } + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + } + + /** + * @param {!Path} path + * @param {*} childrenToMerge + * @param {?function(?Error, *=)} onComplete + */ + onDisconnectUpdate(path, childrenToMerge, + onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + if (isEmpty(childrenToMerge)) { + log('onDisconnect().update() called with empty data. Don\'t do anything.'); + this.callOnCompleteCallback(onComplete, 'ok'); + return; + } + + this.server_.onDisconnectMerge(path.toString(), childrenToMerge, (status, errorReason) => { + if (status === 'ok') { + forEach(childrenToMerge, (childName: string, childNode: any) => { + const newChildNode = nodeFromJSON(childNode); + this.onDisconnect_.remember(path.child(childName), newChildNode); + }); + } + this.callOnCompleteCallback(onComplete, status, errorReason); + }); + } + + /** + * @param {!Query} query + * @param {!EventRegistration} eventRegistration + */ + addEventCallbackForQuery(query: Query, eventRegistration: EventRegistration) { + let events; + if (query.path.getFront() === '.info') { + events = this.infoSyncTree_.addEventRegistration(query, eventRegistration); + } else { + events = this.serverSyncTree_.addEventRegistration(query, eventRegistration); + } + this.eventQueue_.raiseEventsAtPath(query.path, events); + } + + /** + * @param {!Query} query + * @param {?EventRegistration} eventRegistration + */ + removeEventCallbackForQuery(query: Query, eventRegistration: EventRegistration) { + // These are guaranteed not to raise events, since we're not passing in a cancelError. However, we can future-proof + // a little bit by handling the return values anyways. + let events; + if (query.path.getFront() === '.info') { + events = this.infoSyncTree_.removeEventRegistration(query, eventRegistration); + } else { + events = this.serverSyncTree_.removeEventRegistration(query, eventRegistration); + } + this.eventQueue_.raiseEventsAtPath(query.path, events); + } + + interrupt() { + if (this.persistentConnection_) { + this.persistentConnection_.interrupt(INTERRUPT_REASON); + } + } + + resume() { + if (this.persistentConnection_) { + this.persistentConnection_.resume(INTERRUPT_REASON); + } + } + + stats(showDelta: boolean = false) { + if (typeof console === 'undefined') + return; + + let stats; + if (showDelta) { + if (!this.statsListener_) + this.statsListener_ = new StatsListener(this.stats_); + stats = this.statsListener_.get(); + } else { + stats = this.stats_.get(); + } + + const longestName = Object.keys(stats).reduce( + function (previousValue, currentValue, index, array) { + return Math.max(currentValue.length, previousValue); + }, 0); + + forEach(stats, (stat, value) => { + // pad stat names to be the same length (plus 2 extra spaces). 
+ for (let i = stat.length; i < longestName + 2; i++) + stat += ' '; + console.log(stat + value); + }); + } + + statsIncrementCounter(metric) { + this.stats_.incrementCounter(metric); + this.statsReporter_.includeStat(metric); + } + + /** + * @param {...*} var_args + * @private + */ + private log_(...var_args: any[]) { + let prefix = ''; + if (this.persistentConnection_) { + prefix = this.persistentConnection_.id + ':'; + } + log(prefix, var_args); + } + + /** + * @param {?function(?Error, *=)} callback + * @param {!string} status + * @param {?string=} errorReason + */ + callOnCompleteCallback(callback: ((status: Error | null, errorReason?: string) => any) | null, + status: string, errorReason?: string | null) { + if (callback) { + exceptionGuard(function () { + if (status == 'ok') { + callback(null); + } else { + const code = (status || 'error').toUpperCase(); + let message = code; + if (errorReason) + message += ': ' + errorReason; + + const error = new Error(message); + (error as any).code = code; + callback(error); + } + }); + } + } +} + diff --git a/src/database/core/RepoInfo.ts b/src/database/core/RepoInfo.ts new file mode 100644 index 00000000000..96fd5567896 --- /dev/null +++ b/src/database/core/RepoInfo.ts @@ -0,0 +1,100 @@ +import { assert } from "../../utils/assert"; +import { forEach } from "../../utils/obj"; +import { PersistentStorage } from './storage/storage'; +import { CONSTANTS } from "../realtime/Constants"; +/** + * A class that holds metadata about a Repo object + * @param {string} host Hostname portion of the url for the repo + * @param {boolean} secure Whether or not this repo is accessed over ssl + * @param {string} namespace The namespace represented by the repo + * @param {boolean} webSocketOnly Whether to prefer websockets over all other transports (used by Nest). + * @param {string=} persistenceKey Override the default session persistence storage key + * @constructor + */ +export class RepoInfo { + host; + domain; + secure; + namespace; + webSocketOnly; + persistenceKey; + internalHost; + + constructor(host, secure, namespace, webSocketOnly, persistenceKey?) { + this.host = host.toLowerCase(); + this.domain = this.host.substr(this.host.indexOf('.') + 1); + this.secure = secure; + this.namespace = namespace; + this.webSocketOnly = webSocketOnly; + this.persistenceKey = persistenceKey || ''; + this.internalHost = PersistentStorage.get('host:' + host) || this.host; + } + needsQueryParam() { + return this.host !== this.internalHost; + }; + + isCacheableHost() { + return this.internalHost.substr(0, 2) === 's-'; + }; + + isDemoHost() { + return this.domain === 'firebaseio-demo.com'; + }; + + isCustomHost() { + return this.domain !== 'firebaseio.com' && this.domain !== 'firebaseio-demo.com'; + }; + + updateHost(newHost) { + if (newHost !== this.internalHost) { + this.internalHost = newHost; + if (this.isCacheableHost()) { + PersistentStorage.set('host:' + this.host, this.internalHost); + } + } + }; + + /** + * Returns the websocket URL for this repo + * @param {string} type of connection + * @param {Object} params list + * @return {string} The URL for this repo + */ + connectionURL(type, params) { + assert(typeof type === 'string', 'typeof type must == string'); + assert(typeof params === 'object', 'typeof params must == object'); + var connURL; + if (type === CONSTANTS.WEBSOCKET) { + connURL = (this.secure ? 'wss://' : 'ws://') + this.internalHost + '/.ws?'; + } else if (type === CONSTANTS.LONG_POLLING) { + connURL = (this.secure ? 
'https://' : 'http://') + this.internalHost + '/.lp?'; + } else { + throw new Error('Unknown connection type: ' + type); + } + if (this.needsQueryParam()) { + params['ns'] = this.namespace; + } + + var pairs = []; + + forEach(params, (key, value) => { + pairs.push(key + '=' + value); + }); + + return connURL + pairs.join('&'); + }; + + /** @return {string} */ + toString() { + var str = this.toURLString(); + if (this.persistenceKey) { + str += '<' + this.persistenceKey + '>'; + } + return str; + }; + + /** @return {string} */ + toURLString() { + return (this.secure ? 'https://' : 'http://') + this.host; + }; +} diff --git a/src/database/core/RepoManager.ts b/src/database/core/RepoManager.ts new file mode 100644 index 00000000000..e7066769fe2 --- /dev/null +++ b/src/database/core/RepoManager.ts @@ -0,0 +1,121 @@ +import { FirebaseApp } from "../../app/firebase_app"; +import { safeGet } from "../../utils/obj"; +import { Repo } from "./Repo"; +import { fatal } from "./util/util"; +import { parseRepoInfo } from "./util/libs/parser"; +import { validateUrl } from "./util/validation"; +import "./Repo_transaction"; +import { Database } from '../api/Database'; + +/** @const {string} */ +var DATABASE_URL_OPTION = 'databaseURL'; + +let _staticInstance; + +/** + * Creates and caches Repo instances. + */ +export class RepoManager { + /** + * @private {!Object.} + */ + private repos_: { + [name: string]: Repo + } = {}; + + /** + * If true, new Repos will be created to use ReadonlyRestClient (for testing purposes). + * @private {boolean} + */ + private useRestClient_: boolean = false; + + static getInstance() { + if (!_staticInstance) { + _staticInstance = new RepoManager(); + } + return _staticInstance; + } + + // TODO(koss): Remove these functions unless used in tests? + interrupt() { + for (var repo in this.repos_) { + this.repos_[repo].interrupt(); + } + } + + resume() { + for (var repo in this.repos_) { + this.repos_[repo].resume(); + } + } + + /** + * This function should only ever be called to CREATE a new database instance. + * + * @param {!App} app + * @return {!Database} + */ + databaseFromApp(app: FirebaseApp): Database { + var dbUrl: string = app.options[DATABASE_URL_OPTION]; + if (dbUrl === undefined) { + fatal("Can't determine Firebase Database URL. Be sure to include " + + DATABASE_URL_OPTION + + " option when calling firebase.intializeApp()."); + } + + var parsedUrl = parseRepoInfo(dbUrl); + var repoInfo = parsedUrl.repoInfo; + + validateUrl('Invalid Firebase Database URL', 1, parsedUrl); + if (!parsedUrl.path.isEmpty()) { + fatal("Database URL must point to the root of a Firebase Database " + + "(not including a child path)."); + } + + var repo = this.createRepo(repoInfo, app); + + return repo.database; + } + + /** + * Remove the repo and make sure it is disconnected. + * + * @param {!Repo} repo + */ + deleteRepo(repo) { + + // This should never happen... + if (safeGet(this.repos_, repo.app.name) !== repo) { + fatal("Database " + repo.app.name + " has already been deleted."); + } + repo.interrupt(); + delete this.repos_[repo.app.name]; + } + + /** + * Ensures a repo doesn't already exist and then creates one using the + * provided app. + * + * @param {!RepoInfo} repoInfo The metadata about the Repo + * @param {!FirebaseApp} app + * @return {!Repo} The Repo object for the specified server / repoName. 
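+ * (Repos are cached per app name; calling this twice for the same app hits the 'initialized multiple times' fatal error.)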
+ */ + createRepo(repoInfo, app: FirebaseApp): Repo { + var repo = safeGet(this.repos_, app.name); + if (repo) { + fatal('FIREBASE INTERNAL ERROR: Database initialized multiple times.'); + } + repo = new Repo(repoInfo, this.useRestClient_, app); + this.repos_[app.name] = repo; + + return repo; + } + + /** + * Forces us to use ReadonlyRestClient instead of PersistentConnection for new Repos. + * @param {boolean} forceRestClient + */ + forceRestClient(forceRestClient) { + this.useRestClient_ = forceRestClient; + } +}; // end RepoManager \ No newline at end of file diff --git a/src/database/core/Repo_transaction.ts b/src/database/core/Repo_transaction.ts new file mode 100644 index 00000000000..fa9a95ab805 --- /dev/null +++ b/src/database/core/Repo_transaction.ts @@ -0,0 +1,654 @@ +import { assert } from "../../utils/assert"; +import { Reference } from "../api/Reference"; +import { DataSnapshot } from "../api/DataSnapshot"; +import { Path } from "./util/Path"; +import { Tree } from "./util/Tree"; +import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; +import { Node } from "./snap/Node"; +import { + LUIDGenerator, + warn, + exceptionGuard, +} from "./util/util"; +import { resolveDeferredValueSnapshot } from "./util/ServerValues"; +import { isValidPriority, validateFirebaseData } from "./util/validation"; +import { contains, safeGet } from "../../utils/obj"; +import { nodeFromJSON } from "./snap/nodeFromJSON"; +import { ChildrenNode } from "./snap/ChildrenNode"; +import { Repo } from "./Repo"; + +// TODO: This is pretty messy. Ideally, a lot of this would move into FirebaseData, or a transaction-specific +// component used by FirebaseData, but it has ties to user callbacks (transaction update and onComplete) as well +// as the realtime connection (to send transactions to the server). So that all needs to be decoupled first. +// For now it's part of Repo, but in its own file. + +/** + * @enum {number} + */ +export const TransactionStatus = { + // We've run the transaction and updated transactionResultData_ with the result, but it isn't currently sent to the + // server. A transaction will go from RUN -> SENT -> RUN if it comes back from the server as rejected due to + // mismatched hash. + RUN: 1, + + // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted + // or rejected yet). + SENT: 2, + + // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be + // removed when we get a chance to prune completed ones. + COMPLETED: 3, + + // Used when an already-sent transaction needs to be aborted (e.g. due to a conflicting set() call that was made). + // If it comes back as unsuccessful, we'll abort it. + SENT_NEEDS_ABORT: 4, + + // Temporary state used to mark transactions that need to be aborted. + NEEDS_ABORT: 5 +}; + +/** + * If a transaction does not succeed after 25 retries, we abort it. Among other things this ensure that if there's + * ever a bug causing a mismatch between client / server hashes for some data, we won't retry indefinitely. 
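As a reading aid for the status values defined above (not part of the change itself), the transitions that the send/rerun/abort helpers later in this file actually perform can be summarized as a small map; any transition not listed below does not occur in this code:

// Summary only; derived from sendTransactionQueue_, rerunTransactionQueue_ and
// abortTransactionsOnNode_ below rather than enforced anywhere.
const OBSERVED_TRANSITIONS: { [from: number]: number[] } = {
  [TransactionStatus.RUN]: [TransactionStatus.SENT, TransactionStatus.COMPLETED],
  [TransactionStatus.SENT]: [
    TransactionStatus.COMPLETED,        // server replied 'ok'
    TransactionStatus.RUN,              // 'datastale': rerun against fresh data
    TransactionStatus.NEEDS_ABORT,      // other server error
    TransactionStatus.SENT_NEEDS_ABORT  // conflicting local set()/update()
  ],
  [TransactionStatus.SENT_NEEDS_ABORT]: [TransactionStatus.NEEDS_ABORT],
  [TransactionStatus.NEEDS_ABORT]: [TransactionStatus.COMPLETED]
};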
+ * @type {number} + * @const + * @private + */ +(Repo as any).MAX_TRANSACTION_RETRIES_ = 25; + +/** + * @typedef {{ + * path: !Path, + * update: function(*):*, + * onComplete: ?function(?Error, boolean, ?DataSnapshot), + * status: ?TransactionStatus, + * order: !number, + * applyLocally: boolean, + * retryCount: !number, + * unwatcher: function(), + * abortReason: ?string, + * currentWriteId: !number, + * currentHash: ?string, + * currentInputSnapshot: ?Node, + * currentOutputSnapshotRaw: ?Node, + * currentOutputSnapshotResolved: ?Node + * }} + */ + +/** + * Setup the transaction data structures + * @private + */ +(Repo.prototype as any).transactions_init_ = function() { + /** + * Stores queues of outstanding transactions for Firebase locations. + * + * @type {!Tree.>} + * @private + */ + this.transactionQueueTree_ = new Tree(); +}; + +declare module './Repo' { + interface Repo { + startTransaction(path: Path, transactionUpdate, onComplete, applyLocally): void + } +} + +type Transaction = { + path: Path, + update: Function, + onComplete: Function, + status: number, + order: number, + applyLocally: boolean, + retryCount: number, + unwatcher: Function, + abortReason: any, + currentWriteId: any, + currentInputSnapshot: any, + currentOutputSnapshotRaw: any, + currentOutputSnapshotResolved: any +} + +/** + * Creates a new transaction, adds it to the transactions we're tracking, and sends it to the server if possible. + * + * @param {!Path} path Path at which to do transaction. + * @param {function(*):*} transactionUpdate Update callback. + * @param {?function(?Error, boolean, ?DataSnapshot)} onComplete Completion callback. + * @param {boolean} applyLocally Whether or not to make intermediate results visible + */ +(Repo.prototype as any).startTransaction = function(path: Path, + transactionUpdate: () => any, + onComplete: (Error, boolean, DataSnapshot) => any, + applyLocally: boolean) { + this.log_('transaction on ' + path); + + // Add a watch to make sure we get server updates. + var valueCallback = function() { }; + var watchRef = new Reference(this, path); + watchRef.on('value', valueCallback); + var unwatcher = function() { watchRef.off('value', valueCallback); }; + + // Initialize transaction. + var transaction: Transaction = { + path: path, + update: transactionUpdate, + onComplete: onComplete, + + // One of TransactionStatus enums. + status: null, + + // Used when combining transactions at different locations to figure out which one goes first. + order: LUIDGenerator(), + + // Whether to raise local events for this transaction. + applyLocally: applyLocally, + + // Count of how many times we've retried the transaction. + retryCount: 0, + + // Function to call to clean up our .on() listener. + unwatcher: unwatcher, + + // Stores why a transaction was aborted. + abortReason: null, + + currentWriteId: null, + + currentInputSnapshot: null, + + currentOutputSnapshotRaw: null, + + currentOutputSnapshotResolved: null + }; + + + // Run transaction initially. + var currentState = this.getLatestState_(path); + transaction.currentInputSnapshot = currentState; + var newVal = transaction.update(currentState.val()); + if (newVal === undefined) { + // Abort transaction. 
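The branch that follows implements the abort contract of the public API: an update function that returns undefined leaves the data untouched and the completion callback fires with committed === false. A caller-side sketch (hypothetical ref path and data; the public Reference.transaction() wrapper, not shown in this section, is what forwards here):

firebase.database().ref('counters/visits').transaction(
  current => {
    if (current === null) return 0;        // seed the counter if it is missing
    if (current >= 1000) return undefined; // abort: keep the existing value
    return current + 1;
  },
  (error, committed, snapshot) => {
    // On abort: error === null, committed === false, snapshot holds the current data.
    console.log('committed?', committed, 'value:', snapshot && snapshot.val());
  }
);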
+ transaction.unwatcher(); + transaction.currentOutputSnapshotRaw = null; + transaction.currentOutputSnapshotResolved = null; + if (transaction.onComplete) { + // We just set the input snapshot, so this cast should be safe + var snapshot = new DataSnapshot(transaction.currentInputSnapshot, new Reference(this, transaction.path), PRIORITY_INDEX); + transaction.onComplete(null, false, snapshot); + } + } else { + validateFirebaseData('transaction failed: Data returned ', newVal, transaction.path); + + // Mark as run and add to our queue. + transaction.status = TransactionStatus.RUN; + var queueNode = this.transactionQueueTree_.subTree(path); + var nodeQueue = queueNode.getValue() || []; + nodeQueue.push(transaction); + + queueNode.setValue(nodeQueue); + + // Update visibleData and raise events + // Note: We intentionally raise events after updating all of our transaction state, since the user could + // start new transactions from the event callbacks. + var priorityForNode; + if (typeof newVal === 'object' && newVal !== null && contains(newVal, '.priority')) { + priorityForNode = safeGet(newVal, '.priority'); + assert(isValidPriority(priorityForNode), 'Invalid priority returned by transaction. ' + + 'Priority must be a valid string, finite number, server value, or null.'); + } else { + var currentNode = this.serverSyncTree_.calcCompleteEventCache(path) || ChildrenNode.EMPTY_NODE; + priorityForNode = currentNode.getPriority().val(); + } + priorityForNode = /** @type {null|number|string} */ (priorityForNode); + + var serverValues = this.generateServerValues(); + var newNodeUnresolved = nodeFromJSON(newVal, priorityForNode); + var newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + transaction.currentOutputSnapshotRaw = newNodeUnresolved; + transaction.currentOutputSnapshotResolved = newNode; + transaction.currentWriteId = this.getNextWriteId_(); + + var events = this.serverSyncTree_.applyUserOverwrite(path, newNode, transaction.currentWriteId, transaction.applyLocally); + this.eventQueue_.raiseEventsForChangedPath(path, events); + + this.sendReadyTransactions_(); + } +}; + +/** + * @param {!Path} path + * @param {Array.=} excludeSets A specific set to exclude + * @return {Node} + * @private + */ +(Repo.prototype as any).getLatestState_ = function(path: Path, excludeSets: [number]): Node { + return this.serverSyncTree_.calcCompleteEventCache(path, excludeSets) || ChildrenNode.EMPTY_NODE; +}; + + +/** + * Sends any already-run transactions that aren't waiting for outstanding transactions to + * complete. + * + * Externally it's called with no arguments, but it calls itself recursively with a particular + * transactionQueueTree node to recurse through the tree. + * + * @param {Tree.>=} opt_node transactionQueueTree node to start at. + * @private + */ +(Repo.prototype as any).sendReadyTransactions_ = function(node?) { + var node = /** @type {!Tree.>} */ (node || this.transactionQueueTree_); + + // Before recursing, make sure any completed transactions are removed. + if (!node) { + this.pruneCompletedTransactionsBelowNode_(node); + } + + if (node.getValue() !== null) { + var queue = this.buildTransactionQueue_(node); + assert(queue.length > 0, 'Sending zero length transaction queue'); + + var allRun = queue.every(function(transaction) { + return transaction.status === TransactionStatus.RUN; + }); + + // If they're all run (and not sent), we can send them. Else, we must wait. 
+ if (allRun) { + this.sendTransactionQueue_(node.path(), queue); + } + } else if (node.hasChildren()) { + var self = this; + node.forEachChild(function(childNode) { + self.sendReadyTransactions_(childNode); + }); + } +}; + + +/** + * Given a list of run transactions, send them to the server and then handle the result (success or failure). + * + * @param {!Path} path The location of the queue. + * @param {!Array.} queue Queue of transactions under the specified location. + * @private + */ +(Repo.prototype as any).sendTransactionQueue_ = function(path: Path, queue: Array) { + // Mark transactions as sent and increment retry count! + var setsToIgnore = queue.map(function(txn) { return txn.currentWriteId; }); + var latestState = this.getLatestState_(path, setsToIgnore); + var snapToSend = latestState; + var latestHash = latestState.hash(); + for (var i = 0; i < queue.length; i++) { + var txn = queue[i]; + assert(txn.status === TransactionStatus.RUN, + 'tryToSendTransactionQueue_: items in queue should all be run.'); + txn.status = TransactionStatus.SENT; + txn.retryCount++; + var relativePath = Path.relativePath(path, txn.path); + // If we've gotten to this point, the output snapshot must be defined. + snapToSend = snapToSend.updateChild(relativePath, /**@type {!Node} */ (txn.currentOutputSnapshotRaw)); + } + + var dataToSend = snapToSend.val(true); + var pathToSend = path; + + // Send the put. + var self = this; + this.server_.put(pathToSend.toString(), dataToSend, function(status) { + self.log_('transaction put response', {path: pathToSend.toString(), status: status}); + + var events = []; + if (status === 'ok') { + // Queue up the callbacks and fire them after cleaning up all of our transaction state, since + // the callback could trigger more transactions or sets. + var callbacks = []; + for (i = 0; i < queue.length; i++) { + queue[i].status = TransactionStatus.COMPLETED; + events = events.concat(self.serverSyncTree_.ackUserWrite(queue[i].currentWriteId)); + if (queue[i].onComplete) { + // We never unset the output snapshot, and given that this transaction is complete, it should be set + var node = /** @type {!Node} */ (queue[i].currentOutputSnapshotResolved); + var ref = new Reference(self, queue[i].path); + var snapshot = new DataSnapshot(node, ref, PRIORITY_INDEX); + callbacks.push(queue[i].onComplete.bind(null, null, true, snapshot)); + } + queue[i].unwatcher(); + } + + // Now remove the completed transactions. + self.pruneCompletedTransactionsBelowNode_(self.transactionQueueTree_.subTree(path)); + // There may be pending transactions that we can now send. + self.sendReadyTransactions_(); + + self.eventQueue_.raiseEventsForChangedPath(path, events); + + // Finally, trigger onComplete callbacks. + for (i = 0; i < callbacks.length; i++) { + exceptionGuard(callbacks[i]); + } + } else { + // transactions are no longer sent. Update their status appropriately. + if (status === 'datastale') { + for (i = 0; i < queue.length; i++) { + if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) + queue[i].status = TransactionStatus.NEEDS_ABORT; + else + queue[i].status = TransactionStatus.RUN; + } + } else { + warn('transaction at ' + pathToSend.toString() + ' failed: ' + status); + for (i = 0; i < queue.length; i++) { + queue[i].status = TransactionStatus.NEEDS_ABORT; + queue[i].abortReason = status; + } + } + + self.rerunTransactions_(path); + } + }, latestHash); +}; + +/** + * Finds all transactions dependent on the data at changedPath and reruns them. 
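The compare-and-set nature of that put is worth spelling out; here is a hypothetical sketch of the wire contract it relies on (server, path, snapToSend and latestState stand in for the locals used above; put() is the ServerActions method declared later in this diff):

// The hash of the client's pre-transaction view rides along with the data.
// If the server's hash still matches, the write applies and status is 'ok';
// if not, it answers 'datastale' and the queue above is rerun on fresh data.
server.put(path.toString(), snapToSend.val(true), (status: string) => {
  if (status === 'datastale') {
    // Hashes diverged; transactions go back to RUN and rerunTransactions_ fires.
  }
}, latestState.hash());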
+ * + * Should be called any time cached data changes. + * + * Return the highest path that was affected by rerunning transactions. This is the path at which events need to + * be raised for. + * + * @param {!Path} changedPath The path in mergedData that changed. + * @return {!Path} The rootmost path that was affected by rerunning transactions. + * @private + */ +(Repo.prototype as any).rerunTransactions_ = function(changedPath: Path) { + var rootMostTransactionNode = this.getAncestorTransactionNode_(changedPath); + var path = rootMostTransactionNode.path(); + + var queue = this.buildTransactionQueue_(rootMostTransactionNode); + this.rerunTransactionQueue_(queue, path); + + return path; +}; + + +/** + * Does all the work of rerunning transactions (as well as cleans up aborted transactions and whatnot). + * + * @param {Array.} queue The queue of transactions to run. + * @param {!Path} path The path the queue is for. + * @private + */ +(Repo.prototype as any).rerunTransactionQueue_ = function(queue: Array, path: Path) { + if (queue.length === 0) { + return; // Nothing to do! + } + + // Queue up the callbacks and fire them after cleaning up all of our transaction state, since + // the callback could trigger more transactions or sets. + var callbacks = []; + var events = []; + // Ignore all of the sets we're going to re-run. + var txnsToRerun = queue.filter(function(q) { return q.status === TransactionStatus.RUN; }); + var setsToIgnore = txnsToRerun.map(function(q) { return q.currentWriteId; }); + for (var i = 0; i < queue.length; i++) { + var transaction = queue[i]; + var relativePath = Path.relativePath(path, transaction.path); + var abortTransaction = false, abortReason; + assert(relativePath !== null, 'rerunTransactionsUnderNode_: relativePath should not be null.'); + + if (transaction.status === TransactionStatus.NEEDS_ABORT) { + abortTransaction = true; + abortReason = transaction.abortReason; + events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + } else if (transaction.status === TransactionStatus.RUN) { + if (transaction.retryCount >= (Repo as any).MAX_TRANSACTION_RETRIES_) { + abortTransaction = true; + abortReason = 'maxretry'; + events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + } else { + // This code reruns a transaction + var currentNode = this.getLatestState_(transaction.path, setsToIgnore); + transaction.currentInputSnapshot = currentNode; + var newData = queue[i].update(currentNode.val()); + if (newData !== undefined) { + validateFirebaseData('transaction failed: Data returned ', newData, transaction.path); + var newDataNode = nodeFromJSON(newData); + var hasExplicitPriority = (typeof newData === 'object' && newData != null && + contains(newData, '.priority')); + if (!hasExplicitPriority) { + // Keep the old priority if there wasn't a priority explicitly specified. 
+ newDataNode = newDataNode.updatePriority(currentNode.getPriority()); + } + + var oldWriteId = transaction.currentWriteId; + var serverValues = this.generateServerValues(); + var newNodeResolved = resolveDeferredValueSnapshot(newDataNode, serverValues); + + transaction.currentOutputSnapshotRaw = newDataNode; + transaction.currentOutputSnapshotResolved = newNodeResolved; + transaction.currentWriteId = this.getNextWriteId_(); + // Mutates setsToIgnore in place + setsToIgnore.splice(setsToIgnore.indexOf(oldWriteId), 1); + events = events.concat( + this.serverSyncTree_.applyUserOverwrite(transaction.path, newNodeResolved, transaction.currentWriteId, + transaction.applyLocally) + ); + events = events.concat(this.serverSyncTree_.ackUserWrite(oldWriteId, true)); + } else { + abortTransaction = true; + abortReason = 'nodata'; + events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + } + } + } + this.eventQueue_.raiseEventsForChangedPath(path, events); + events = []; + if (abortTransaction) { + // Abort. + queue[i].status = TransactionStatus.COMPLETED; + + // Removing a listener can trigger pruning which can muck with mergedData/visibleData (as it prunes data). + // So defer the unwatcher until we're done. + (function(unwatcher) { + setTimeout(unwatcher, Math.floor(0)); + })(queue[i].unwatcher); + + if (queue[i].onComplete) { + if (abortReason === 'nodata') { + var ref = new Reference(this, queue[i].path); + // We set this field immediately, so it's safe to cast to an actual snapshot + var lastInput = /** @type {!Node} */ (queue[i].currentInputSnapshot); + var snapshot = new DataSnapshot(lastInput, ref, PRIORITY_INDEX); + callbacks.push(queue[i].onComplete.bind(null, null, false, snapshot)); + } else { + callbacks.push(queue[i].onComplete.bind(null, new Error(abortReason), false, null)); + } + } + } + } + + // Clean up completed transactions. + this.pruneCompletedTransactionsBelowNode_(this.transactionQueueTree_); + + // Now fire callbacks, now that we're in a good, known state. + for (i = 0; i < callbacks.length; i++) { + exceptionGuard(callbacks[i]); + } + + // Try to send the transaction result to the server. + this.sendReadyTransactions_(); +}; + + +/** + * Returns the rootmost ancestor node of the specified path that has a pending transaction on it, or just returns + * the node for the given path if there are no pending transactions on any ancestor. + * + * @param {!Path} path The location to start at. + * @return {!Tree.>} The rootmost node with a transaction. + * @private + */ +(Repo.prototype as any).getAncestorTransactionNode_ = function(path: Path): Tree { + var front; + + // Start at the root and walk deeper into the tree towards path until we find a node with pending transactions. + var transactionNode = this.transactionQueueTree_; + while ((front = path.getFront()) !== null && transactionNode.getValue() === null) { + transactionNode = transactionNode.subTree(front); + path = path.popFront(); + } + + return transactionNode; +}; + + +/** + * Builds the queue of all transactions at or below the specified transactionNode. + * + * @param {!Tree.>} transactionNode + * @return {Array.} The generated queue. + * @private + */ +(Repo.prototype as any).buildTransactionQueue_ = function(transactionNode: Tree): Array { + // Walk any child transaction queues and aggregate them into a single queue. 
+ var transactionQueue = []; + this.aggregateTransactionQueuesForNode_(transactionNode, transactionQueue); + + // Sort them by the order the transactions were created. + transactionQueue.sort(function(a, b) { return a.order - b.order; }); + + return transactionQueue; +}; + +/** + * @param {!Tree.>} node + * @param {Array.} queue + * @private + */ +(Repo.prototype as any).aggregateTransactionQueuesForNode_ = function(node: Tree, queue: Array) { + var nodeQueue = node.getValue(); + if (nodeQueue !== null) { + for (var i = 0; i < nodeQueue.length; i++) { + queue.push(nodeQueue[i]); + } + } + + var self = this; + node.forEachChild(function(child) { + self.aggregateTransactionQueuesForNode_(child, queue); + }); +}; + + +/** + * Remove COMPLETED transactions at or below this node in the transactionQueueTree_. + * + * @param {!Tree.>} node + * @private + */ +(Repo.prototype as any).pruneCompletedTransactionsBelowNode_ = function(node: Tree) { + var queue = node.getValue(); + if (queue) { + var to = 0; + for (var from = 0; from < queue.length; from++) { + if (queue[from].status !== TransactionStatus.COMPLETED) { + queue[to] = queue[from]; + to++; + } + } + queue.length = to; + node.setValue(queue.length > 0 ? queue : null); + } + + var self = this; + node.forEachChild(function(childNode) { + self.pruneCompletedTransactionsBelowNode_(childNode); + }); +}; + + +/** + * Aborts all transactions on ancestors or descendants of the specified path. Called when doing a set() or update() + * since we consider them incompatible with transactions. + * + * @param {!Path} path Path for which we want to abort related transactions. + * @return {!Path} + * @private + */ +(Repo.prototype as any).abortTransactions_ = function(path: Path) { + var affectedPath = this.getAncestorTransactionNode_(path).path(); + + var transactionNode = this.transactionQueueTree_.subTree(path); + var self = this; + + transactionNode.forEachAncestor(function(node) { + self.abortTransactionsOnNode_(node); + }); + + this.abortTransactionsOnNode_(transactionNode); + + transactionNode.forEachDescendant(function(node) { + self.abortTransactionsOnNode_(node); + }); + + return affectedPath; +}; + + +/** + * Abort transactions stored in this transaction queue node. + * + * @param {!Tree.>} node Node to abort transactions for. + * @private + */ +(Repo.prototype as any).abortTransactionsOnNode_ = function(node: Tree) { + var queue = node.getValue(); + if (queue !== null) { + + // Queue up the callbacks and fire them after cleaning up all of our transaction state, since + // the callback could trigger more transactions or sets. + var callbacks = []; + + // Go through queue. Any already-sent transactions must be marked for abort, while the unsent ones + // can be immediately aborted and removed. + var events = []; + var lastSent = -1; + for (var i = 0; i < queue.length; i++) { + if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) { + // Already marked. No action needed. + } else if (queue[i].status === TransactionStatus.SENT) { + assert(lastSent === i - 1, 'All SENT items should be at beginning of queue.'); + lastSent = i; + // Mark transaction for abort when it comes back. + queue[i].status = TransactionStatus.SENT_NEEDS_ABORT; + queue[i].abortReason = 'set'; + } else { + assert(queue[i].status === TransactionStatus.RUN, + 'Unexpected transaction status in abort'); + // We can abort it immediately. 
+ queue[i].unwatcher(); + events = events.concat(this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId, true)); + if (queue[i].onComplete) { + var snapshot = null; + callbacks.push(queue[i].onComplete.bind(null, new Error('set'), false, snapshot)); + } + } + } + if (lastSent === -1) { + // We're not waiting for any sent transactions. We can clear the queue. + node.setValue(null); + } else { + // Remove the transactions we aborted. + queue.length = lastSent + 1; + } + + // Now fire the callbacks. + this.eventQueue_.raiseEventsForChangedPath(node.path(), events); + for (i = 0; i < callbacks.length; i++) { + exceptionGuard(callbacks[i]); + } + } +}; diff --git a/src/database/core/ServerActions.ts b/src/database/core/ServerActions.ts new file mode 100644 index 00000000000..e593a55487f --- /dev/null +++ b/src/database/core/ServerActions.ts @@ -0,0 +1,74 @@ +import { Query } from '../api/Query'; + +/** + * Interface defining the set of actions that can be performed against the Firebase server + * (basically corresponds to our wire protocol). + * + * @interface + */ +export interface ServerActions { + + /** + * @param {!Query} query + * @param {function():string} currentHashFn + * @param {?number} tag + * @param {function(string, *)} onComplete + */ + listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => any); + + /** + * Remove a listen. + * + * @param {!Query} query + * @param {?number} tag + */ + unlisten(query: Query, tag: number | null); + + /** + * @param {string} pathString + * @param {*} data + * @param {function(string, string)=} onComplete + * @param {string=} hash + */ + put(pathString: string, data: any, onComplete?: (a: string, b: string) => any, hash?: string); + + /** + * @param {string} pathString + * @param {*} data + * @param {function(string, ?string)} onComplete + * @param {string=} hash + */ + merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => any, hash?: string); + + /** + * Refreshes the auth token for the current connection. + * @param {string} token The authentication token + */ + refreshAuthToken(token: string); + + /** + * @param {string} pathString + * @param {*} data + * @param {function(string, string)=} onComplete + */ + onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => any); + + /** + * @param {string} pathString + * @param {*} data + * @param {function(string, string)=} onComplete + */ + onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => any); + + /** + * @param {string} pathString + * @param {function(string, string)=} onComplete + */ + onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => any); + + /** + * @param {Object.} stats + */ + reportStats(stats: { [k: string]: any }); + +} diff --git a/src/database/core/SnapshotHolder.ts b/src/database/core/SnapshotHolder.ts new file mode 100644 index 00000000000..880dad4cd3f --- /dev/null +++ b/src/database/core/SnapshotHolder.ts @@ -0,0 +1,19 @@ +import { ChildrenNode } from "./snap/ChildrenNode"; + +/** + * Mutable object which basically just stores a reference to the "latest" immutable snapshot. 
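A minimal sketch of how the class defined just below is used (hypothetical path and value; in the SDK the Repo keeps a holder like this for the synthetic .info/ subtree):

import { Path } from './util/Path';
import { nodeFromJSON } from './snap/nodeFromJSON';
import { SnapshotHolder } from './SnapshotHolder';

// Illustrative only: replace the subtree at a path, then read it back.
const holder = new SnapshotHolder();
holder.updateSnapshot(new Path('connected'), nodeFromJSON(true));
holder.getNode(new Path('connected')).val();   // -> true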
+ * + * @constructor + */ +export class SnapshotHolder { + private rootNode_; + constructor() { + this.rootNode_ = ChildrenNode.EMPTY_NODE; + } + getNode(path) { + return this.rootNode_.getChild(path); + } + updateSnapshot(path, newSnapshotNode) { + this.rootNode_ = this.rootNode_.updateChild(path, newSnapshotNode); + } +} diff --git a/src/database/core/SparseSnapshotTree.ts b/src/database/core/SparseSnapshotTree.ts new file mode 100644 index 00000000000..0b0321778bc --- /dev/null +++ b/src/database/core/SparseSnapshotTree.ts @@ -0,0 +1,161 @@ +import { Path } from "./util/Path"; +import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; +import { CountedSet } from "./util/CountedSet"; + +/** + * Helper class to store a sparse set of snapshots. + * + * @constructor + */ +export class SparseSnapshotTree { + value_; + children_; + constructor() { + /** + * @private + * @type {Node} + */ + this.value_ = null; + + /** + * @private + * @type {CountedSet} + */ + this.children_ = null; + }; + /** + * Gets the node stored at the given path if one exists. + * + * @param {!Path} path Path to look up snapshot for. + * @return {?Node} The retrieved node, or null. + */ + find(path) { + if (this.value_ != null) { + return this.value_.getChild(path); + } else if (!path.isEmpty() && this.children_ != null) { + var childKey = path.getFront(); + path = path.popFront(); + if (this.children_.contains(childKey)) { + var childTree = this.children_.get(childKey); + return childTree.find(path); + } else { + return null; + } + } else { + return null; + } + }; + + + /** + * Stores the given node at the specified path. If there is already a node + * at a shallower path, it merges the new data into that snapshot node. + * + * @param {!Path} path Path to look up snapshot for. + * @param {!Node} data The new data, or null. + */ + remember(path, data) { + if (path.isEmpty()) { + this.value_ = data; + this.children_ = null; + } else if (this.value_ !== null) { + this.value_ = this.value_.updateChild(path, data); + } else { + if (this.children_ == null) { + this.children_ = new CountedSet(); + } + + var childKey = path.getFront(); + if (!this.children_.contains(childKey)) { + this.children_.add(childKey, new SparseSnapshotTree()); + } + + var child = this.children_.get(childKey); + path = path.popFront(); + child.remember(path, data); + } + }; + + + /** + * Purge the data at path from the cache. + * + * @param {!Path} path Path to look up snapshot for. + * @return {boolean} True if this node should now be removed. + */ + forget(path) { + if (path.isEmpty()) { + this.value_ = null; + this.children_ = null; + return true; + } else { + if (this.value_ !== null) { + if (this.value_.isLeafNode()) { + // We're trying to forget a node that doesn't exist + return false; + } else { + var value = this.value_; + this.value_ = null; + + var self = this; + value.forEachChild(PRIORITY_INDEX, function(key, tree) { + self.remember(new Path(key), tree); + }); + + return this.forget(path); + } + } else if (this.children_ !== null) { + var childKey = path.getFront(); + path = path.popFront(); + if (this.children_.contains(childKey)) { + var safeToRemove = this.children_.get(childKey).forget(path); + if (safeToRemove) { + this.children_.remove(childKey); + } + } + + if (this.children_.isEmpty()) { + this.children_ = null; + return true; + } else { + return false; + } + + } else { + return true; + } + } + }; + + /** + * Recursively iterates through all of the stored tree and calls the + * callback on each one. 
+ * + * @param {!Path} prefixPath Path to look up node for. + * @param {!Function} func The function to invoke for each tree. + */ + forEachTree(prefixPath, func) { + if (this.value_ !== null) { + func(prefixPath, this.value_); + } else { + this.forEachChild(function(key, tree) { + var path = new Path(prefixPath.toString() + '/' + key); + tree.forEachTree(path, func); + }); + } + }; + + + /** + * Iterates through each immediate child and triggers the callback. + * + * @param {!Function} func The function to invoke for each child. + */ + forEachChild(func) { + if (this.children_ !== null) { + this.children_.each(function(key, tree) { + func(key, tree); + }); + } + }; +} diff --git a/src/database/core/SyncPoint.ts b/src/database/core/SyncPoint.ts new file mode 100644 index 00000000000..ab58339d474 --- /dev/null +++ b/src/database/core/SyncPoint.ts @@ -0,0 +1,229 @@ +import { CacheNode } from "./view/CacheNode"; +import { ChildrenNode } from "./snap/ChildrenNode"; +import { assert } from "../../utils/assert"; +import { isEmpty, forEach, findValue, safeGet } from "../../utils/obj"; +import { ViewCache } from "./view/ViewCache"; +import { View } from "./view/View"; + +let __referenceConstructor; + +/** + * SyncPoint represents a single location in a SyncTree with 1 or more event registrations, meaning we need to + * maintain 1 or more Views at this location to cache server data and raise appropriate events for server changes + * and user writes (set, transaction, update). + * + * It's responsible for: + * - Maintaining the set of 1 or more views necessary at this location (a SyncPoint with 0 views should be removed). + * - Proxying user / server operations to the views as appropriate (i.e. applyServerOverwrite, + * applyUserOverwrite, etc.) + */ +export class SyncPoint { + static set __referenceConstructor(val) { + assert(!__referenceConstructor, '__referenceConstructor has already been defined'); + __referenceConstructor = val; + } + static get __referenceConstructor() { + assert(__referenceConstructor, 'Reference.ts has not been loaded'); + return __referenceConstructor; + } + + views_: object; + constructor() { + /** + * The Views being tracked at this location in the tree, stored as a map where the key is a + * queryId and the value is the View for that query. + * + * NOTE: This list will be quite small (usually 1, but perhaps 2 or 3; any more is an odd use case). + * + * @type {!Object.} + * @private + */ + this.views_ = { }; + }; + /** + * @return {boolean} + */ + isEmpty() { + return isEmpty(this.views_); + }; + + /** + * + * @param {!Operation} operation + * @param {!WriteTreeRef} writesCache + * @param {?Node} optCompleteServerCache + * @return {!Array.} + */ + applyOperation(operation, writesCache, optCompleteServerCache) { + var queryId = operation.source.queryId; + if (queryId !== null) { + var view = safeGet(this.views_, queryId); + assert(view != null, 'SyncTree gave us an op for an invalid query.'); + return view.applyOperation(operation, writesCache, optCompleteServerCache); + } else { + var events = []; + + forEach(this.views_, function(key, view) { + events = events.concat(view.applyOperation(operation, writesCache, optCompleteServerCache)); + }); + + return events; + } + }; + + /** + * Add an event callback for the specified query. + * + * @param {!Query} query + * @param {!EventRegistration} eventRegistration + * @param {!WriteTreeRef} writesCache + * @param {?Node} serverCache Complete server cache, if we have it. 
+ * @param {boolean} serverCacheComplete + * @return {!Array.} Events to raise. + */ + addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete) { + var queryId = query.queryIdentifier(); + var view = safeGet(this.views_, queryId); + if (!view) { + // TODO: make writesCache take flag for complete server node + var eventCache = writesCache.calcCompleteEventCache(serverCacheComplete ? serverCache : null); + var eventCacheComplete = false; + if (eventCache) { + eventCacheComplete = true; + } else if (serverCache instanceof ChildrenNode) { + eventCache = writesCache.calcCompleteEventChildren(serverCache); + eventCacheComplete = false; + } else { + eventCache = ChildrenNode.EMPTY_NODE; + eventCacheComplete = false; + } + var viewCache = new ViewCache( + new CacheNode(/** @type {!Node} */ (eventCache), eventCacheComplete, false), + new CacheNode(/** @type {!Node} */ (serverCache), serverCacheComplete, false) + ); + view = new View(query, viewCache); + this.views_[queryId] = view; + } + + // This is guaranteed to exist now, we just created anything that was missing + view.addEventRegistration(eventRegistration); + return view.getInitialEvents(eventRegistration); + }; + + /** + * Remove event callback(s). Return cancelEvents if a cancelError is specified. + * + * If query is the default query, we'll check all views for the specified eventRegistration. + * If eventRegistration is null, we'll remove all callbacks for the specified view(s). + * + * @param {!Query} query + * @param {?EventRegistration} eventRegistration If null, remove all callbacks. + * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. + * @return {{removed:!Array., events:!Array.}} removed queries and any cancel events + */ + removeEventRegistration(query, eventRegistration, cancelError) { + var queryId = query.queryIdentifier(); + var removed = []; + var cancelEvents = []; + var hadCompleteView = this.hasCompleteView(); + if (queryId === 'default') { + // When you do ref.off(...), we search all views for the registration to remove. + var self = this; + forEach(this.views_, function(viewQueryId, view) { + cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); + if (view.isEmpty()) { + delete self.views_[viewQueryId]; + + // We'll deal with complete views later. + if (!view.getQuery().getQueryParams().loadsAllData()) { + removed.push(view.getQuery()); + } + } + }); + } else { + // remove the callback from the specific view. + var view = safeGet(this.views_, queryId); + if (view) { + cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); + if (view.isEmpty()) { + delete this.views_[queryId]; + + // We'll deal with complete views later. + if (!view.getQuery().getQueryParams().loadsAllData()) { + removed.push(view.getQuery()); + } + } + } + } + + if (hadCompleteView && !this.hasCompleteView()) { + // We removed our last complete view. 
+ removed.push(new SyncPoint.__referenceConstructor(query.repo, query.path)); + } + + return {removed: removed, events: cancelEvents}; + }; + + /** + * @return {!Array.} + */ + getQueryViews() { + const values = Object.keys(this.views_) + .map(key => this.views_[key]); + return values.filter(function(view) { + return !view.getQuery().getQueryParams().loadsAllData(); + }); + }; + + /** + * + * @param {!Path} path The path to the desired complete snapshot + * @return {?Node} A complete cache, if it exists + */ + getCompleteServerCache(path) { + var serverCache = null; + forEach(this.views_, (key, view) => { + serverCache = serverCache || view.getCompleteServerCache(path); + }); + return serverCache; + }; + + /** + * @param {!Query} query + * @return {?View} + */ + viewForQuery(query) { + var params = query.getQueryParams(); + if (params.loadsAllData()) { + return this.getCompleteView(); + } else { + var queryId = query.queryIdentifier(); + return safeGet(this.views_, queryId); + } + }; + + /** + * @param {!Query} query + * @return {boolean} + */ + viewExistsForQuery(query) { + return this.viewForQuery(query) != null; + }; + + /** + * @return {boolean} + */ + hasCompleteView() { + return this.getCompleteView() != null; + }; + + /** + * @return {?View} + */ + getCompleteView() { + var completeView = findValue(this.views_, function(view) { + return view.getQuery().getQueryParams().loadsAllData(); + }); + return completeView || null; + }; +} diff --git a/src/database/core/SyncTree.ts b/src/database/core/SyncTree.ts new file mode 100644 index 00000000000..3310d6a34cb --- /dev/null +++ b/src/database/core/SyncTree.ts @@ -0,0 +1,749 @@ +import { assert } from '../../utils/assert'; +import { errorForServerCode } from "./util/util"; +import { AckUserWrite } from "./operation/AckUserWrite"; +import { ChildrenNode } from "./snap/ChildrenNode"; +import { forEach, safeGet } from "../../utils/obj"; +import { ImmutableTree } from "./util/ImmutableTree"; +import { ListenComplete } from "./operation/ListenComplete"; +import { Merge } from "./operation/Merge"; +import { OperationSource } from "./operation/Operation"; +import { Overwrite } from "./operation/Overwrite"; +import { Path } from "./util/Path"; +import { SyncPoint } from "./SyncPoint"; +import { WriteTree } from "./WriteTree"; +import { Query } from "../api/Query"; + +/** + * @typedef {{ + * startListening: function( + * !Query, + * ?number, + * function():string, + * function(!string, *):!Array. + * ):!Array., + * + * stopListening: function(!Query, ?number) + * }} + */ + +/** + * SyncTree is the central class for managing event callback registration, data caching, views + * (query processing), and event generation. There are typically two SyncTree instances for + * each Repo, one for the normal Firebase data, and one for the .info data. + * + * It has a number of responsibilities, including: + * - Tracking all user event callbacks (registered via addEventRegistration() and removeEventRegistration()). + * - Applying and caching data changes for user set(), transaction(), and update() calls + * (applyUserOverwrite(), applyUserMerge()). + * - Applying and caching data changes for server data changes (applyServerOverwrite(), + * applyServerMerge()). + * - Generating user-facing events for server and user changes (all of the apply* methods + * return the set of events that need to be raised as a result). 
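A hypothetical caller-side sketch of that contract (syncTree, eventQueue, path and the values are assumptions; in the SDK, Repo plays this role): the SyncTree computes the events, and the caller is responsible for raising them.

// Apply incoming server data, then raise whatever events fell out of it.
const events = syncTree.applyServerOverwrite(path, nodeFromJSON(newValue));
eventQueue.raiseEventsForChangedPath(path, events);

// User-initiated writes follow the same shape, tagged with a write id that is
// later acked (or reverted) when the server responds.
const writeId = getNextWriteId();                       // assumed counter
const moreEvents = syncTree.applyUserOverwrite(path, nodeFromJSON(localValue), writeId, true);
eventQueue.raiseEventsForChangedPath(path, moreEvents);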
+ * - Maintaining the appropriate set of server listens to ensure we are always subscribed + * to the correct set of paths and queries to satisfy the current set of user event + * callbacks (listens are started/stopped using the provided listenProvider). + * + * NOTE: Although SyncTree tracks event callbacks and calculates events to raise, the actual + * events are returned to the caller rather than raised synchronously. + * + * @constructor + * @param {!ListenProvider} listenProvider Used by SyncTree to start / stop listening + * to server data. + */ +export class SyncTree { + /** + * Tree of SyncPoints. There's a SyncPoint at any location that has 1 or more views. + * @type {!ImmutableTree.} + * @private + */ + syncPointTree_; + + /** + * A tree of all pending user writes (user-initiated set()'s, transaction()'s, update()'s, etc.). + * @type {!WriteTree} + * @private + */ + pendingWriteTree_; + tagToQueryMap_; + queryToTagMap_; + listenProvider_; + + constructor(listenProvider) { + this.syncPointTree_ = ImmutableTree.Empty; + this.pendingWriteTree_ = new WriteTree(); + this.tagToQueryMap_ = {}; + this.queryToTagMap_ = {}; + this.listenProvider_ = listenProvider; + }; + + /** + * Apply the data changes for a user-generated set() or transaction() call. + * + * @param {!Path} path + * @param {!Node} newData + * @param {number} writeId + * @param {boolean=} visible + * @return {!Array.} Events to raise. + */ + applyUserOverwrite(path, newData, writeId, visible) { + // Record pending write. + this.pendingWriteTree_.addOverwrite(path, newData, writeId, visible); + + if (!visible) { + return []; + } else { + return this.applyOperationToSyncPoints_( + new Overwrite(OperationSource.User, path, newData)); + } + }; + + /** + * Apply the data from a user-generated update() call + * + * @param {!Path} path + * @param {!Object.} changedChildren + * @param {!number} writeId + * @return {!Array.} Events to raise. + */ + applyUserMerge(path, changedChildren, writeId) { + // Record pending merge. + this.pendingWriteTree_.addMerge(path, changedChildren, writeId); + + var changeTree = ImmutableTree.fromObject(changedChildren); + + return this.applyOperationToSyncPoints_( + new Merge(OperationSource.User, path, changeTree)); + }; + + /** + * Acknowledge a pending user write that was previously registered with applyUserOverwrite() or applyUserMerge(). + * + * @param {!number} writeId + * @param {boolean=} revert True if the given write failed and needs to be reverted + * @return {!Array.} Events to raise. + */ + ackUserWrite(writeId, revert) { + revert = revert || false; + + var write = this.pendingWriteTree_.getWrite(writeId); + var needToReevaluate = this.pendingWriteTree_.removeWrite(writeId); + if (!needToReevaluate) { + return []; + } else { + var affectedTree = ImmutableTree.Empty; + if (write.snap != null) { // overwrite + affectedTree = affectedTree.set(Path.Empty, true); + } else { + forEach(write.children, function(pathString, node) { + affectedTree = affectedTree.set(new Path(pathString), node); + }); + } + return this.applyOperationToSyncPoints_(new AckUserWrite(write.path, affectedTree, revert)); + } + }; + + /** + * Apply new server data for the specified path.. + * + * @param {!Path} path + * @param {!Node} newData + * @return {!Array.} Events to raise. + */ + applyServerOverwrite(path, newData) { + return this.applyOperationToSyncPoints_( + new Overwrite(OperationSource.Server, path, newData)); + }; + + /** + * Apply new server data to be merged in at the specified path. 
+ * + * @param {!Path} path + * @param {!Object.} changedChildren + * @return {!Array.} Events to raise. + */ + applyServerMerge(path, changedChildren) { + var changeTree = ImmutableTree.fromObject(changedChildren); + + return this.applyOperationToSyncPoints_( + new Merge(OperationSource.Server, path, changeTree)); + }; + + /** + * Apply a listen complete for a query + * + * @param {!Path} path + * @return {!Array.} Events to raise. + */ + applyListenComplete(path) { + return this.applyOperationToSyncPoints_( + new ListenComplete(OperationSource.Server, path)); + }; + + /** + * Apply new server data for the specified tagged query. + * + * @param {!Path} path + * @param {!Node} snap + * @param {!number} tag + * @return {!Array.} Events to raise. + */ + applyTaggedQueryOverwrite(path, snap, tag) { + var queryKey = this.queryKeyForTag_(tag); + if (queryKey != null) { + var r = this.parseQueryKey_(queryKey); + var queryPath = r.path, queryId = r.queryId; + var relativePath = Path.relativePath(queryPath, path); + var op = new Overwrite(OperationSource.forServerTaggedQuery(queryId), + relativePath, snap); + return this.applyTaggedOperation_(queryPath, queryId, op); + } else { + // Query must have been removed already + return []; + } + }; + + /** + * Apply server data to be merged in for the specified tagged query. + * + * @param {!Path} path + * @param {!Object.} changedChildren + * @param {!number} tag + * @return {!Array.} Events to raise. + */ + applyTaggedQueryMerge(path, changedChildren, tag) { + var queryKey = this.queryKeyForTag_(tag); + if (queryKey) { + var r = this.parseQueryKey_(queryKey); + var queryPath = r.path, queryId = r.queryId; + var relativePath = Path.relativePath(queryPath, path); + var changeTree = ImmutableTree.fromObject(changedChildren); + var op = new Merge(OperationSource.forServerTaggedQuery(queryId), + relativePath, changeTree); + return this.applyTaggedOperation_(queryPath, queryId, op); + } else { + // We've already removed the query. No big deal, ignore the update + return []; + } + }; + + /** + * Apply a listen complete for a tagged query + * + * @param {!Path} path + * @param {!number} tag + * @return {!Array.} Events to raise. + */ + applyTaggedListenComplete(path, tag) { + var queryKey = this.queryKeyForTag_(tag); + if (queryKey) { + var r = this.parseQueryKey_(queryKey); + var queryPath = r.path, queryId = r.queryId; + var relativePath = Path.relativePath(queryPath, path); + var op = new ListenComplete(OperationSource.forServerTaggedQuery(queryId), + relativePath); + return this.applyTaggedOperation_(queryPath, queryId, op); + } else { + // We've already removed the query. No big deal, ignore the update + return []; + } + }; + + /** + * Add an event callback for the specified query. + * + * @param {!Query} query + * @param {!EventRegistration} eventRegistration + * @return {!Array.} Events to raise. + */ + addEventRegistration(query, eventRegistration) { + var path = query.path; + + var serverCache = null; + var foundAncestorDefaultView = false; + // Any covering writes will necessarily be at the root, so really all we need to find is the server cache. + // Consider optimizing this once there's a better understanding of what actual behavior will be. 
+ this.syncPointTree_.foreachOnPath(path, function(pathToSyncPoint, sp) { + var relativePath = Path.relativePath(pathToSyncPoint, path); + serverCache = serverCache || sp.getCompleteServerCache(relativePath); + foundAncestorDefaultView = foundAncestorDefaultView || sp.hasCompleteView(); + }); + var syncPoint = this.syncPointTree_.get(path); + if (!syncPoint) { + syncPoint = new SyncPoint(); + this.syncPointTree_ = this.syncPointTree_.set(path, syncPoint); + } else { + foundAncestorDefaultView = foundAncestorDefaultView || syncPoint.hasCompleteView(); + serverCache = serverCache || syncPoint.getCompleteServerCache(Path.Empty); + } + + var serverCacheComplete; + if (serverCache != null) { + serverCacheComplete = true; + } else { + serverCacheComplete = false; + serverCache = ChildrenNode.EMPTY_NODE; + var subtree = this.syncPointTree_.subtree(path); + subtree.foreachChild(function(childName, childSyncPoint) { + var completeCache = childSyncPoint.getCompleteServerCache(Path.Empty); + if (completeCache) { + serverCache = serverCache.updateImmediateChild(childName, completeCache); + } + }); + } + + var viewAlreadyExists = syncPoint.viewExistsForQuery(query); + if (!viewAlreadyExists && !query.getQueryParams().loadsAllData()) { + // We need to track a tag for this query + var queryKey = this.makeQueryKey_(query); + assert(!(queryKey in this.queryToTagMap_), + 'View does not exist, but we have a tag'); + var tag = SyncTree.getNextQueryTag_(); + this.queryToTagMap_[queryKey] = tag; + // Coerce to string to avoid sparse arrays. + this.tagToQueryMap_['_' + tag] = queryKey; + } + var writesCache = this.pendingWriteTree_.childWrites(path); + var events = syncPoint.addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete); + if (!viewAlreadyExists && !foundAncestorDefaultView) { + var view = /** @type !View */ (syncPoint.viewForQuery(query)); + events = events.concat(this.setupListener_(query, view)); + } + return events; + }; + + /** + * Remove event callback(s). + * + * If query is the default query, we'll check all queries for the specified eventRegistration. + * If eventRegistration is null, we'll remove all callbacks for the specified query/queries. + * + * @param {!Query} query + * @param {?EventRegistration} eventRegistration If null, all callbacks are removed. + * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. + * @return {!Array.} Cancel events, if cancelError was provided. + */ + removeEventRegistration(query, eventRegistration, cancelError?) { + // Find the syncPoint first. Then deal with whether or not it has matching listeners + var path = query.path; + var maybeSyncPoint = this.syncPointTree_.get(path); + var cancelEvents = []; + // A removal on a default query affects all queries at that location. A removal on an indexed query, even one without + // other query constraints, does *not* affect all queries at that location. So this check must be for 'default', and + // not loadsAllData(). 
+ if (maybeSyncPoint && (query.queryIdentifier() === 'default' || maybeSyncPoint.viewExistsForQuery(query))) { + /** + * @type {{removed: !Array., events: !Array.}} + */ + var removedAndEvents = maybeSyncPoint.removeEventRegistration(query, eventRegistration, cancelError); + if (maybeSyncPoint.isEmpty()) { + this.syncPointTree_ = this.syncPointTree_.remove(path); + } + var removed = removedAndEvents.removed; + cancelEvents = removedAndEvents.events; + // We may have just removed one of many listeners and can short-circuit this whole process + // We may also not have removed a default listener, in which case all of the descendant listeners should already be + // properly set up. + // + // Since indexed queries can shadow if they don't have other query constraints, check for loadsAllData(), instead of + // queryId === 'default' + var removingDefault = -1 !== removed.findIndex(function(query) { + return query.getQueryParams().loadsAllData(); + }); + var covered = this.syncPointTree_.findOnPath(path, function(relativePath, parentSyncPoint) { + return parentSyncPoint.hasCompleteView(); + }); + + if (removingDefault && !covered) { + var subtree = this.syncPointTree_.subtree(path); + // There are potentially child listeners. Determine what if any listens we need to send before executing the + // removal + if (!subtree.isEmpty()) { + // We need to fold over our subtree and collect the listeners to send + var newViews = this.collectDistinctViewsForSubTree_(subtree); + + // Ok, we've collected all the listens we need. Set them up. + for (var i = 0; i < newViews.length; ++i) { + var view = newViews[i], newQuery = view.getQuery(); + var listener = this.createListenerForView_(view); + this.listenProvider_.startListening(this.queryForListening_(newQuery), this.tagForQuery_(newQuery), + listener.hashFn, listener.onComplete); + } + } else { + // There's nothing below us, so nothing we need to start listening on + } + } + // If we removed anything and we're not covered by a higher up listen, we need to stop listening on this query + // The above block has us covered in terms of making sure we're set up on listens lower in the tree. + // Also, note that if we have a cancelError, it's already been removed at the provider level. + if (!covered && removed.length > 0 && !cancelError) { + // If we removed a default, then we weren't listening on any of the other queries here. Just cancel the one + // default. Otherwise, we need to iterate through and cancel each individual query + if (removingDefault) { + // We don't tag default listeners + var defaultTag = null; + this.listenProvider_.stopListening(this.queryForListening_(query), defaultTag); + } else { + var self = this; + removed.forEach(function(queryToRemove) { + var queryIdToRemove = queryToRemove.queryIdentifier(); + var tagToRemove = self.queryToTagMap_[self.makeQueryKey_(queryToRemove)]; + self.listenProvider_.stopListening(self.queryForListening_(queryToRemove), tagToRemove); + }); + } + } + // Now, clear all of the tags we're tracking for the removed listens + this.removeTags_(removed); + } else { + // No-op, this listener must've been already removed + } + return cancelEvents; + }; + + /** + * Returns a complete cache, if we have one, of the data at a particular path. The location must have a listener above + * it, but as this is only used by transaction code, that should always be the case anyways. + * + * Note: this method will *include* hidden writes from transaction with applyLocally set to false. 
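This is exactly how the transaction code earlier in this diff consumes it; a condensed sketch mirroring getLatestState_ and sendTransactionQueue_ in Repo_transaction.ts (queue, txn, syncTree and path are assumed to be in scope):

// Exclude the rerun transactions' own pending writes so each update function
// sees the data as it would be without them, hidden writes included.
const setsToIgnore = queue.map(txn => txn.currentWriteId);
const latestState = syncTree.calcCompleteEventCache(path, setsToIgnore) || ChildrenNode.EMPTY_NODE;
const newVal = txn.update(latestState.val());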
+ * @param {!Path} path The path to the data we want + * @param {Array.=} writeIdsToExclude A specific set to be excluded + * @return {?Node} + */ + calcCompleteEventCache(path, writeIdsToExclude) { + var includeHiddenSets = true; + var writeTree = this.pendingWriteTree_; + var serverCache = this.syncPointTree_.findOnPath(path, function(pathSoFar, syncPoint) { + var relativePath = Path.relativePath(pathSoFar, path); + var serverCache = syncPoint.getCompleteServerCache(relativePath); + if (serverCache) { + return serverCache; + } + }); + return writeTree.calcCompleteEventCache(path, serverCache, writeIdsToExclude, includeHiddenSets); + }; + + /** + * This collapses multiple unfiltered views into a single view, since we only need a single + * listener for them. + * + * @param {!ImmutableTree.} subtree + * @return {!Array.} + * @private + */ + collectDistinctViewsForSubTree_(subtree) { + return subtree.fold(function(relativePath, maybeChildSyncPoint, childMap) { + if (maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { + var completeView = maybeChildSyncPoint.getCompleteView(); + return [completeView]; + } else { + // No complete view here, flatten any deeper listens into an array + var views = []; + if (maybeChildSyncPoint) { + views = maybeChildSyncPoint.getQueryViews(); + } + forEach(childMap, function(key, childViews) { + views = views.concat(childViews); + }); + return views; + } + }); + }; + + /** + * @param {!Array.} queries + * @private + */ + removeTags_(queries) { + for (var j = 0; j < queries.length; ++j) { + var removedQuery = queries[j]; + if (!removedQuery.getQueryParams().loadsAllData()) { + // We should have a tag for this + var removedQueryKey = this.makeQueryKey_(removedQuery); + var removedQueryTag = this.queryToTagMap_[removedQueryKey]; + delete this.queryToTagMap_[removedQueryKey]; + delete this.tagToQueryMap_['_' + removedQueryTag]; + } + } + }; + + + /** + * Normalizes a query to a query we send the server for listening + * @param {!Query} query + * @return {!Query} The normalized query + * @private + */ + queryForListening_(query: Query) { + if (query.getQueryParams().loadsAllData() && !query.getQueryParams().isDefault()) { + // We treat queries that load all data as default queries + // Cast is necessary because ref() technically returns Firebase which is actually fb.api.Firebase which inherits + // from Query + return /** @type {!Query} */(query.getRef()); + } else { + return query; + } + }; + + + /** + * For a given new listen, manage the de-duplication of outstanding subscriptions. + * + * @param {!Query} query + * @param {!View} view + * @return {!Array.} This method can return events to support synchronous data sources + * @private + */ + setupListener_(query, view) { + var path = query.path; + var tag = this.tagForQuery_(query); + var listener = this.createListenerForView_(view); + + var events = this.listenProvider_.startListening(this.queryForListening_(query), tag, listener.hashFn, + listener.onComplete); + + var subtree = this.syncPointTree_.subtree(path); + // The root of this subtree has our query. We're here because we definitely need to send a listen for that, but we + // may need to shadow other listens as well. + if (tag) { + assert(!subtree.value.hasCompleteView(), "If we're adding a query, it shouldn't be shadowed"); + } else { + // Shadow everything at or below this location, this is a default listener. 
+ var queriesToStop = subtree.fold(function(relativePath, maybeChildSyncPoint, childMap) { + if (!relativePath.isEmpty() && maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { + return [maybeChildSyncPoint.getCompleteView().getQuery()]; + } else { + // No default listener here, flatten any deeper queries into an array + var queries = []; + if (maybeChildSyncPoint) { + queries = queries.concat( + maybeChildSyncPoint.getQueryViews().map(function(view) { + return view.getQuery(); + }) + ); + } + forEach(childMap, function(key, childQueries) { + queries = queries.concat(childQueries); + }); + return queries; + } + }); + for (var i = 0; i < queriesToStop.length; ++i) { + var queryToStop = queriesToStop[i]; + this.listenProvider_.stopListening(this.queryForListening_(queryToStop), this.tagForQuery_(queryToStop)); + } + } + return events; + }; + + /** + * + * @param {!View} view + * @return {{hashFn: function(), onComplete: function(!string, *)}} + * @private + */ + createListenerForView_(view) { + var self = this; + var query = view.getQuery(); + var tag = this.tagForQuery_(query); + + return { + hashFn: function() { + var cache = view.getServerCache() || ChildrenNode.EMPTY_NODE; + return cache.hash(); + }, + onComplete: function(status, data) { + if (status === 'ok') { + if (tag) { + return self.applyTaggedListenComplete(query.path, tag); + } else { + return self.applyListenComplete(query.path); + } + } else { + // If a listen failed, kill all of the listeners here, not just the one that triggered the error. + // Note that this may need to be scoped to just this listener if we change permissions on filtered children + var error = errorForServerCode(status, query); + return self.removeEventRegistration(query, /*eventRegistration*/null, error); + } + } + }; + }; + + /** + * Given a query, computes a "queryKey" suitable for use in our queryToTagMap_. + * @private + * @param {!Query} query + * @return {string} + */ + makeQueryKey_(query) { + return query.path.toString() + '$' + query.queryIdentifier(); + }; + + /** + * Given a queryKey (created by makeQueryKey), parse it back into a path and queryId. + * @private + * @param {!string} queryKey + * @return {{queryId: !string, path: !Path}} + */ + parseQueryKey_(queryKey) { + var splitIndex = queryKey.indexOf('$'); + assert(splitIndex !== -1 && splitIndex < queryKey.length - 1, 'Bad queryKey.'); + return { + queryId: queryKey.substr(splitIndex + 1), + path: new Path(queryKey.substr(0, splitIndex)) + }; + }; + + /** + * Return the query associated with the given tag, if we have one + * @param {!number} tag + * @return {?string} + * @private + */ + queryKeyForTag_(tag) { + return this.tagToQueryMap_['_' + tag]; + }; + + /** + * Return the tag associated with the given query. + * @param {!Query} query + * @return {?number} + * @private + */ + tagForQuery_(query) { + var queryKey = this.makeQueryKey_(query); + return safeGet(this.queryToTagMap_, queryKey); + }; + + /** + * Static tracker for next query tag. + * @type {number} + * @private + */ + static nextQueryTag_ = 1; + + /** + * Static accessor for query tags. 
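A small illustration of the bookkeeping these tags support (the concrete key string below is made up; only the '$'-separated shape matters): makeQueryKey_ and parseQueryKey_ are inverses, and the '_' prefix in tagToQueryMap_ keeps numeric tags from turning that map into a sparse array-like object.

// Illustrative only.
const tag = SyncTree.getNextQueryTag_();            // e.g. 4
const queryKey = '/scores$' + 'limit=10';           // path + '$' + queryIdentifier()
this.queryToTagMap_[queryKey] = tag;
this.tagToQueryMap_['_' + tag] = queryKey;

const { path, queryId } = this.parseQueryKey_(queryKey);
// path.toString() === '/scores', queryId === 'limit=10'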
+ * @return {number} + * @private + */ + static getNextQueryTag_ = function() { + return SyncTree.nextQueryTag_++; + }; + + /** + * A helper method to apply tagged operations + * + * @param {!Path} queryPath + * @param {!string} queryId + * @param {!Operation} operation + * @return {!Array.} + * @private + */ + applyTaggedOperation_(queryPath, queryId, operation) { + var syncPoint = this.syncPointTree_.get(queryPath); + assert(syncPoint, "Missing sync point for query tag that we're tracking"); + var writesCache = this.pendingWriteTree_.childWrites(queryPath); + return syncPoint.applyOperation(operation, writesCache, /*serverCache=*/null); + } + + /** + * A helper method that visits all descendant and ancestor SyncPoints, applying the operation. + * + * NOTES: + * - Descendant SyncPoints will be visited first (since we raise events depth-first). + + * - We call applyOperation() on each SyncPoint passing three things: + * 1. A version of the Operation that has been made relative to the SyncPoint location. + * 2. A WriteTreeRef of any writes we have cached at the SyncPoint location. + * 3. A snapshot Node with cached server data, if we have it. + + * - We concatenate all of the events returned by each SyncPoint and return the result. + * + * @param {!Operation} operation + * @return {!Array.} + * @private + */ + applyOperationToSyncPoints_(operation) { + return this.applyOperationHelper_(operation, this.syncPointTree_, /*serverCache=*/ null, + this.pendingWriteTree_.childWrites(Path.Empty)); + + }; + + /** + * Recursive helper for applyOperationToSyncPoints_ + * + * @private + * @param {!Operation} operation + * @param {ImmutableTree.} syncPointTree + * @param {?Node} serverCache + * @param {!WriteTreeRef} writesCache + * @return {!Array.} + */ + applyOperationHelper_(operation, syncPointTree, serverCache, writesCache) { + + if (operation.path.isEmpty()) { + return this.applyOperationDescendantsHelper_(operation, syncPointTree, serverCache, writesCache); + } else { + var syncPoint = syncPointTree.get(Path.Empty); + + // If we don't have cached server data, see if we can get it from this SyncPoint. + if (serverCache == null && syncPoint != null) { + serverCache = syncPoint.getCompleteServerCache(Path.Empty); + } + + var events = []; + var childName = operation.path.getFront(); + var childOperation = operation.operationForChild(childName); + var childTree = syncPointTree.children.get(childName); + if (childTree && childOperation) { + var childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + var childWritesCache = writesCache.child(childName); + events = events.concat( + this.applyOperationHelper_(childOperation, childTree, childServerCache, childWritesCache)); + } + + if (syncPoint) { + events = events.concat(syncPoint.applyOperation(operation, writesCache, serverCache)); + } + + return events; + } + }; + + /** + * Recursive helper for applyOperationToSyncPoints_ + * + * @private + * @param {!Operation} operation + * @param {ImmutableTree.} syncPointTree + * @param {?Node} serverCache + * @param {!WriteTreeRef} writesCache + * @return {!Array.} + */ + applyOperationDescendantsHelper_(operation, syncPointTree, + serverCache, writesCache) { + var syncPoint = syncPointTree.get(Path.Empty); + + // If we don't have cached server data, see if we can get it from this SyncPoint. 
+ if (serverCache == null && syncPoint != null) { + serverCache = syncPoint.getCompleteServerCache(Path.Empty); + } + + var events = []; + var self = this; + syncPointTree.children.inorderTraversal(function(childName, childTree) { + var childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + var childWritesCache = writesCache.child(childName); + var childOperation = operation.operationForChild(childName); + if (childOperation) { + events = events.concat( + self.applyOperationDescendantsHelper_(childOperation, childTree, childServerCache, childWritesCache)); + } + }); + + if (syncPoint) { + events = events.concat(syncPoint.applyOperation(operation, writesCache, serverCache)); + } + + return events; + }; +} diff --git a/src/database/core/WriteTree.ts b/src/database/core/WriteTree.ts new file mode 100644 index 00000000000..e780039f000 --- /dev/null +++ b/src/database/core/WriteTree.ts @@ -0,0 +1,639 @@ +import { findKey, forEach, safeGet } from "../../utils/obj"; +import { assert, assertionError } from "../../utils/assert"; +import { Path } from "./util/Path"; +import { CompoundWrite } from "./CompoundWrite"; +import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; +import { ChildrenNode } from "./snap/ChildrenNode"; + +/** + * Defines a single user-initiated write operation. May be the result of a set(), transaction(), or update() call. In + * the case of a set() or transaction, snap wil be non-null. In the case of an update(), children will be non-null. + * + * @typedef {{ + * writeId: number, + * path: !Path, + * snap: ?Node, + * children: ?Object., + * visible: boolean + * }} + */ + +/** + * WriteTree tracks all pending user-initiated writes and has methods to calculate the result of merging them + * with underlying server data (to create "event cache" data). Pending writes are added with addOverwrite() + * and addMerge(), and removed with removeWrite(). + * + * @constructor + */ +export class WriteTree { + /** + * A tree tracking the result of applying all visible writes. This does not include transactions with + * applyLocally=false or writes that are completely shadowed by other writes. + * + * @type {!CompoundWrite} + * @private + */ + visibleWrites_; + + /** + * A list of all pending writes, regardless of visibility and shadowed-ness. Used to calculate arbitrary + * sets of the changed data, such as hidden writes (from transactions) or changes with certain writes excluded (also + * used by transactions). + * + * @type {!Array.} + * @private + */ + allWrites_; + lastWriteId_; + + constructor() { + this.visibleWrites_ = CompoundWrite.Empty; + this.allWrites_ = []; + this.lastWriteId_ = -1; + }; + /** + * Create a new WriteTreeRef for the given path. For use with a new sync point at the given path. + * + * @param {!Path} path + * @return {!WriteTreeRef} + */ + childWrites(path): WriteTreeRef { + return new WriteTreeRef(path, this); + }; + + /** + * Record a new overwrite from user code. + * + * @param {!Path} path + * @param {!Node} snap + * @param {!number} writeId + * @param {boolean=} visible This is set to false by some transactions. 
It should be excluded from event caches + */ + addOverwrite(path, snap, writeId, visible) { + assert(writeId > this.lastWriteId_, 'Stacking an older write on top of newer ones'); + if (visible === undefined) { + visible = true; + } + this.allWrites_.push({path: path, snap: snap, writeId: writeId, visible: visible}); + + if (visible) { + this.visibleWrites_ = this.visibleWrites_.addWrite(path, snap); + } + this.lastWriteId_ = writeId; + }; + + /** + * Record a new merge from user code. + * + * @param {!Path} path + * @param {!Object.} changedChildren + * @param {!number} writeId + */ + addMerge(path, changedChildren, writeId) { + assert(writeId > this.lastWriteId_, 'Stacking an older merge on top of newer ones'); + this.allWrites_.push({path: path, children: changedChildren, writeId: writeId, visible: true}); + + this.visibleWrites_ = this.visibleWrites_.addWrites(path, changedChildren); + this.lastWriteId_ = writeId; + }; + + + /** + * @param {!number} writeId + * @return {?WriteRecord} + */ + getWrite(writeId) { + for (var i = 0; i < this.allWrites_.length; i++) { + var record = this.allWrites_[i]; + if (record.writeId === writeId) { + return record; + } + } + return null; + }; + + + /** + * Remove a write (either an overwrite or merge) that has been successfully acknowledge by the server. Recalculates + * the tree if necessary. We return true if it may have been visible, meaning views need to reevaluate. + * + * @param {!number} writeId + * @return {boolean} true if the write may have been visible (meaning we'll need to reevaluate / raise + * events as a result). + */ + removeWrite(writeId) { + // Note: disabling this check. It could be a transaction that preempted another transaction, and thus was applied + // out of order. + //var validClear = revert || this.allWrites_.length === 0 || writeId <= this.allWrites_[0].writeId; + //assert(validClear, "Either we don't have this write, or it's the first one in the queue"); + + var idx = this.allWrites_.findIndex(function(s) { return s.writeId === writeId; }); + assert(idx >= 0, 'removeWrite called with nonexistent writeId.'); + var writeToRemove = this.allWrites_[idx]; + this.allWrites_.splice(idx, 1); + + var removedWriteWasVisible = writeToRemove.visible; + var removedWriteOverlapsWithOtherWrites = false; + + var i = this.allWrites_.length - 1; + + while (removedWriteWasVisible && i >= 0) { + var currentWrite = this.allWrites_[i]; + if (currentWrite.visible) { + if (i >= idx && this.recordContainsPath_(currentWrite, writeToRemove.path)) { + // The removed write was completely shadowed by a subsequent write. + removedWriteWasVisible = false; + } else if (writeToRemove.path.contains(currentWrite.path)) { + // Either we're covering some writes or they're covering part of us (depending on which came first). + removedWriteOverlapsWithOtherWrites = true; + } + } + i--; + } + + if (!removedWriteWasVisible) { + return false; + } else if (removedWriteOverlapsWithOtherWrites) { + // There's some shadowing going on. Just rebuild the visible writes from scratch. + this.resetTree_(); + return true; + } else { + // There's no shadowing. We can safely just remove the write(s) from visibleWrites. 
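+ // An overwrite is removed at its own path; for a merge, each changed child is removed individually.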
+ if (writeToRemove.snap) { + this.visibleWrites_ = this.visibleWrites_.removeWrite(writeToRemove.path); + } else { + var children = writeToRemove.children; + var self = this; + forEach(children, function(childName, childSnap) { + self.visibleWrites_ = self.visibleWrites_.removeWrite(writeToRemove.path.child(childName)); + }); + } + return true; + } + }; + + /** + * Return a complete snapshot for the given path if there's visible write data at that path, else null. + * No server data is considered. + * + * @param {!Path} path + * @return {?Node} + */ + getCompleteWriteData(path) { + return this.visibleWrites_.getCompleteNode(path); + }; + + /** + * Given optional, underlying server data, and an optional set of constraints (exclude some sets, include hidden + * writes), attempt to calculate a complete snapshot for the given path + * + * @param {!Path} treePath + * @param {?Node} completeServerCache + * @param {Array.=} writeIdsToExclude An optional set to be excluded + * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false + * @return {?Node} + */ + calcCompleteEventCache(treePath, completeServerCache, writeIdsToExclude, + includeHiddenWrites) { + if (!writeIdsToExclude && !includeHiddenWrites) { + var shadowingNode = this.visibleWrites_.getCompleteNode(treePath); + if (shadowingNode != null) { + return shadowingNode; + } else { + var subMerge = this.visibleWrites_.childCompoundWrite(treePath); + if (subMerge.isEmpty()) { + return completeServerCache; + } else if (completeServerCache == null && !subMerge.hasCompleteWrite(Path.Empty)) { + // We wouldn't have a complete snapshot, since there's no underlying data and no complete shadow + return null; + } else { + var layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; + return subMerge.apply(layeredCache); + } + } + } else { + var merge = this.visibleWrites_.childCompoundWrite(treePath); + if (!includeHiddenWrites && merge.isEmpty()) { + return completeServerCache; + } else { + // If the server cache is null, and we don't have a complete cache, we need to return null + if (!includeHiddenWrites && completeServerCache == null && !merge.hasCompleteWrite(Path.Empty)) { + return null; + } else { + var filter = function(write) { + return (write.visible || includeHiddenWrites) && + (!writeIdsToExclude || !~writeIdsToExclude.indexOf(write.writeId)) && + (write.path.contains(treePath) || treePath.contains(write.path)); + }; + var mergeAtPath = WriteTree.layerTree_(this.allWrites_, filter, treePath); + layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; + return mergeAtPath.apply(layeredCache); + } + } + } + }; + + /** + * With optional, underlying server data, attempt to return a children node of children that we have complete data for. + * Used when creating new views, to pre-fill their complete event children snapshot. + * + * @param {!Path} treePath + * @param {?ChildrenNode} completeServerChildren + * @return {!ChildrenNode} + */ + calcCompleteEventChildren(treePath, completeServerChildren) { + var completeChildren = ChildrenNode.EMPTY_NODE; + var topLevelSet = this.visibleWrites_.getCompleteNode(treePath); + if (topLevelSet) { + if (!topLevelSet.isLeafNode()) { + // we're shadowing everything. Return the children. 
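+ // (If the shadowing node is a leaf it has no children, so completeChildren stays empty.)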
+ topLevelSet.forEachChild(PRIORITY_INDEX, function(childName, childSnap) { + completeChildren = completeChildren.updateImmediateChild(childName, childSnap); + }); + } + return completeChildren; + } else if (completeServerChildren) { + // Layer any children we have on top of this + // We know we don't have a top-level set, so just enumerate existing children + var merge = this.visibleWrites_.childCompoundWrite(treePath); + completeServerChildren.forEachChild(PRIORITY_INDEX, function(childName, childNode) { + var node = merge.childCompoundWrite(new Path(childName)).apply(childNode); + completeChildren = completeChildren.updateImmediateChild(childName, node); + }); + // Add any complete children we have from the set + merge.getCompleteChildren().forEach(function(namedNode) { + completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); + }); + return completeChildren; + } else { + // We don't have anything to layer on top of. Layer on any children we have + // Note that we can return an empty snap if we have a defined delete + merge = this.visibleWrites_.childCompoundWrite(treePath); + merge.getCompleteChildren().forEach(function(namedNode) { + completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); + }); + return completeChildren; + } + }; + + /** + * Given that the underlying server data has updated, determine what, if anything, needs to be + * applied to the event cache. + * + * Possibilities: + * + * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data + * + * 2. Some write is completely shadowing. No events to be raised + * + * 3. Is partially shadowed. Events + * + * Either existingEventSnap or existingServerSnap must exist + * + * @param {!Path} treePath + * @param {!Path} childPath + * @param {?Node} existingEventSnap + * @param {?Node} existingServerSnap + * @return {?Node} + */ + calcEventCacheAfterServerOverwrite(treePath, childPath, existingEventSnap, + existingServerSnap) { + assert(existingEventSnap || existingServerSnap, + 'Either existingEventSnap or existingServerSnap must exist'); + var path = treePath.child(childPath); + if (this.visibleWrites_.hasCompleteWrite(path)) { + // At this point we can probably guarantee that we're in case 2, meaning no events + // May need to check visibility while doing the findRootMostValueAndPath call + return null; + } else { + // No complete shadowing. We're either partially shadowing or not shadowing at all. + var childMerge = this.visibleWrites_.childCompoundWrite(path); + if (childMerge.isEmpty()) { + // We're not shadowing at all. Case 1 + return existingServerSnap.getChild(childPath); + } else { + // This could be more efficient if the serverNode + updates doesn't change the eventSnap + // However this is tricky to find out, since user updates don't necessary change the server + // snap, e.g. priority updates on empty nodes, or deep deletes. Another special case is if the server + // adds nodes, but doesn't change any existing writes. It is therefore not enough to + // only check if the updates change the serverNode. + // Maybe check if the merge tree contains these special cases and only do a full overwrite in that case? + return childMerge.apply(existingServerSnap.getChild(childPath)); + } + } + }; + + /** + * Returns a complete child for a given server snap after applying all user writes or null if there is no + * complete child for this ChildKey. 
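+ * A child is considered complete if a user write fully shadows it or the server cache is
+ * complete for that key.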
+ * + * @param {!Path} treePath + * @param {!string} childKey + * @param {!CacheNode} existingServerSnap + * @return {?Node} + */ + calcCompleteChild(treePath, childKey, existingServerSnap) { + var path = treePath.child(childKey); + var shadowingNode = this.visibleWrites_.getCompleteNode(path); + if (shadowingNode != null) { + return shadowingNode; + } else { + if (existingServerSnap.isCompleteForChild(childKey)) { + var childMerge = this.visibleWrites_.childCompoundWrite(path); + return childMerge.apply(existingServerSnap.getNode().getImmediateChild(childKey)); + } else { + return null; + } + } + }; + + /** + * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at + * a higher path, this will return the child of that write relative to the write and this path. + * Returns null if there is no write at this path. + * + * @param {!Path} path + * @return {?Node} + */ + shadowingWrite(path) { + return this.visibleWrites_.getCompleteNode(path); + }; + + /** + * This method is used when processing child remove events on a query. If we can, we pull in children that were outside + * the window, but may now be in the window. + * + * @param {!Path} treePath + * @param {?Node} completeServerData + * @param {!NamedNode} startPost + * @param {!number} count + * @param {boolean} reverse + * @param {!Index} index + * @return {!Array.} + */ + calcIndexedSlice(treePath, completeServerData, startPost, count, reverse, + index) { + var toIterate; + var merge = this.visibleWrites_.childCompoundWrite(treePath); + var shadowingNode = merge.getCompleteNode(Path.Empty); + if (shadowingNode != null) { + toIterate = shadowingNode; + } else if (completeServerData != null) { + toIterate = merge.apply(completeServerData); + } else { + // no children to iterate on + return []; + } + toIterate = toIterate.withIndex(index); + if (!toIterate.isEmpty() && !toIterate.isLeafNode()) { + var nodes = []; + var cmp = index.getCompare(); + var iter = reverse ? toIterate.getReverseIteratorFrom(startPost, index) : + toIterate.getIteratorFrom(startPost, index); + var next = iter.getNext(); + while (next && nodes.length < count) { + if (cmp(next, startPost) !== 0) { + nodes.push(next); + } + next = iter.getNext(); + } + return nodes; + } else { + return []; + } + }; + + /** + * @param {!WriteRecord} writeRecord + * @param {!Path} path + * @return {boolean} + * @private + */ + recordContainsPath_(writeRecord, path) { + if (writeRecord.snap) { + return writeRecord.path.contains(path); + } else { + // findKey can return undefined, so use !! to coerce to boolean + return !!findKey(writeRecord.children, function(childSnap, childName) { + return writeRecord.path.child(childName).contains(path); + }); + } + }; + + /** + * Re-layer the writes and merges into a tree so we can efficiently calculate event snapshots + * @private + */ + resetTree_() { + this.visibleWrites_ = WriteTree.layerTree_(this.allWrites_, WriteTree.DefaultFilter_, + Path.Empty); + if (this.allWrites_.length > 0) { + this.lastWriteId_ = this.allWrites_[this.allWrites_.length - 1].writeId; + } else { + this.lastWriteId_ = -1; + } + }; + + /** + * The default filter used when constructing the tree. Keep everything that's visible. + * + * @param {!WriteRecord} write + * @return {boolean} + * @private + * @const + */ + static DefaultFilter_ = function(write) { return write.visible; }; + + /** + * Static method. 
Given an array of WriteRecords, a filter for which ones to include, and a path, construct the tree of + * event data at that path. + * + * @param {!Array.} writes + * @param {!function(!WriteRecord):boolean} filter + * @param {!Path} treeRoot + * @return {!CompoundWrite} + * @private + */ + static layerTree_ = function(writes, filter, treeRoot) { + var compoundWrite = CompoundWrite.Empty; + for (var i = 0; i < writes.length; ++i) { + var write = writes[i]; + // Theory, a later set will either: + // a) abort a relevant transaction, so no need to worry about excluding it from calculating that transaction + // b) not be relevant to a transaction (separate branch), so again will not affect the data for that transaction + if (filter(write)) { + var writePath = write.path; + var relativePath; + if (write.snap) { + if (treeRoot.contains(writePath)) { + relativePath = Path.relativePath(treeRoot, writePath); + compoundWrite = compoundWrite.addWrite(relativePath, write.snap); + } else if (writePath.contains(treeRoot)) { + relativePath = Path.relativePath(writePath, treeRoot); + compoundWrite = compoundWrite.addWrite(Path.Empty, write.snap.getChild(relativePath)); + } else { + // There is no overlap between root path and write path, ignore write + } + } else if (write.children) { + if (treeRoot.contains(writePath)) { + relativePath = Path.relativePath(treeRoot, writePath); + compoundWrite = compoundWrite.addWrites(relativePath, write.children); + } else if (writePath.contains(treeRoot)) { + relativePath = Path.relativePath(writePath, treeRoot); + if (relativePath.isEmpty()) { + compoundWrite = compoundWrite.addWrites(Path.Empty, write.children); + } else { + var child = safeGet(write.children, relativePath.getFront()); + if (child) { + // There exists a child in this node that matches the root path + var deepNode = child.getChild(relativePath.popFront()); + compoundWrite = compoundWrite.addWrite(Path.Empty, deepNode); + } + } + } else { + // There is no overlap between root path and write path, ignore write + } + } else { + throw assertionError('WriteRecord should have .snap or .children'); + } + } + } + return compoundWrite; + }; +} + +/** + * A WriteTreeRef wraps a WriteTree and a path, for convenient access to a particular subtree. All of the methods + * just proxy to the underlying WriteTree. + * + * @param {!Path} path + * @param {!WriteTree} writeTree + * @constructor + */ +export class WriteTreeRef { + /** + * The path to this particular write tree ref. Used for calling methods on writeTree_ while exposing a simpler + * interface to callers. + * + * @type {!Path} + * @private + * @const + */ + treePath_; + + /** + * * A reference to the actual tree of write data. All methods are pass-through to the tree, but with the appropriate + * path prefixed. + * + * This lets us make cheap references to points in the tree for sync points without having to copy and maintain all of + * the data. + * + * @type {!WriteTree} + * @private + * @const + */ + writeTree_; + + constructor(path, writeTree) { + this.treePath_ = path; + this.writeTree_ = writeTree; + }; + /** + * If possible, returns a complete event cache, using the underlying server data if possible. In addition, can be used + * to get a cache that includes hidden writes, and excludes arbitrary writes. Note that customizing the returned node + * can lead to a more expensive calculation. + * + * @param {?Node} completeServerCache + * @param {Array.=} writeIdsToExclude Optional writes to exclude. 
+ * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false + * @return {?Node} + */ + calcCompleteEventCache(completeServerCache, writeIdsToExclude, + includeHiddenWrites) { + return this.writeTree_.calcCompleteEventCache(this.treePath_, completeServerCache, writeIdsToExclude, + includeHiddenWrites); + }; + + /** + * If possible, returns a children node containing all of the complete children we have data for. The returned data is a + * mix of the given server data and write data. + * + * @param {?ChildrenNode} completeServerChildren + * @return {!ChildrenNode} + */ + calcCompleteEventChildren(completeServerChildren) { + return this.writeTree_.calcCompleteEventChildren(this.treePath_, completeServerChildren); + }; + + /** + * Given that either the underlying server data has updated or the outstanding writes have updated, determine what, + * if anything, needs to be applied to the event cache. + * + * Possibilities: + * + * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data + * + * 2. Some write is completely shadowing. No events to be raised + * + * 3. Is partially shadowed. Events should be raised + * + * Either existingEventSnap or existingServerSnap must exist, this is validated via an assert + * + * @param {!Path} path + * @param {?Node} existingEventSnap + * @param {?Node} existingServerSnap + * @return {?Node} + */ + calcEventCacheAfterServerOverwrite(path, existingEventSnap, existingServerSnap) { + return this.writeTree_.calcEventCacheAfterServerOverwrite(this.treePath_, path, existingEventSnap, existingServerSnap); + }; + + /** + * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at + * a higher path, this will return the child of that write relative to the write and this path. + * Returns null if there is no write at this path. + * + * @param {!Path} path + * @return {?Node} + */ + shadowingWrite(path) { + return this.writeTree_.shadowingWrite(this.treePath_.child(path)); + }; + + /** + * This method is used when processing child remove events on a query. If we can, we pull in children that were outside + * the window, but may now be in the window + * + * @param {?Node} completeServerData + * @param {!NamedNode} startPost + * @param {!number} count + * @param {boolean} reverse + * @param {!Index} index + * @return {!Array.} + */ + calcIndexedSlice(completeServerData, startPost, count, reverse, index) { + return this.writeTree_.calcIndexedSlice(this.treePath_, completeServerData, startPost, count, reverse, index); + }; + + /** + * Returns a complete child for a given server snap after applying all user writes or null if there is no + * complete child for this ChildKey. + * + * @param {!string} childKey + * @param {!CacheNode} existingServerCache + * @return {?Node} + */ + calcCompleteChild(childKey, existingServerCache) { + return this.writeTree_.calcCompleteChild(this.treePath_, childKey, existingServerCache); + }; + + /** + * Return a WriteTreeRef for a child. 
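+ * The returned ref shares this ref's underlying WriteTree; only the path prefix is extended.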
+ * + * @param {string} childName + * @return {!WriteTreeRef} + */ + child(childName) { + return new WriteTreeRef(this.treePath_.child(childName), this.writeTree_); + }; +} diff --git a/src/database/core/operation/AckUserWrite.ts b/src/database/core/operation/AckUserWrite.ts new file mode 100644 index 00000000000..cc7da8cde60 --- /dev/null +++ b/src/database/core/operation/AckUserWrite.ts @@ -0,0 +1,42 @@ +import { assert } from "../../../utils/assert"; +import { Path } from "../util/Path"; +import { Operation, OperationSource, OperationType } from './Operation'; +import { ImmutableTree } from '../util/ImmutableTree'; + +export class AckUserWrite implements Operation { + /** @inheritDoc */ + type = OperationType.ACK_USER_WRITE; + + /** @inheritDoc */ + source = OperationSource.User; + + /** + * + * @param {!Path} path + * @param {!ImmutableTree} affectedTree A tree containing true for each affected path. Affected paths can't overlap. + * @param {!boolean} revert + */ + constructor(/**@inheritDoc */ public path: Path, + /**@inheritDoc */ public affectedTree: ImmutableTree, + /**@inheritDoc */ public revert: boolean) { + + } + + /** + * @inheritDoc + */ + operationForChild(childName: string): AckUserWrite { + if (!this.path.isEmpty()) { + assert(this.path.getFront() === childName, 'operationForChild called for unrelated child.'); + return new AckUserWrite(this.path.popFront(), this.affectedTree, this.revert); + } else if (this.affectedTree.value != null) { + assert(this.affectedTree.children.isEmpty(), + 'affectedTree should not have overlapping affected paths.'); + // All child locations are affected as well; just return same operation. + return this; + } else { + const childTree = this.affectedTree.subtree(new Path(childName)); + return new AckUserWrite(Path.Empty, childTree, this.revert); + } + } +} \ No newline at end of file diff --git a/src/database/core/operation/ListenComplete.ts b/src/database/core/operation/ListenComplete.ts new file mode 100644 index 00000000000..0a9ab05858f --- /dev/null +++ b/src/database/core/operation/ListenComplete.ts @@ -0,0 +1,24 @@ +import { Path } from "../util/Path"; +import { Operation, OperationSource, OperationType } from './Operation'; + +/** + * @param {!OperationSource} source + * @param {!Path} path + * @constructor + * @implements {Operation} + */ +export class ListenComplete implements Operation { + /** @inheritDoc */ + type = OperationType.LISTEN_COMPLETE; + + constructor(public source: OperationSource, public path: Path) { + } + + operationForChild(childName: string): ListenComplete { + if (this.path.isEmpty()) { + return new ListenComplete(this.source, Path.Empty); + } else { + return new ListenComplete(this.source, this.path.popFront()); + } + } +} diff --git a/src/database/core/operation/Merge.ts b/src/database/core/operation/Merge.ts new file mode 100644 index 00000000000..9d1578a6785 --- /dev/null +++ b/src/database/core/operation/Merge.ts @@ -0,0 +1,52 @@ +import { Operation, OperationSource, OperationType } from './Operation'; +import { Overwrite } from "./Overwrite"; +import { Path } from "../util/Path"; +import { assert } from "../../../utils/assert"; +import { ImmutableTree } from '../util/ImmutableTree'; + +/** + * @param {!OperationSource} source + * @param {!Path} path + * @param {!ImmutableTree.} children + * @constructor + * @implements {Operation} + */ +export class Merge implements Operation { + /** @inheritDoc */ + type = OperationType.MERGE; + + constructor(/**@inheritDoc */ public source: OperationSource, + /**@inheritDoc 
*/ public path: Path, + /**@inheritDoc */ public children: ImmutableTree) { + } + + /** + * @inheritDoc + */ + operationForChild(childName: string): Operation { + if (this.path.isEmpty()) { + const childTree = this.children.subtree(new Path(childName)); + if (childTree.isEmpty()) { + // This child is unaffected + return null; + } else if (childTree.value) { + // We have a snapshot for the child in question. This becomes an overwrite of the child. + return new Overwrite(this.source, Path.Empty, childTree.value); + } else { + // This is a merge at a deeper level + return new Merge(this.source, Path.Empty, childTree); + } + } else { + assert(this.path.getFront() === childName, + 'Can\'t get a merge for a child not on the path of the operation'); + return new Merge(this.source, this.path.popFront(), this.children); + } + } + + /** + * @inheritDoc + */ + toString(): string { + return 'Operation(' + this.path + ': ' + this.source.toString() + ' merge: ' + this.children.toString() + ')'; + } +} \ No newline at end of file diff --git a/src/database/core/operation/Operation.ts b/src/database/core/operation/Operation.ts new file mode 100644 index 00000000000..090763b634c --- /dev/null +++ b/src/database/core/operation/Operation.ts @@ -0,0 +1,74 @@ +import { assert } from "../../../utils/assert"; +import { Path } from '../util/Path'; + +/** + * + * @enum + */ +export enum OperationType { + OVERWRITE, + MERGE, + ACK_USER_WRITE, + LISTEN_COMPLETE +} + +/** + * @interface + */ +export interface Operation { + /** + * @type {!OperationSource} + */ + source: OperationSource; + + /** + * @type {!OperationType} + */ + type: OperationType; + + /** + * @type {!Path} + */ + path: Path; + + /** + * @param {string} childName + * @return {?Operation} + */ + operationForChild(childName: string): Operation | null; +} + +/** + * @param {boolean} fromUser + * @param {boolean} fromServer + * @param {?string} queryId + * @param {boolean} tagged + * @constructor + */ +export class OperationSource { + constructor(public fromUser: boolean, + public fromServer: boolean, + public queryId: string | null, + public tagged: boolean) { + assert(!tagged || fromServer, 'Tagged queries must be from server.'); + } + /** + * @const + * @type {!OperationSource} + */ + static User = new OperationSource(/*fromUser=*/true, false, null, /*tagged=*/false); + + /** + * @const + * @type {!OperationSource} + */ + static Server = new OperationSource(false, /*fromServer=*/true, null, /*tagged=*/false); + + /** + * @param {string} queryId + * @return {!OperationSource} + */ + static forServerTaggedQuery = function(queryId) { + return new OperationSource(false, /*fromServer=*/true, queryId, /*tagged=*/true); + }; +} \ No newline at end of file diff --git a/src/database/core/operation/Overwrite.ts b/src/database/core/operation/Overwrite.ts new file mode 100644 index 00000000000..1af32ce1d26 --- /dev/null +++ b/src/database/core/operation/Overwrite.ts @@ -0,0 +1,29 @@ +import { Operation, OperationSource, OperationType } from './Operation'; +import { Path } from "../util/Path"; +import { Node } from '../snap/Node'; + +/** + * @param {!OperationSource} source + * @param {!Path} path + * @param {!Node} snap + * @constructor + * @implements {Operation} + */ +export class Overwrite implements Operation { + /** @inheritDoc */ + type = OperationType.OVERWRITE; + + constructor(public source: OperationSource, + public path: Path, + public snap: Node) { + } + + operationForChild(childName: string): Overwrite { + if (this.path.isEmpty()) { + return new 
Overwrite(this.source, Path.Empty, + this.snap.getImmediateChild(childName)); + } else { + return new Overwrite(this.source, this.path.popFront(), this.snap); + } + } +} \ No newline at end of file diff --git a/src/database/core/snap/ChildrenNode.ts b/src/database/core/snap/ChildrenNode.ts new file mode 100644 index 00000000000..18e8e79b27a --- /dev/null +++ b/src/database/core/snap/ChildrenNode.ts @@ -0,0 +1,539 @@ +import { assert } from "../../../utils/assert"; +import { + sha1, + MAX_NAME, + MIN_NAME +} from "../util/util"; +import { SortedMap } from "../util/SortedMap"; +import { Node, NamedNode } from "./Node"; +import { + validatePriorityNode, + priorityHashText, + setMaxNode +} from "./snap"; +import { PRIORITY_INDEX, setMaxNode as setPriorityMaxNode } from "./indexes/PriorityIndex"; +import { KEY_INDEX, KeyIndex } from "./indexes/KeyIndex"; +import { IndexMap } from "./IndexMap"; +import { LeafNode } from "./LeafNode"; +import { NAME_COMPARATOR } from "./comparators"; +import "./indexes/Index"; + +// TODO: For memory savings, don't store priorityNode_ if it's empty. + +let EMPTY_NODE; + +/** + * ChildrenNode is a class for storing internal nodes in a DataSnapshot + * (i.e. nodes with children). It implements Node and stores the + * list of children in the children property, sorted by child name. + * + * @constructor + * @implements {Node} + * @param {!SortedMap.} children List of children + * of this node.. + * @param {?Node} priorityNode The priority of this node (as a snapshot node). + * @param {!IndexMap} indexMap + */ +export class ChildrenNode implements Node { + children_; + priorityNode_; + indexMap_; + lazyHash_; + + static get EMPTY_NODE() { + return EMPTY_NODE || (EMPTY_NODE = new ChildrenNode(new SortedMap(NAME_COMPARATOR), null, IndexMap.Default)); + } + + constructor(children, priorityNode, indexMap) { + /** + * @private + * @const + * @type {!SortedMap.} + */ + this.children_ = children; + + /** + * Note: The only reason we allow null priority is to for EMPTY_NODE, since we can't use + * EMPTY_NODE as the priority of EMPTY_NODE. We might want to consider making EMPTY_NODE its own + * class instead of an empty ChildrenNode. + * + * @private + * @const + * @type {?Node} + */ + this.priorityNode_ = priorityNode; + if (this.priorityNode_) { + validatePriorityNode(this.priorityNode_); + } + + if (children.isEmpty()) { + assert(!this.priorityNode_ || this.priorityNode_.isEmpty(), 'An empty node cannot have a priority'); + } + + /** + * + * @type {!IndexMap} + * @private + */ + this.indexMap_ = indexMap; + + /** + * + * @type {?string} + * @private + */ + this.lazyHash_ = null; + }; + + /** @inheritDoc */ + isLeafNode() { + return false; + }; + + /** @inheritDoc */ + getPriority() { + return this.priorityNode_ || EMPTY_NODE; + }; + + /** @inheritDoc */ + updatePriority(newPriorityNode) { + if (this.children_.isEmpty()) { + // Don't allow priorities on empty nodes + return this; + } else { + return new ChildrenNode(this.children_, newPriorityNode, this.indexMap_); + } + }; + + /** @inheritDoc */ + getImmediateChild(childName) { + // Hack to treat priority as a regular child + if (childName === '.priority') { + return this.getPriority(); + } else { + var child = this.children_.get(childName); + return child === null ? 
EMPTY_NODE : child; + } + }; + + /** @inheritDoc */ + getChild(path) { + var front = path.getFront(); + if (front === null) + return this; + + return this.getImmediateChild(front).getChild(path.popFront()); + }; + + /** @inheritDoc */ + hasChild(childName) { + return this.children_.get(childName) !== null; + }; + + /** @inheritDoc */ + updateImmediateChild(childName, newChildNode) { + assert(newChildNode, 'We should always be passing snapshot nodes'); + if (childName === '.priority') { + return this.updatePriority(newChildNode); + } else { + var namedNode = new NamedNode(childName, newChildNode); + var newChildren, newIndexMap, newPriority; + if (newChildNode.isEmpty()) { + newChildren = this.children_.remove(childName); + newIndexMap = this.indexMap_.removeFromIndexes(namedNode, this.children_ + ); + } else { + newChildren = this.children_.insert(childName, newChildNode); + newIndexMap = this.indexMap_.addToIndexes(namedNode, this.children_); + } + + newPriority = newChildren.isEmpty() ? EMPTY_NODE : this.priorityNode_; + return new ChildrenNode(newChildren, newPriority, newIndexMap); + } + }; + + /** @inheritDoc */ + updateChild(path, newChildNode) { + var front = path.getFront(); + if (front === null) { + return newChildNode; + } else { + assert(path.getFront() !== '.priority' || path.getLength() === 1, + '.priority must be the last token in a path'); + var newImmediateChild = this.getImmediateChild(front). + updateChild(path.popFront(), newChildNode); + return this.updateImmediateChild(front, newImmediateChild); + } + }; + + /** @inheritDoc */ + isEmpty() { + return this.children_.isEmpty(); + }; + + /** @inheritDoc */ + numChildren() { + return this.children_.count(); + }; + + /** + * @private + * @type {RegExp} + */ + static INTEGER_REGEXP_ = /^(0|[1-9]\d*)$/; + + /** @inheritDoc */ + val(opt_exportFormat) { + if (this.isEmpty()) + return null; + + var obj = { }; + var numKeys = 0, maxKey = 0, allIntegerKeys = true; + this.forEachChild(PRIORITY_INDEX, function(key, childNode) { + obj[key] = childNode.val(opt_exportFormat); + + numKeys++; + if (allIntegerKeys && ChildrenNode.INTEGER_REGEXP_.test(key)) { + maxKey = Math.max(maxKey, Number(key)); + } else { + allIntegerKeys = false; + } + }); + + if (!opt_exportFormat && allIntegerKeys && maxKey < 2 * numKeys) { + // convert to array. + var array = []; + for (var key in obj) + array[key] = obj[key]; + + return array; + } else { + if (opt_exportFormat && !this.getPriority().isEmpty()) { + obj['.priority'] = this.getPriority().val(); + } + return obj; + } + }; + + + /** @inheritDoc */ + hash() { + if (this.lazyHash_ === null) { + var toHash = ''; + if (!this.getPriority().isEmpty()) + toHash += 'priority:' + priorityHashText( + /**@type {(!string|!number)} */ (this.getPriority().val())) + ':'; + + this.forEachChild(PRIORITY_INDEX, function(key, childNode) { + var childHash = childNode.hash(); + if (childHash !== '') + toHash += ':' + key + ':' + childHash; + }); + + this.lazyHash_ = (toHash === '') ? '' : sha1(toHash); + } + return this.lazyHash_; + }; + + + /** @inheritDoc */ + getPredecessorChildName(childName, childNode, index) { + var idx = this.resolveIndex_(index); + if (idx) { + var predecessor = idx.getPredecessorKey(new NamedNode(childName, childNode)); + return predecessor ? 
predecessor.name : null; + } else { + return this.children_.getPredecessorKey(childName); + } + }; + + /** + * @param {!fb.core.snap.Index} indexDefinition + * @return {?string} + */ + getFirstChildName(indexDefinition) { + var idx = this.resolveIndex_(indexDefinition); + if (idx) { + var minKey = idx.minKey(); + return minKey && minKey.name; + } else { + return this.children_.minKey(); + } + }; + + /** + * @param {!fb.core.snap.Index} indexDefinition + * @return {?NamedNode} + */ + getFirstChild(indexDefinition) { + var minKey = this.getFirstChildName(indexDefinition); + if (minKey) { + return new NamedNode(minKey, this.children_.get(minKey)); + } else { + return null; + } + }; + + /** + * Given an index, return the key name of the largest value we have, according to that index + * @param {!fb.core.snap.Index} indexDefinition + * @return {?string} + */ + getLastChildName(indexDefinition) { + var idx = this.resolveIndex_(indexDefinition); + if (idx) { + var maxKey = idx.maxKey(); + return maxKey && maxKey.name; + } else { + return this.children_.maxKey(); + } + }; + + /** + * @param {!fb.core.snap.Index} indexDefinition + * @return {?NamedNode} + */ + getLastChild(indexDefinition) { + var maxKey = this.getLastChildName(indexDefinition); + if (maxKey) { + return new NamedNode(maxKey, this.children_.get(maxKey)); + } else { + return null; + } + }; + + + /** + * @inheritDoc + */ + forEachChild(index, action) { + var idx = this.resolveIndex_(index); + if (idx) { + return idx.inorderTraversal(function(wrappedNode) { + return action(wrappedNode.name, wrappedNode.node); + }); + } else { + return this.children_.inorderTraversal(action); + } + }; + + /** + * @param {!fb.core.snap.Index} indexDefinition + * @return {SortedMapIterator} + */ + getIterator(indexDefinition) { + return this.getIteratorFrom(indexDefinition.minPost(), indexDefinition); + }; + + /** + * + * @param {!NamedNode} startPost + * @param {!fb.core.snap.Index} indexDefinition + * @return {!SortedMapIterator} + */ + getIteratorFrom(startPost, indexDefinition) { + var idx = this.resolveIndex_(indexDefinition); + if (idx) { + return idx.getIteratorFrom(startPost, function(key) { return key; }); + } else { + var iterator = this.children_.getIteratorFrom(startPost.name, NamedNode.Wrap); + var next = iterator.peek(); + while (next != null && indexDefinition.compare(next, startPost) < 0) { + iterator.getNext(); + next = iterator.peek(); + } + return iterator; + } + }; + + /** + * @param {!fb.core.snap.Index} indexDefinition + * @return {!SortedMapIterator} + */ + getReverseIterator(indexDefinition) { + return this.getReverseIteratorFrom(indexDefinition.maxPost(), indexDefinition); + }; + + /** + * @param {!NamedNode} endPost + * @param {!fb.core.snap.Index} indexDefinition + * @return {!SortedMapIterator} + */ + getReverseIteratorFrom(endPost, indexDefinition) { + var idx = this.resolveIndex_(indexDefinition); + if (idx) { + return idx.getReverseIteratorFrom(endPost, function(key) { return key; }); + } else { + var iterator = this.children_.getReverseIteratorFrom(endPost.name, NamedNode.Wrap); + var next = iterator.peek(); + while (next != null && indexDefinition.compare(next, endPost) > 0) { + iterator.getNext(); + next = iterator.peek(); + } + return iterator; + } + }; + + /** + * @inheritDoc + */ + compareTo(other) { + if (this.isEmpty()) { + if (other.isEmpty()) { + return 0; + } else { + return -1; + } + } else if (other.isLeafNode() || other.isEmpty()) { + return 1; + } else if (other === MAX_NODE) { + return -1; + } else { + // 
Must be another node with children. + return 0; + } + }; + + /** + * @inheritDoc + */ + withIndex(indexDefinition) { + if (indexDefinition === KEY_INDEX || this.indexMap_.hasIndex(indexDefinition)) { + return this; + } else { + var newIndexMap = this.indexMap_.addIndex(indexDefinition, this.children_); + return new ChildrenNode(this.children_, this.priorityNode_, newIndexMap); + } + }; + + /** + * @inheritDoc + */ + isIndexed(index) { + return index === KEY_INDEX || this.indexMap_.hasIndex(index); + }; + + /** + * @inheritDoc + */ + equals(other) { + if (other === this) { + return true; + } + else if (other.isLeafNode()) { + return false; + } else { + var otherChildrenNode = /** @type {!ChildrenNode} */ (other); + if (!this.getPriority().equals(otherChildrenNode.getPriority())) { + return false; + } else if (this.children_.count() === otherChildrenNode.children_.count()) { + var thisIter = this.getIterator(PRIORITY_INDEX); + var otherIter = otherChildrenNode.getIterator(PRIORITY_INDEX); + var thisCurrent = thisIter.getNext(); + var otherCurrent = otherIter.getNext(); + while (thisCurrent && otherCurrent) { + if (thisCurrent.name !== otherCurrent.name || !thisCurrent.node.equals(otherCurrent.node)) { + return false; + } + thisCurrent = thisIter.getNext(); + otherCurrent = otherIter.getNext(); + } + return thisCurrent === null && otherCurrent === null; + } else { + return false; + } + } + }; + + + /** + * Returns a SortedMap ordered by index, or null if the default (by-key) ordering can be used + * instead. + * + * @private + * @param {!fb.core.snap.Index} indexDefinition + * @return {?SortedMap.} + */ + resolveIndex_(indexDefinition) { + if (indexDefinition === KEY_INDEX) { + return null; + } else { + return this.indexMap_.get(indexDefinition.toString()); + } + }; + +} + +/** + * @constructor + * @extends {ChildrenNode} + * @private + */ +export class MaxNode extends ChildrenNode { + constructor() { + super(new SortedMap(NAME_COMPARATOR), ChildrenNode.EMPTY_NODE, IndexMap.Default); + } + + compareTo(other) { + if (other === this) { + return 0; + } else { + return 1; + } + }; + + + equals(other) { + // Not that we every compare it, but MAX_NODE is only ever equal to itself + return other === this; + }; + + + getPriority() { + return this; + }; + + + getImmediateChild(childName) { + return ChildrenNode.EMPTY_NODE; + }; + + + isEmpty() { + return false; + }; +} + +/** + * Marker that will sort higher than any other snapshot. 
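+ * MaxNode compares greater than every other node (compareTo() returns 1 for anything but itself).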
+ * @type {!MAX_NODE} + * @const + */ +export const MAX_NODE = new MaxNode(); + +/** + * Document NamedNode extensions + */ +declare module './Node' { + interface NamedNode { + MIN: NamedNode, + MAX: NamedNode + } +} + +Object.defineProperties(NamedNode, { + MIN: { + value: new NamedNode(MIN_NAME, ChildrenNode.EMPTY_NODE) + }, + MAX: { + value: new NamedNode(MAX_NAME, MAX_NODE) + } +}); + +/** + * Reference Extensions + */ +KeyIndex.__EMPTY_NODE = ChildrenNode.EMPTY_NODE; +LeafNode.__childrenNodeConstructor = ChildrenNode; +setMaxNode(MAX_NODE); +setPriorityMaxNode(MAX_NODE); \ No newline at end of file diff --git a/src/database/core/snap/IndexMap.ts b/src/database/core/snap/IndexMap.ts new file mode 100644 index 00000000000..9d5eb731d06 --- /dev/null +++ b/src/database/core/snap/IndexMap.ts @@ -0,0 +1,162 @@ +import { assert } from "../../../utils/assert"; +import { buildChildSet } from "./childSet"; +import { contains, clone, map, safeGet } from "../../../utils/obj"; +import { NamedNode } from "./Node"; +import { PRIORITY_INDEX } from "./indexes/PriorityIndex"; +import { KEY_INDEX } from "./indexes/KeyIndex"; +let _defaultIndexMap; + +const fallbackObject = {}; + +/** + * + * @param {Object.>} indexes + * @param {Object.} indexSet + * @constructor + */ +export class IndexMap { + indexes_; + indexSet_; + + /** + * The default IndexMap for nodes without a priority + * @type {!IndexMap} + * @const + */ + static get Default() { + assert(fallbackObject && PRIORITY_INDEX, 'ChildrenNode.ts has not been loaded'); + _defaultIndexMap = _defaultIndexMap || new IndexMap( + { '.priority': fallbackObject }, + { '.priority': PRIORITY_INDEX } + ); + return _defaultIndexMap; + } + + constructor(indexes, indexSet) { + this.indexes_ = indexes; + this.indexSet_ = indexSet; + } + /** + * + * @param {!string} indexKey + * @return {?SortedMap.} + */ + get(indexKey) { + var sortedMap = safeGet(this.indexes_, indexKey); + if (!sortedMap) throw new Error('No index defined for ' + indexKey); + + if (sortedMap === fallbackObject) { + // The index exists, but it falls back to just name comparison. 
Return null so that the calling code uses the + // regular child map + return null; + } else { + return sortedMap; + } + }; + + /** + * @param {!Index} indexDefinition + * @return {boolean} + */ + hasIndex(indexDefinition) { + return contains(this.indexSet_, indexDefinition.toString()); + }; + + /** + * @param {!Index} indexDefinition + * @param {!SortedMap.} existingChildren + * @return {!IndexMap} + */ + addIndex(indexDefinition, existingChildren) { + assert(indexDefinition !== KEY_INDEX, + "KeyIndex always exists and isn't meant to be added to the IndexMap."); + var childList = []; + var sawIndexedValue = false; + var iter = existingChildren.getIterator(NamedNode.Wrap); + var next = iter.getNext(); + while (next) { + sawIndexedValue = sawIndexedValue || indexDefinition.isDefinedOn(next.node); + childList.push(next); + next = iter.getNext(); + } + var newIndex; + if (sawIndexedValue) { + newIndex = buildChildSet(childList, indexDefinition.getCompare()); + } else { + newIndex = fallbackObject; + } + var indexName = indexDefinition.toString(); + var newIndexSet = clone(this.indexSet_); + newIndexSet[indexName] = indexDefinition; + var newIndexes = clone(this.indexes_); + newIndexes[indexName] = newIndex; + return new IndexMap(newIndexes, newIndexSet); + }; + + + /** + * Ensure that this node is properly tracked in any indexes that we're maintaining + * @param {!NamedNode} namedNode + * @param {!SortedMap.} existingChildren + * @return {!IndexMap} + */ + addToIndexes(namedNode, existingChildren) { + var self = this; + var newIndexes = map(this.indexes_, function(indexedChildren, indexName) { + var index = safeGet(self.indexSet_, indexName); + assert(index, 'Missing index implementation for ' + indexName); + if (indexedChildren === fallbackObject) { + // Check to see if we need to index everything + if (index.isDefinedOn(namedNode.node)) { + // We need to build this index + var childList = []; + var iter = existingChildren.getIterator(NamedNode.Wrap); + var next = iter.getNext(); + while (next) { + if (next.name != namedNode.name) { + childList.push(next); + } + next = iter.getNext(); + } + childList.push(namedNode); + return buildChildSet(childList, index.getCompare()); + } else { + // No change, this remains a fallback + return fallbackObject; + } + } else { + var existingSnap = existingChildren.get(namedNode.name); + var newChildren = indexedChildren; + if (existingSnap) { + newChildren = newChildren.remove(new NamedNode(namedNode.name, existingSnap)); + } + return newChildren.insert(namedNode, namedNode.node); + } + }); + return new IndexMap(newIndexes, this.indexSet_); + }; + + /** + * Create a new IndexMap instance with the given value removed + * @param {!NamedNode} namedNode + * @param {!SortedMap.} existingChildren + * @return {!IndexMap} + */ + removeFromIndexes(namedNode, existingChildren) { + var newIndexes = map(this.indexes_, function(indexedChildren) { + if (indexedChildren === fallbackObject) { + // This is the fallback. 
Just return it, nothing to do in this case + return indexedChildren; + } else { + var existingSnap = existingChildren.get(namedNode.name); + if (existingSnap) { + return indexedChildren.remove(new NamedNode(namedNode.name, existingSnap)); + } else { + // No record of this child + return indexedChildren; + } + } + }); + return new IndexMap(newIndexes, this.indexSet_); + }; +} diff --git a/src/database/core/snap/LeafNode.ts b/src/database/core/snap/LeafNode.ts new file mode 100644 index 00000000000..47adae5d179 --- /dev/null +++ b/src/database/core/snap/LeafNode.ts @@ -0,0 +1,271 @@ +import { assert } from '../../../utils/assert' +import { + doubleToIEEE754String, + sha1 +} from "../util/util"; +import { + priorityHashText, + validatePriorityNode +} from "./snap"; +import { Node } from "./Node"; + +let __childrenNodeConstructor; + +/** + * LeafNode is a class for storing leaf nodes in a DataSnapshot. It + * implements Node and stores the value of the node (a string, + * number, or boolean) accessible via getValue(). + */ +export class LeafNode implements Node { + static set __childrenNodeConstructor(val) { + __childrenNodeConstructor = val; + } + static get __childrenNodeConstructor() { + return __childrenNodeConstructor; + } + /** + * The sort order for comparing leaf nodes of different types. If two leaf nodes have + * the same type, the comparison falls back to their value + * @type {Array.} + * @const + */ + static VALUE_TYPE_ORDER = ['object', 'boolean', 'number', 'string']; + + value_; + priorityNode_; + lazyHash_; + /** + * @implements {Node} + * @param {!(string|number|boolean|Object)} value The value to store in this leaf node. + * The object type is possible in the event of a deferred value + * @param {!Node=} opt_priorityNode The priority of this node. + */ + constructor(value, opt_priorityNode?) 
{ + /** + * @private + * @const + * @type {!(string|number|boolean|Object)} + */ + this.value_ = value; + assert(this.value_ !== undefined && this.value_ !== null, + "LeafNode shouldn't be created with null/undefined value."); + + /** + * @private + * @const + * @type {!Node} + */ + this.priorityNode_ = opt_priorityNode || LeafNode.__childrenNodeConstructor.EMPTY_NODE; + validatePriorityNode(this.priorityNode_); + + this.lazyHash_ = null; + } + + /** @inheritDoc */ + isLeafNode() { + return true; + } + + /** @inheritDoc */ + getPriority() { + return this.priorityNode_; + } + + /** @inheritDoc */ + updatePriority(newPriorityNode) { + return new LeafNode(this.value_, newPriorityNode); + } + + /** @inheritDoc */ + getImmediateChild(childName) { + // Hack to treat priority as a regular child + if (childName === '.priority') { + return this.priorityNode_; + } else { + return LeafNode.__childrenNodeConstructor.EMPTY_NODE; + } + } + + /** @inheritDoc */ + getChild(path) { + if (path.isEmpty()) { + return this; + } else if (path.getFront() === '.priority') { + return this.priorityNode_; + } else { + return LeafNode.__childrenNodeConstructor.EMPTY_NODE; + } + } + + /** + * @inheritDoc + */ + hasChild() { + return false; + } + + /** @inheritDoc */ + getPredecessorChildName(childName, childNode) { + return null; + } + + /** @inheritDoc */ + updateImmediateChild(childName, newChildNode) { + if (childName === '.priority') { + return this.updatePriority(newChildNode); + } else if (newChildNode.isEmpty() && childName !== '.priority') { + return this; + } else { + return LeafNode.__childrenNodeConstructor.EMPTY_NODE + .updateImmediateChild(childName, newChildNode) + .updatePriority(this.priorityNode_); + } + } + + /** @inheritDoc */ + updateChild(path, newChildNode) { + var front = path.getFront(); + if (front === null) { + return newChildNode; + } else if (newChildNode.isEmpty() && front !== '.priority') { + return this; + } else { + assert(front !== '.priority' || path.getLength() === 1, + '.priority must be the last token in a path'); + + return this.updateImmediateChild(front, LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateChild(path.popFront(), newChildNode)); + } + } + + /** @inheritDoc */ + isEmpty() { + return false; + } + + /** @inheritDoc */ + numChildren() { + return 0; + } + + /** @inheritDoc */ + forEachChild(index, action) { + return false; + } + + /** + * @inheritDoc + */ + val(opt_exportFormat) { + if (opt_exportFormat && !this.getPriority().isEmpty()) + return { '.value': this.getValue(), '.priority' : this.getPriority().val() }; + else + return this.getValue(); + } + + /** @inheritDoc */ + hash() { + if (this.lazyHash_ === null) { + var toHash = ''; + if (!this.priorityNode_.isEmpty()) + toHash += 'priority:' + priorityHashText( + /** @type {(number|string)} */ (this.priorityNode_.val())) + ':'; + + var type = typeof this.value_; + toHash += type + ':'; + if (type === 'number') { + toHash += doubleToIEEE754String(/** @type {number} */ (this.value_)); + } else { + toHash += this.value_; + } + this.lazyHash_ = sha1(toHash); + } + return /**@type {!string} */ (this.lazyHash_); + } + + /** + * Returns the value of the leaf node. + * @return {Object|string|number|boolean} The value of the node. 
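+ * An Object value only occurs for deferred values (see the constructor documentation).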
+ */ + getValue() { + return this.value_; + } + + /** + * @inheritDoc + */ + compareTo(other) { + if (other === LeafNode.__childrenNodeConstructor.EMPTY_NODE) { + return 1; + } else if (other instanceof LeafNode.__childrenNodeConstructor) { + return -1; + } else { + assert(other.isLeafNode(), 'Unknown node type'); + return this.compareToLeafNode_(/** @type {!LeafNode} */ (other)); + } + } + + /** + * Comparison specifically for two leaf nodes + * @param {!LeafNode} otherLeaf + * @return {!number} + * @private + */ + compareToLeafNode_(otherLeaf) { + var otherLeafType = typeof otherLeaf.value_; + var thisLeafType = typeof this.value_; + var otherIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(otherLeafType); + var thisIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(thisLeafType); + assert(otherIndex >= 0, 'Unknown leaf type: ' + otherLeafType); + assert(thisIndex >= 0, 'Unknown leaf type: ' + thisLeafType); + if (otherIndex === thisIndex) { + // Same type, compare values + if (thisLeafType === 'object') { + // Deferred value nodes are all equal, but we should also never get to this point... + return 0; + } else { + // Note that this works because true > false, all others are number or string comparisons + if (this.value_ < otherLeaf.value_) { + return -1; + } else if (this.value_ === otherLeaf.value_) { + return 0; + } else { + return 1; + } + } + } else { + return thisIndex - otherIndex; + } + } + + /** + * @inheritDoc + */ + withIndex() { + return this; + } + + /** + * @inheritDoc + */ + isIndexed() { + return true; + } + + /** + * @inheritDoc + */ + equals(other) { + /** + * @inheritDoc + */ + if (other === this) { + return true; + } + else if (other.isLeafNode()) { + var otherLeaf = /** @type {!LeafNode} */ (other); + return this.value_ === otherLeaf.value_ && this.priorityNode_.equals(otherLeaf.priorityNode_); + } else { + return false; + } + } +}; // end LeafNode \ No newline at end of file diff --git a/src/database/core/snap/Node.ts b/src/database/core/snap/Node.ts new file mode 100644 index 00000000000..d6b40b5a090 --- /dev/null +++ b/src/database/core/snap/Node.ts @@ -0,0 +1,161 @@ +import { Path } from "../util/Path"; +import { Index } from "./indexes/Index"; + +/** + * Node is an interface defining the common functionality for nodes in + * a DataSnapshot. + * + * @interface + */ +export interface Node { + /** + * Whether this node is a leaf node. + * @return {boolean} Whether this is a leaf node. + */ + isLeafNode(): boolean; + + + /** + * Gets the priority of the node. + * @return {!Node} The priority of the node. + */ + getPriority(): Node; + + + /** + * Returns a duplicate node with the new priority. + * @param {!Node} newPriorityNode New priority to set for the node. + * @return {!Node} Node with new priority. + */ + updatePriority(newPriorityNode: Node): Node; + + + /** + * Returns the specified immediate child, or null if it doesn't exist. + * @param {string} childName The name of the child to retrieve. + * @return {!Node} The retrieved child, or an empty node. + */ + getImmediateChild(childName: string): Node; + + + /** + * Returns a child by path, or null if it doesn't exist. + * @param {!Path} path The path of the child to retrieve. + * @return {!Node} The retrieved child or an empty node. + */ + getChild(path: Path): Node; + + + /** + * Returns the name of the child immediately prior to the specified childNode, or null. + * @param {!string} childName The name of the child to find the predecessor of. + * @param {!Node} childNode The node to find the predecessor of. 
+ * @param {!Index} index The index to use to determine the predecessor + * @return {?string} The name of the predecessor child, or null if childNode is the first child. + */ + getPredecessorChildName(childName: String, childNode: Node, index: Index): string; + + /** + * Returns a duplicate node, with the specified immediate child updated. + * Any value in the node will be removed. + * @param {string} childName The name of the child to update. + * @param {!Node} newChildNode The new child node + * @return {!Node} The updated node. + */ + updateImmediateChild(childName: string, newChildNode: Node): Node; + + + /** + * Returns a duplicate node, with the specified child updated. Any value will + * be removed. + * @param {!Path} path The path of the child to update. + * @param {!Node} newChildNode The new child node, which may be an empty node + * @return {!Node} The updated node. + */ + updateChild(path: Path, newChildNode: Node): Node; + + /** + * True if the immediate child specified exists + * @param {!string} childName + * @return {boolean} + */ + hasChild(childName: string): boolean; + + /** + * @return {boolean} True if this node has no value or children. + */ + isEmpty(): boolean; + + + /** + * @return {number} The number of children of this node. + */ + numChildren(): number; + + + /** + * Calls action for each child. + * @param {!Index} index + * @param {function(string, !Node)} action Action to be called for + * each child. It's passed the child name and the child node. + * @return {*} The first truthy value return by action, or the last falsey one + */ + forEachChild(index: Index, action: (string, node) => any): any; + + /** + * @param {boolean=} opt_exportFormat True for export format (also wire protocol format). + * @return {*} Value of this node as JSON. + */ + val(exportFormat?: boolean): Object; + + /** + * @return {string} hash representing the node contents. 
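+ * For leaf nodes (see LeafNode.hash) this is a sha1 hash of an optional
+ * 'priority:<priorityHash>:' prefix followed by '<typeof value>:<value>', with
+ * numeric values first converted via doubleToIEEE754String.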
+ */ + hash(): string; + + /** + * @param {!Node} other Another node + * @return {!number} -1 for less than, 0 for equal, 1 for greater than other + */ + compareTo(other: Node): number; + + /** + * @param {!Node} other + * @return {boolean} Whether or not this snapshot equals other + */ + equals(other: Node): boolean; + + /** + * @param {!Index} indexDefinition + * @return {!Node} This node, with the specified index now available + */ + withIndex(indexDefinition: Index): Node; + + /** + * @param {!Index} indexDefinition + * @return {boolean} + */ + isIndexed(indexDefinition: Index): boolean; +} + +/** + * + * @param {!string} name + * @param {!Node} node + * @constructor + * @struct + */ +export class NamedNode { + constructor(public name: string, public node: Node) {} + + /** + * + * @param {!string} name + * @param {!Node} node + * @return {NamedNode} + */ + static Wrap(name: string, node: Node) { + return new NamedNode(name, node); + } +} + diff --git a/src/database/core/snap/childSet.ts b/src/database/core/snap/childSet.ts new file mode 100644 index 00000000000..e62fa02b79f --- /dev/null +++ b/src/database/core/snap/childSet.ts @@ -0,0 +1,119 @@ +import { LLRBNode } from "../util/SortedMap"; +import { SortedMap } from "../util/SortedMap"; + +const LOG_2 = Math.log(2); + +/** + * @param {number} length + * @constructor + */ +class Base12Num { + count; + current_; + bits_; + + constructor(length) { + var logBase2 = function(num) { + return parseInt((Math.log(num) / LOG_2 as any), 10); + }; + var bitMask = function(bits) { + return parseInt(Array(bits + 1).join('1'), 2); + }; + this.count = logBase2(length + 1); + this.current_ = this.count - 1; + var mask = bitMask(this.count); + this.bits_ = (length + 1) & mask; + } + + /** + * @return {boolean} + */ + nextBitIsOne() { + //noinspection JSBitwiseOperatorUsage + var result = !(this.bits_ & (0x1 << this.current_)); + this.current_--; + return result; + }; +} + +/** + * Takes a list of child nodes and constructs a SortedSet using the given comparison + * function + * + * Uses the algorithm described in the paper linked here: + * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.46.1458 + * + * @template K, V + * @param {Array.} childList Unsorted list of children + * @param {function(!NamedNode, !NamedNode):number} cmp The comparison method to be used + * @param {(function(NamedNode):K)=} keyFn An optional function to extract K from a node wrapper, if K's + * type is not NamedNode + * @param {(function(K, K):number)=} mapSortFn An optional override for comparator used by the generated sorted map + * @return {SortedMap.} + */ +export const buildChildSet = function(childList, cmp, keyFn?, mapSortFn?) { + childList.sort(cmp); + + var buildBalancedTree = function(low, high) { + var length = high - low; + if (length == 0) { + return null; + } else if (length == 1) { + var namedNode = childList[low]; + var key = keyFn ? keyFn(namedNode) : namedNode; + return new LLRBNode(key, namedNode.node, LLRBNode.BLACK, null, null); + } else { + var middle = parseInt((length / 2 as any), 10) + low; + var left = buildBalancedTree(low, middle); + var right = buildBalancedTree(middle + 1, high); + namedNode = childList[middle]; + key = keyFn ? 
keyFn(namedNode) : namedNode; + return new LLRBNode(key, namedNode.node, LLRBNode.BLACK, left, right); + } + }; + + var buildFrom12Array = function(base12) { + var node = null; + var root = null; + var index = childList.length; + + var buildPennant = function(chunkSize, color) { + var low = index - chunkSize; + var high = index; + index -= chunkSize; + var childTree = buildBalancedTree(low + 1, high); + var namedNode = childList[low]; + var key = keyFn ? keyFn(namedNode) : namedNode; + attachPennant(new LLRBNode(key, namedNode.node, color, null, childTree)); + }; + + var attachPennant = function(pennant) { + if (node) { + node.left = pennant; + node = pennant; + } else { + root = pennant; + node = pennant; + } + }; + + for (var i = 0; i < base12.count; ++i) { + var isOne = base12.nextBitIsOne(); + // The number of nodes taken in each slice is 2^(arr.length - (i + 1)) + var chunkSize = Math.pow(2, base12.count - (i + 1)); + if (isOne) { + buildPennant(chunkSize, LLRBNode.BLACK); + } else { + // current == 2 + buildPennant(chunkSize, LLRBNode.BLACK); + buildPennant(chunkSize, LLRBNode.RED); + } + } + return root; + }; + + var base12 = new Base12Num(childList.length); + var root = buildFrom12Array(base12); + + return new SortedMap(mapSortFn || cmp, root); +}; \ No newline at end of file diff --git a/src/database/core/snap/comparators.ts b/src/database/core/snap/comparators.ts new file mode 100644 index 00000000000..89fdbc03d2e --- /dev/null +++ b/src/database/core/snap/comparators.ts @@ -0,0 +1,9 @@ +import { nameCompare } from "../util/util"; + +export function NAME_ONLY_COMPARATOR(left, right) { + return nameCompare(left.name, right.name); +}; + +export function NAME_COMPARATOR(left, right) { + return nameCompare(left, right); +}; diff --git a/src/database/core/snap/indexes/Index.ts b/src/database/core/snap/indexes/Index.ts new file mode 100644 index 00000000000..3df5f420a2b --- /dev/null +++ b/src/database/core/snap/indexes/Index.ts @@ -0,0 +1,73 @@ +import { Node, NamedNode } from "../Node"; +import { MIN_NAME, MAX_NAME } from "../../util/util"; + +/** + * + * @constructor + */ +export abstract class Index { + /** + * @param {!NamedNode} a + * @param {!NamedNode} b + * @return {number} + */ + abstract compare(a: NamedNode, b: NamedNode): number; + + /** + * @param {!Node} node + * @return {boolean} + */ + abstract isDefinedOn(node: Node): boolean; + + + /** + * @return {function(!NamedNode, !NamedNode):number} A standalone comparison function for + * this index + */ + getCompare() { + return this.compare.bind(this); + }; + /** + * Given a before and after value for a node, determine if the indexed value has changed. Even if they are different, + * it's possible that the changes are isolated to parts of the snapshot that are not indexed. 
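+ * The base implementation below wraps both nodes under MIN_NAME and reports a change
+ * whenever this.compare() considers them unequal; subclasses (KeyIndex, PriorityIndex,
+ * ValueIndex) override this with index-specific checks.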
+ * + * @param {!Node} oldNode + * @param {!Node} newNode + * @return {boolean} True if the portion of the snapshot being indexed changed between oldNode and newNode + */ + indexedValueChanged(oldNode, newNode) { + var oldWrapped = new NamedNode(MIN_NAME, oldNode); + var newWrapped = new NamedNode(MIN_NAME, newNode); + return this.compare(oldWrapped, newWrapped) !== 0; + }; + + + /** + * @return {!NamedNode} a node wrapper that will sort equal to or less than + * any other node wrapper, using this index + */ + minPost() { + return (NamedNode as any).MIN; + }; + + + /** + * @return {!NamedNode} a node wrapper that will sort greater than or equal to + * any other node wrapper, using this index + */ + abstract maxPost(): NamedNode; + + + /** + * @param {*} indexValue + * @param {string} name + * @return {!NamedNode} + */ + abstract makePost(indexValue: object, name: string): NamedNode; + + + /** + * @return {!string} String representation for inclusion in a query spec + */ + abstract toString(): string; +}; diff --git a/src/database/core/snap/indexes/KeyIndex.ts b/src/database/core/snap/indexes/KeyIndex.ts new file mode 100644 index 00000000000..d000dd7b8c0 --- /dev/null +++ b/src/database/core/snap/indexes/KeyIndex.ts @@ -0,0 +1,83 @@ +import { Index } from "./Index"; +import { Node, NamedNode } from "../Node"; +import { nameCompare, MAX_NAME } from "../../util/util"; +import { assert, assertionError } from "../../../../utils/assert"; +import { ChildrenNode } from "../ChildrenNode"; + +let __EMPTY_NODE; + +export class KeyIndex extends Index { + static get __EMPTY_NODE() { + return __EMPTY_NODE; + } + static set __EMPTY_NODE(val) { + __EMPTY_NODE = val; + } + constructor() { + super(); + } + /** + * @inheritDoc + */ + compare(a, b) { + return nameCompare(a.name, b.name); + }; + + + /** + * @inheritDoc + */ + isDefinedOn(node: Node): boolean { + // We could probably return true here (since every node has a key), but it's never called + // so just leaving unimplemented for now. + throw assertionError('KeyIndex.isDefinedOn not expected to be called.'); + }; + + + /** + * @inheritDoc + */ + indexedValueChanged(oldNode, newNode) { + return false; // The key for a node never changes. + }; + + + /** + * @inheritDoc + */ + minPost() { + return (NamedNode as any).MIN; + }; + + + /** + * @inheritDoc + */ + maxPost() { + // TODO: This should really be created once and cached in a static property, but + // NamedNode isn't defined yet, so I can't use it in a static. Bleh. + return new NamedNode(MAX_NAME, __EMPTY_NODE); + }; + + + /** + * @param {*} indexValue + * @param {string} name + * @return {!NamedNode} + */ + makePost(indexValue, name) { + assert(typeof indexValue === 'string', 'KeyIndex indexValue must always be a string.'); + // We just use empty node, but it'll never be compared, since our comparator only looks at name. 
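+ // e.g. makePost('user123', 'unusedName') simply yields new NamedNode('user123', __EMPTY_NODE);
+ // the name argument is ignored here.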
+ return new NamedNode(indexValue, __EMPTY_NODE); + }; + + + /** + * @return {!string} String representation for inclusion in a query spec + */ + toString() { + return '.key'; + }; +}; + +export const KEY_INDEX = new KeyIndex(); \ No newline at end of file diff --git a/src/database/core/snap/indexes/PathIndex.ts b/src/database/core/snap/indexes/PathIndex.ts new file mode 100644 index 00000000000..1e2da0823ad --- /dev/null +++ b/src/database/core/snap/indexes/PathIndex.ts @@ -0,0 +1,86 @@ +import { assert } from "../../../../utils/assert"; +import { nameCompare, MAX_NAME } from "../../util/util"; +import { Index } from "./Index"; +import { ChildrenNode, MAX_NODE } from "../ChildrenNode"; +import { NamedNode } from "../Node"; +import { nodeFromJSON } from "../nodeFromJSON"; + +/** + * @param {!Path} indexPath + * @constructor + * @extends {Index} + */ +export class PathIndex extends Index { + indexPath_; + + constructor(indexPath) { + super(); + + assert(!indexPath.isEmpty() && indexPath.getFront() !== '.priority', + 'Can\'t create PathIndex with empty path or .priority key'); + /** + * + * @type {!Path} + * @private + */ + this.indexPath_ = indexPath; + }; + /** + * @param {!Node} snap + * @return {!Node} + * @protected + */ + extractChild(snap) { + return snap.getChild(this.indexPath_); + }; + + + /** + * @inheritDoc + */ + isDefinedOn(node) { + return !node.getChild(this.indexPath_).isEmpty(); + }; + + + /** + * @inheritDoc + */ + compare(a, b) { + var aChild = this.extractChild(a.node); + var bChild = this.extractChild(b.node); + var indexCmp = aChild.compareTo(bChild); + if (indexCmp === 0) { + return nameCompare(a.name, b.name); + } else { + return indexCmp; + } + }; + + + /** + * @inheritDoc + */ + makePost(indexValue, name) { + var valueNode = nodeFromJSON(indexValue); + var node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, valueNode); + return new NamedNode(name, node); + }; + + + /** + * @inheritDoc + */ + maxPost() { + var node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, MAX_NODE); + return new NamedNode(MAX_NAME, node); + }; + + + /** + * @inheritDoc + */ + toString() { + return this.indexPath_.slice().join('/'); + }; +} \ No newline at end of file diff --git a/src/database/core/snap/indexes/PriorityIndex.ts b/src/database/core/snap/indexes/PriorityIndex.ts new file mode 100644 index 00000000000..471c58c6965 --- /dev/null +++ b/src/database/core/snap/indexes/PriorityIndex.ts @@ -0,0 +1,95 @@ +import { Index } from './Index'; +import { nameCompare, MAX_NAME } from "../../util/util"; +import { NamedNode } from "../Node"; +import { LeafNode } from "../LeafNode"; + +let nodeFromJSON; +let MAX_NODE; + +export function setNodeFromJSON(val) { + nodeFromJSON = val; +} + +export function setMaxNode(val) { + MAX_NODE = val; +} + + +/** + * @constructor + * @extends {Index} + * @private + */ +export class PriorityIndex extends Index { + + constructor() { + super(); + } + + /** + * @inheritDoc + */ + compare(a, b) { + var aPriority = a.node.getPriority(); + var bPriority = b.node.getPriority(); + var indexCmp = aPriority.compareTo(bPriority); + if (indexCmp === 0) { + return nameCompare(a.name, b.name); + } else { + return indexCmp; + } + }; + + + /** + * @inheritDoc + */ + isDefinedOn(node) { + return !node.getPriority().isEmpty(); + }; + + + /** + * @inheritDoc + */ + indexedValueChanged(oldNode, newNode) { + return !oldNode.getPriority().equals(newNode.getPriority()); + }; + + + /** + * @inheritDoc + */ + minPost() { + return (NamedNode as any).MIN; + }; + + + /** + * 
@inheritDoc + */ + maxPost() { + return new NamedNode(MAX_NAME, new LeafNode('[PRIORITY-POST]', MAX_NODE)); + }; + + + /** + * @param {*} indexValue + * @param {string} name + * @return {!NamedNode} + */ + makePost(indexValue, name) { + var priorityNode = nodeFromJSON(indexValue); + return new NamedNode(name, new LeafNode('[PRIORITY-POST]', priorityNode)); + }; + + + /** + * @return {!string} String representation for inclusion in a query spec + */ + toString() { + return '.priority'; + }; +}; + +export const PRIORITY_INDEX = new PriorityIndex(); diff --git a/src/database/core/snap/indexes/ValueIndex.ts b/src/database/core/snap/indexes/ValueIndex.ts new file mode 100644 index 00000000000..391c2a4678c --- /dev/null +++ b/src/database/core/snap/indexes/ValueIndex.ts @@ -0,0 +1,74 @@ +import { Index } from "./Index"; +import { NamedNode } from "../Node"; +import { nameCompare } from "../../util/util"; +import { nodeFromJSON } from "../nodeFromJSON"; + +/** + * @constructor + * @extends {Index} + * @private + */ +export class ValueIndex extends Index { + constructor() { + super(); + } + + /** + * @inheritDoc + */ + compare(a, b) { + var indexCmp = a.node.compareTo(b.node); + if (indexCmp === 0) { + return nameCompare(a.name, b.name); + } else { + return indexCmp; + } + }; + + /** + * @inheritDoc + */ + isDefinedOn(node) { + return true; + }; + + /** + * @inheritDoc + */ + indexedValueChanged(oldNode, newNode) { + return !oldNode.equals(newNode); + }; + + /** + * @inheritDoc + */ + minPost() { + return (NamedNode as any).MIN; + }; + + /** + * @inheritDoc + */ + maxPost() { + return (NamedNode as any).MAX; + }; + + /** + * @param {*} indexValue + * @param {string} name + * @return {!NamedNode} + */ + makePost(indexValue, name) { + var valueNode = nodeFromJSON(indexValue); + return new NamedNode(name, valueNode); + }; + + /** + * @return {!string} String representation for inclusion in a query spec + */ + toString() { + return '.value'; + }; +}; + +export const VALUE_INDEX = new ValueIndex(); \ No newline at end of file diff --git a/src/database/core/snap/nodeFromJSON.ts b/src/database/core/snap/nodeFromJSON.ts new file mode 100644 index 00000000000..e35e4fef150 --- /dev/null +++ b/src/database/core/snap/nodeFromJSON.ts @@ -0,0 +1,95 @@ +import { ChildrenNode } from "./ChildrenNode"; +import { LeafNode } from "./LeafNode"; +import { NamedNode } from "./Node"; +import { forEach, contains } from "../../../utils/obj"; +import { assert } from "../../../utils/assert"; +import { buildChildSet } from "./childSet"; +import { NAME_COMPARATOR, NAME_ONLY_COMPARATOR } from "./comparators"; +import { IndexMap } from "./IndexMap"; +import { PRIORITY_INDEX, setNodeFromJSON } from "./indexes/PriorityIndex"; + +const USE_HINZE = true; + +/** + * Constructs a snapshot node representing the passed JSON and returns it. + * @param {*} json JSON to create a node for. + * @param {?string|?number=} opt_priority Optional priority to use. This will be ignored if the + * passed JSON contains a .priority property. + * @return {!Node} + */ +export function nodeFromJSON(json, priority?) { + if (json === null) { + return ChildrenNode.EMPTY_NODE; + } + + priority = priority !== undefined ? 
priority : null; + if (typeof json === 'object' && '.priority' in json) { + priority = json['.priority']; + } + + assert( + priority === null || + typeof priority === 'string' || + typeof priority === 'number' || + (typeof priority === 'object' && '.sv' in priority), + 'Invalid priority type found: ' + (typeof priority) + ); + + if (typeof json === 'object' && '.value' in json && json['.value'] !== null) { + json = json['.value']; + } + + // Valid leaf nodes include non-objects or server-value wrapper objects + if (typeof json !== 'object' || '.sv' in json) { + var jsonLeaf = /** @type {!(string|number|boolean|Object)} */ (json); + return new LeafNode(jsonLeaf, nodeFromJSON(priority)); + } + + if (!(json instanceof Array) && USE_HINZE) { + var children = []; + var childrenHavePriority = false; + var hinzeJsonObj = /** @type {!Object} */ (json); + forEach(hinzeJsonObj, function(key, child) { + if (typeof key !== 'string' || key.substring(0, 1) !== '.') { // Ignore metadata nodes + var childNode = nodeFromJSON(hinzeJsonObj[key]); + if (!childNode.isEmpty()) { + childrenHavePriority = childrenHavePriority || !childNode.getPriority().isEmpty(); + children.push(new NamedNode(key, childNode)); + } + } + }); + + if (children.length == 0) { + return ChildrenNode.EMPTY_NODE; + } + + var childSet = /**@type {!SortedMap.} */ (buildChildSet( + children, NAME_ONLY_COMPARATOR, function(namedNode) { return namedNode.name; }, + NAME_COMPARATOR + )); + if (childrenHavePriority) { + var sortedChildSet = buildChildSet(children, PRIORITY_INDEX.getCompare()); + return new ChildrenNode(childSet, nodeFromJSON(priority), + new IndexMap({'.priority': sortedChildSet}, {'.priority': PRIORITY_INDEX})); + } else { + return new ChildrenNode(childSet, nodeFromJSON(priority), + IndexMap.Default); + } + } else { + var node = ChildrenNode.EMPTY_NODE; + var jsonObj = /** @type {!Object} */ (json); + forEach(jsonObj, function(key, childData) { + if (contains(jsonObj, key)) { + if (key.substring(0, 1) !== '.') { // ignore metadata nodes. + var childNode = nodeFromJSON(childData); + if (childNode.isLeafNode() || !childNode.isEmpty()) + node = node.updateImmediateChild(key, childNode); + } + } + }); + + return node.updatePriority(nodeFromJSON(priority)); + } +}; + +setNodeFromJSON(nodeFromJSON); \ No newline at end of file diff --git a/src/database/core/snap/snap.ts b/src/database/core/snap/snap.ts new file mode 100644 index 00000000000..63baab5ea48 --- /dev/null +++ b/src/database/core/snap/snap.ts @@ -0,0 +1,43 @@ +import { assert } from '../../../utils/assert'; +import { + doubleToIEEE754String, +} from "../util/util"; +import { contains } from "../../../utils/obj"; +import { NamedNode } from "./Node"; + +let MAX_NODE; + +export function setMaxNode(val) { + MAX_NODE = val; +} + +/** + * @param {(!string|!number)} priority + * @return {!string} + */ +export const priorityHashText = function(priority) { + if (typeof priority === 'number') + return 'number:' + doubleToIEEE754String(priority); + else + return 'string:' + priority; +}; + +/** + * Validates that a priority snapshot Node is valid. 
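+ * A valid priority is a leaf holding a string, a number, or a server-value
+ * ({ '.sv': ... }) object, or else the empty node or MAX_NODE; a priority node may not
+ * itself carry a priority.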
+ * + * @param {!Node} priorityNode + */ +export const validatePriorityNode = function(priorityNode) { + if (priorityNode.isLeafNode()) { + var val = priorityNode.val(); + assert(typeof val === 'string' || typeof val === 'number' || + (typeof val === 'object' && contains(val, '.sv')), + 'Priority must be a string or number.'); + } else { + assert(priorityNode === MAX_NODE || priorityNode.isEmpty(), + 'priority of unexpected type.'); + } + // Don't call getPriority() on MAX_NODE to avoid hitting assertion. + assert(priorityNode === MAX_NODE || priorityNode.getPriority().isEmpty(), + "Priority nodes can't have a priority of their own."); +}; diff --git a/src/database/core/stats/StatsCollection.ts b/src/database/core/stats/StatsCollection.ts new file mode 100644 index 00000000000..7dfb813a194 --- /dev/null +++ b/src/database/core/stats/StatsCollection.ts @@ -0,0 +1,27 @@ +import { deepCopy } from '../../../utils/deep_copy'; +import { contains } from '../../../utils/obj'; + +/** + * Tracks a collection of stats. + * + * @constructor + */ +export class StatsCollection { + counters_: object; + constructor() { + this.counters_ = { }; + } + incrementCounter(name, amount) { + if (amount === undefined) + amount = 1; + + if (!contains(this.counters_, name)) + this.counters_[name] = 0; + + this.counters_[name] += amount; + } + get() { + return deepCopy(this.counters_); + }; +} + diff --git a/src/database/core/stats/StatsListener.ts b/src/database/core/stats/StatsListener.ts new file mode 100644 index 00000000000..9b829256f5d --- /dev/null +++ b/src/database/core/stats/StatsListener.ts @@ -0,0 +1,29 @@ +import { clone, forEach } from '../../../utils/obj'; + +/** + * Returns the delta from the previous call to get stats. + * + * @param collection_ The collection to "listen" to. 
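+ * For example, if a counter read 5 on the previous call to get() and reads 8 now,
+ * the delta returned for that counter is 3.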
+ * @constructor + */ +export class StatsListener { + private last_ = null; + + constructor(private collection_) { + } + + get() { + const newStats = this.collection_.get(); + + const delta = clone(newStats); + if (this.last_) { + forEach(this.last_, (stat, value) => { + delta[stat] = delta[stat] - value; + }); + } + this.last_ = newStats; + + return delta; + } +} + diff --git a/src/database/core/stats/StatsManager.ts b/src/database/core/stats/StatsManager.ts new file mode 100644 index 00000000000..a47fe0a4961 --- /dev/null +++ b/src/database/core/stats/StatsManager.ts @@ -0,0 +1,22 @@ +import { StatsCollection } from "./StatsCollection"; + +export const StatsManager = { + collections_:{ }, + reporters_:{ }, + getCollection:function(repoInfo) { + var hashString = repoInfo.toString(); + if (!this.collections_[hashString]) { + this.collections_[hashString] = new StatsCollection(); + } + return this.collections_[hashString]; + }, + getOrCreateReporter:function(repoInfo, creatorFunction) { + var hashString = repoInfo.toString(); + if (!this.reporters_[hashString]) { + this.reporters_[hashString] = creatorFunction(); + } + + return this.reporters_[hashString]; + } +}; + diff --git a/src/database/core/stats/StatsReporter.ts b/src/database/core/stats/StatsReporter.ts new file mode 100644 index 00000000000..daa88fd1a0d --- /dev/null +++ b/src/database/core/stats/StatsReporter.ts @@ -0,0 +1,54 @@ +import { contains, forEach } from '../../../utils/obj'; +import { setTimeoutNonBlocking } from "../util/util"; +import { StatsListener } from "./StatsListener"; + +// Assuming some apps may have a short amount of time on page, and a bulk of firebase operations probably +// happen on page load, we try to report our first set of stats pretty quickly, but we wait at least 10 +// seconds to try to ensure the Firebase connection is established / settled. +const FIRST_STATS_MIN_TIME = 10 * 1000; +const FIRST_STATS_MAX_TIME = 30 * 1000; + +// We'll continue to report stats on average every 5 minutes. +const REPORT_STATS_INTERVAL = 5 * 60 * 1000; + +/** + * + * @param collection + * @param server_ + * @constructor + */ +export class StatsReporter { + private statsListener_; + private statsToReport_ = {}; + + constructor(collection, private server_: any) { + this.statsListener_ = new StatsListener(collection); + + const timeout = FIRST_STATS_MIN_TIME + (FIRST_STATS_MAX_TIME - FIRST_STATS_MIN_TIME) * Math.random(); + setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(timeout)); + } + + includeStat(stat) { + this.statsToReport_[stat] = true; + } + + private reportStats_() { + const stats = this.statsListener_.get(); + const reportedStats = {}; + let haveStatsToReport = false; + + forEach(stats, (stat, value) => { + if (value > 0 && contains(this.statsToReport_, stat)) { + reportedStats[stat] = value; + haveStatsToReport = true; + } + }); + + if (haveStatsToReport) { + this.server_.reportStats(reportedStats); + } + + // queue our next run. + setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(Math.random() * 2 * REPORT_STATS_INTERVAL)); + } +} diff --git a/src/database/core/storage/DOMStorageWrapper.ts b/src/database/core/storage/DOMStorageWrapper.ts new file mode 100644 index 00000000000..637f7faa116 --- /dev/null +++ b/src/database/core/storage/DOMStorageWrapper.ts @@ -0,0 +1,70 @@ +import { jsonEval, stringify } from "../../../utils/json"; + +/** + * Wraps a DOM Storage object and: + * - automatically encode objects as JSON strings before storing them to allow us to store arbitrary types. 
+ * - prefixes names with "firebase:" to avoid collisions with app data. + * + * We automatically (see storage.js) create two such wrappers, one for sessionStorage, + * and one for localStorage. + * + * @param {Storage} domStorage The underlying storage object (e.g. localStorage or sessionStorage) + * @constructor + */ +export class DOMStorageWrapper { + prefix_; + domStorage_; + + constructor(domStorage) { + this.domStorage_ = domStorage; + + // Use a prefix to avoid collisions with other stuff saved by the app. + this.prefix_ = 'firebase:'; + }; + + /** + * @param {string} key The key to save the value under + * @param {?Object} value The value being stored, or null to remove the key. + */ + set(key, value) { + if (value == null) { + this.domStorage_.removeItem(this.prefixedName_(key)); + } else { + this.domStorage_.setItem(this.prefixedName_(key), stringify(value)); + } + }; + + /** + * @param {string} key + * @return {*} The value that was stored under this key, or null + */ + get(key) { + var storedVal = this.domStorage_.getItem(this.prefixedName_(key)); + if (storedVal == null) { + return null; + } else { + return jsonEval(storedVal); + } + }; + + /** + * @param {string} key + */ + remove(key) { + this.domStorage_.removeItem(this.prefixedName_(key)); + }; + + isInMemoryStorage; + + /** + * @param {string} name + * @return {string} + */ + prefixedName_(name) { + return this.prefix_ + name; + }; + + toString() { + return this.domStorage_.toString(); + }; +} diff --git a/src/database/core/storage/MemoryStorage.ts b/src/database/core/storage/MemoryStorage.ts new file mode 100644 index 00000000000..e0fba630d3a --- /dev/null +++ b/src/database/core/storage/MemoryStorage.ts @@ -0,0 +1,34 @@ +import { contains } from "../../../utils/obj"; + +/** + * An in-memory storage implementation that matches the API of DOMStorageWrapper + * (TODO: create interface for both to implement). + * + * @constructor + */ +export class MemoryStorage { + cache_: object; + constructor() { + this.cache_ = {}; + } + set(key, value) { + if (value == null) { + delete this.cache_[key]; + } else { + this.cache_[key] = value; + } + }; + + get(key) { + if (contains(this.cache_, key)) { + return this.cache_[key]; + } + return null; + }; + + remove(key) { + delete this.cache_[key]; + }; + + isInMemoryStorage = true; +} diff --git a/src/database/core/storage/storage.ts b/src/database/core/storage/storage.ts new file mode 100644 index 00000000000..db63fb0d580 --- /dev/null +++ b/src/database/core/storage/storage.ts @@ -0,0 +1,38 @@ +import { DOMStorageWrapper } from './DOMStorageWrapper'; +import { MemoryStorage } from './MemoryStorage'; + +/** +* Helper to create a DOMStorageWrapper or else fall back to MemoryStorage. +* TODO: Once MemoryStorage and DOMStorageWrapper have a shared interface this method annotation should change +* to reflect this type +* +* @param {string} domStorageName Name of the underlying storage object +* (e.g. 'localStorage' or 'sessionStorage'). +* @return {?} Turning off type information until a common interface is defined. +*/ +const createStoragefor = function(domStorageName) { + try { + // NOTE: just accessing "localStorage" or "window['localStorage']" may throw a security exception, + // so it must be inside the try/catch. + if (typeof window !== 'undefined' && typeof window[domStorageName] !== 'undefined') { + // Need to test cache. 
Just because it's here doesn't mean it works + var domStorage = window[domStorageName]; + domStorage.setItem('firebase:sentinel', 'cache'); + domStorage.removeItem('firebase:sentinel'); + return new DOMStorageWrapper(domStorage); + } + } catch (e) { + } + + // Failed to create wrapper. Just return in-memory storage. + // TODO: log? + return new MemoryStorage(); +}; + + +/** A storage object that lasts across sessions */ +export const PersistentStorage = createStoragefor('localStorage'); + + +/** A storage object that only lasts one session */ +export const SessionStorage = createStoragefor('sessionStorage'); diff --git a/src/database/core/util/CountedSet.ts b/src/database/core/util/CountedSet.ts new file mode 100644 index 00000000000..6764d324956 --- /dev/null +++ b/src/database/core/util/CountedSet.ts @@ -0,0 +1,91 @@ +import { isEmpty, getCount, forEach, contains } from "../../../utils/obj"; + +/** + * Implements a set with a count of elements. + * + */ +export class CountedSet { + set: object; + + /** + * @template K, V + */ + constructor() { + this.set = {}; + } + + /** + * @param {!K} item + * @param {V} val + */ + add(item, val) { + this.set[item] = val !== null ? val : true; + } + + /** + * @param {!K} key + * @return {boolean} + */ + contains(key) { + return contains(this.set, key); + } + + /** + * @param {!K} item + * @return {V} + */ + get(item) { + return this.contains(item) ? this.set[item] : undefined; + } + + /** + * @param {!K} item + */ + remove(item) { + delete this.set[item]; + } + + /** + * Deletes everything in the set + */ + clear() { + this.set = {}; + } + + /** + * True if there's nothing in the set + * @return {boolean} + */ + isEmpty() { + return isEmpty(this.set); + } + + /** + * @return {number} The number of items in the set + */ + count() { + return getCount(this.set); + } + + /** + * Run a function on each k,v pair in the set + * @param {function(K, V)} fn + */ + each(fn) { + forEach(this.set, function(k, v) { + fn(k, v); + }); + } + + /** + * Mostly for debugging + * @return {Array.} The keys present in this CountedSet + */ + keys() { + var keys = []; + forEach(this.set, function(k, v) { + keys.push(k); + }); + return keys; + } +}; // end fb.core.util.CountedSet diff --git a/src/database/core/util/EventEmitter.ts b/src/database/core/util/EventEmitter.ts new file mode 100644 index 00000000000..3d4a5ed88c1 --- /dev/null +++ b/src/database/core/util/EventEmitter.ts @@ -0,0 +1,74 @@ +import { assert } from "../../../utils/assert"; + +/** + * Base class to be used if you want to emit events. Call the constructor with + * the set of allowed event names. + */ +export abstract class EventEmitter { + allowedEvents_; + listeners_; + /** + * @param {!Array.} allowedEvents + */ + constructor(allowedEvents: Array) { + assert(Array.isArray(allowedEvents) && allowedEvents.length > 0, + 'Requires a non-empty array'); + this.allowedEvents_ = allowedEvents; + this.listeners_ = {}; + } + + /** + * To be overridden by derived classes in order to fire an initial event when + * somebody subscribes for data. + * + * @param {!string} eventType + * @return {Array.<*>} Array of parameters to trigger initial event with. + */ + abstract getInitialEvent(eventType: string); + + /** + * To be called by derived classes to trigger events. + * @param {!string} eventType + * @param {...*} var_args + */ + trigger(eventType, var_args) { + // Clone the list, since callbacks could add/remove listeners. 
+ var listeners = [ + ...this.listeners_[eventType] + ]; + + for (var i = 0; i < listeners.length; i++) { + listeners[i].callback.apply(listeners[i].context, Array.prototype.slice.call(arguments, 1)); + } + } + + on(eventType, callback, context) { + this.validateEventType_(eventType); + this.listeners_[eventType] = this.listeners_[eventType] || []; + this.listeners_[eventType].push({callback: callback, context: context }); + + var eventData = this.getInitialEvent(eventType); + if (eventData) { + callback.apply(context, eventData); + } + } + + off(eventType, callback, context) { + this.validateEventType_(eventType); + var listeners = this.listeners_[eventType] || []; + for (var i = 0; i < listeners.length; i++) { + if (listeners[i].callback === callback && (!context || context === listeners[i].context)) { + listeners.splice(i, 1); + return; + } + } + } + + validateEventType_(eventType) { + assert(this.allowedEvents_.find(function(et) { + return et === eventType; + }), + 'Unknown event: ' + eventType + ); + } +}; // end fb.core.util.EventEmitter diff --git a/src/database/core/util/ImmutableTree.ts b/src/database/core/util/ImmutableTree.ts new file mode 100644 index 00000000000..ced2fbf6bc7 --- /dev/null +++ b/src/database/core/util/ImmutableTree.ts @@ -0,0 +1,342 @@ +import { SortedMap } from "./SortedMap"; +import { Path } from "./Path"; +import { stringCompare } from "./util"; +import { forEach } from "../../../utils/obj"; + +let emptyChildrenSingleton; + +/** + * A tree with immutable elements. + */ +export class ImmutableTree { + value; + children; + + static Empty = new ImmutableTree(null); + + /** + * Singleton empty children collection. + * + * @const + * @type {!SortedMap.>} + * @private + */ + static get EmptyChildren_() { + if (!emptyChildrenSingleton) { + emptyChildrenSingleton = new SortedMap(stringCompare); + } + return emptyChildrenSingleton; + } + + /** + * @template T + * @param {!Object.} obj + * @return {!ImmutableTree.} + */ + static fromObject(obj) { + var tree = ImmutableTree.Empty; + forEach(obj, function(childPath, childSnap) { + tree = tree.set(new Path(childPath), childSnap); + }); + return tree; + } + + /** + * @template T + * @param {?T} value + * @param {SortedMap.>=} opt_children + */ + constructor(value, children?) { + /** + * @const + * @type {?T} + */ + this.value = value; + + /** + * @const + * @type {!SortedMap.>} + */ + this.children = children || ImmutableTree.EmptyChildren_; + } + + /** + * True if the value is empty and there are no children + * @return {boolean} + */ + isEmpty() { + return this.value === null && this.children.isEmpty(); + } + + /** + * Given a path and predicate, return the first node and the path to that node + * where the predicate returns true. + * + * TODO Do a perf test -- If we're creating a bunch of {path: value:} objects + * on the way back out, it may be better to pass down a pathSoFar obj. 
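+ * Concretely: starting with this node's own value and walking down relativePath, the
+ * shallowest value for which the predicate returns true is returned together with its
+ * path relative to this tree; if no value along the path matches, null is returned.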
+ * + * @param {!Path} relativePath The remainder of the path + * @param {function(T):boolean} predicate The predicate to satisfy to return a + * node + * @return {?{path:!Path, value:!T}} + */ + findRootMostMatchingPathAndValue(relativePath: Path, predicate) { + if (this.value != null && predicate(this.value)) { + return {path: Path.Empty, value: this.value}; + } else { + if (relativePath.isEmpty()) { + return null; + } else { + var front = relativePath.getFront(); + var child = this.children.get(front); + if (child !== null) { + var childExistingPathAndValue = + child.findRootMostMatchingPathAndValue(relativePath.popFront(), + predicate); + if (childExistingPathAndValue != null) { + var fullPath = new Path(front).child(childExistingPathAndValue.path); + return {path: fullPath, value: childExistingPathAndValue.value}; + } else { + return null; + } + } else { + return null; + } + } + } + } + + /** + * Find, if it exists, the shortest subpath of the given path that points a defined + * value in the tree + * @param {!Path} relativePath + * @return {?{path: !Path, value: !T}} + */ + findRootMostValueAndPath(relativePath) { + return this.findRootMostMatchingPathAndValue(relativePath, function() { return true; }); + } + + /** + * @param {!Path} relativePath + * @return {!ImmutableTree.} The subtree at the given path + */ + subtree(relativePath) { + if (relativePath.isEmpty()) { + return this; + } else { + var front = relativePath.getFront(); + var childTree = this.children.get(front); + if (childTree !== null) { + return childTree.subtree(relativePath.popFront()); + } else { + return ImmutableTree.Empty; + } + } + } + + /** + * Sets a value at the specified path. + * + * @param {!Path} relativePath Path to set value at. + * @param {?T} toSet Value to set. + * @return {!ImmutableTree.} Resulting tree. + */ + set(relativePath, toSet) { + if (relativePath.isEmpty()) { + return new ImmutableTree(toSet, this.children); + } else { + var front = relativePath.getFront(); + var child = this.children.get(front) || ImmutableTree.Empty; + var newChild = child.set(relativePath.popFront(), toSet); + var newChildren = this.children.insert(front, newChild); + return new ImmutableTree(this.value, newChildren); + } + } + + /** + * Removes the value at the specified path. + * + * @param {!Path} relativePath Path to value to remove. + * @return {!ImmutableTree.} Resulting tree. + */ + remove(relativePath) { + if (relativePath.isEmpty()) { + if (this.children.isEmpty()) { + return ImmutableTree.Empty; + } else { + return new ImmutableTree(null, this.children); + } + } else { + var front = relativePath.getFront(); + var child = this.children.get(front); + if (child) { + var newChild = child.remove(relativePath.popFront()); + var newChildren; + if (newChild.isEmpty()) { + newChildren = this.children.remove(front); + } else { + newChildren = this.children.insert(front, newChild); + } + if (this.value === null && newChildren.isEmpty()) { + return ImmutableTree.Empty; + } else { + return new ImmutableTree(this.value, newChildren); + } + } else { + return this; + } + } + } + + /** + * Gets a value from the tree. + * + * @param {!Path} relativePath Path to get value for. + * @return {?T} Value at path, or null. 
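+ *
+ * Illustrative usage:
+ *   const t = ImmutableTree.Empty.set(new Path('a/b'), 1);
+ *   t.get(new Path('a/b'));  // 1
+ *   t.get(new Path('a'));    // null (no value stored at the intermediate node)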
+ */ + get(relativePath) { + if (relativePath.isEmpty()) { + return this.value; + } else { + var front = relativePath.getFront(); + var child = this.children.get(front); + if (child) { + return child.get(relativePath.popFront()); + } else { + return null; + } + } + } + + /** + * Replace the subtree at the specified path with the given new tree. + * + * @param {!Path} relativePath Path to replace subtree for. + * @param {!ImmutableTree} newTree New tree. + * @return {!ImmutableTree} Resulting tree. + */ + setTree(relativePath, newTree) { + if (relativePath.isEmpty()) { + return newTree; + } else { + var front = relativePath.getFront(); + var child = this.children.get(front) || ImmutableTree.Empty; + var newChild = child.setTree(relativePath.popFront(), newTree); + var newChildren; + if (newChild.isEmpty()) { + newChildren = this.children.remove(front); + } else { + newChildren = this.children.insert(front, newChild); + } + return new ImmutableTree(this.value, newChildren); + } + } + + /** + * Performs a depth first fold on this tree. Transforms a tree into a single + * value, given a function that operates on the path to a node, an optional + * current value, and a map of child names to folded subtrees + * @template V + * @param {function(Path, ?T, Object.):V} fn + * @return {V} + */ + fold(fn) { + return this.fold_(Path.Empty, fn); + } + + /** + * Recursive helper for public-facing fold() method + * @template V + * @param {!Path} pathSoFar + * @param {function(Path, ?T, Object.):V} fn + * @return {V} + * @private + */ + fold_(pathSoFar, fn) { + var accum = {}; + this.children.inorderTraversal(function(childKey, childTree) { + accum[childKey] = childTree.fold_(pathSoFar.child(childKey), fn); + }); + return fn(pathSoFar, this.value, accum); + } + + /** + * Find the first matching value on the given path. Return the result of applying f to it. + * @template V + * @param {!Path} path + * @param {!function(!Path, !T):?V} f + * @return {?V} + */ + findOnPath(path, f) { + return this.findOnPath_(path, Path.Empty, f); + } + + findOnPath_(pathToFollow, pathSoFar, f) { + var result = this.value ? f(pathSoFar, this.value) : false; + if (result) { + return result; + } else { + if (pathToFollow.isEmpty()) { + return null; + } else { + var front = pathToFollow.getFront(); + var nextChild = this.children.get(front); + if (nextChild) { + return nextChild.findOnPath_(pathToFollow.popFront(), pathSoFar.child(front), f); + } else { + return null; + } + } + } + } + + foreachOnPath(path, f) { + return this.foreachOnPath_(path, Path.Empty, f); + } + + foreachOnPath_(pathToFollow, currentRelativePath, f) { + if (pathToFollow.isEmpty()) { + return this; + } else { + if (this.value) { + f(currentRelativePath, this.value); + } + var front = pathToFollow.getFront(); + var nextChild = this.children.get(front); + if (nextChild) { + return nextChild.foreachOnPath_(pathToFollow.popFront(), + currentRelativePath.child(front), f); + } else { + return ImmutableTree.Empty; + } + } + } + + /** + * Calls the given function for each node in the tree that has a value. + * + * @param {function(!Path, !T)} f A function to be called with + * the path from the root of the tree to a node, and the value at that node. + * Called in depth-first order. 
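+ * Note that children are visited before their parent's own value (see foreach_ below),
+ * so this is effectively a post-order traversal.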
+ */ + foreach(f) { + this.foreach_(Path.Empty, f); + } + + foreach_(currentRelativePath, f) { + this.children.inorderTraversal(function(childName, childTree) { + childTree.foreach_(currentRelativePath.child(childName), f); + }); + if (this.value) { + f(currentRelativePath, this.value); + } + } + + foreachChild(f) { + this.children.inorderTraversal(function(childName, childTree) { + if (childTree.value) { + f(childName, childTree.value); + } + }); + } +}; // end ImmutableTree diff --git a/src/database/core/util/NextPushId.ts b/src/database/core/util/NextPushId.ts new file mode 100644 index 00000000000..92fe6ebdb4f --- /dev/null +++ b/src/database/core/util/NextPushId.ts @@ -0,0 +1,65 @@ +import { assert } from "../../../utils/assert"; + +/** + * Fancy ID generator that creates 20-character string identifiers with the + * following properties: + * + * 1. They're based on timestamp so that they sort *after* any existing ids. + * 2. They contain 72-bits of random data after the timestamp so that IDs won't + * collide with other clients' IDs. + * 3. They sort *lexicographically* (so the timestamp is converted to characters + * that will sort properly). + * 4. They're monotonically increasing. Even if you generate more than one in + * the same timestamp, the latter ones will sort after the former ones. We do + * this by using the previous random bits but "incrementing" them by 1 (only + * in the case of a timestamp collision). + */ +export const nextPushId = (function() { + // Modeled after base64 web-safe chars, but ordered by ASCII. + var PUSH_CHARS = '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; + + // Timestamp of last push, used to prevent local collisions if you push twice + // in one ms. + var lastPushTime = 0; + + // We generate 72-bits of randomness which get turned into 12 characters and + // appended to the timestamp to prevent collisions with other clients. We + // store the last characters we generated because in the event of a collision, + // we'll use those same characters except "incremented" by one. + var lastRandChars = []; + + return function(now) { + var duplicateTime = (now === lastPushTime); + lastPushTime = now; + + var timeStampChars = new Array(8); + for (var i = 7; i >= 0; i--) { + timeStampChars[i] = PUSH_CHARS.charAt(now % 64); + // NOTE: Can't use << here because javascript will convert to int and lose + // the upper bits. + now = Math.floor(now / 64); + } + assert(now === 0, 'Cannot push at time == 0'); + + var id = timeStampChars.join(''); + + if (!duplicateTime) { + for (i = 0; i < 12; i++) { + lastRandChars[i] = Math.floor(Math.random() * 64); + } + } else { + // If the timestamp hasn't changed since last push, use the same random + // number, except incremented by 1. + for (i = 11; i >= 0 && lastRandChars[i] === 63; i--) { + lastRandChars[i] = 0; + } + lastRandChars[i]++; + } + for (i = 0; i < 12; i++) { + id += PUSH_CHARS.charAt(lastRandChars[i]); + } + assert(id.length === 20, 'nextPushId: Length should be 20.'); + + return id; + }; +})(); diff --git a/src/database/core/util/OnlineMonitor.ts b/src/database/core/util/OnlineMonitor.ts new file mode 100644 index 00000000000..286699ca99a --- /dev/null +++ b/src/database/core/util/OnlineMonitor.ts @@ -0,0 +1,64 @@ +import { assert } from "../../../utils/assert"; +import { EventEmitter } from "./EventEmitter"; +import { isMobileCordova } from "../../../utils/environment"; + +/** + * Monitors online state (as reported by window.online/offline events). 
+ * + * The expectation is that this could have many false positives (thinks we are online + * when we're not), but no false negatives. So we can safely use it to determine when + * we definitely cannot reach the internet. + * + * @extends {fb.core.util.EventEmitter} + */ +export class OnlineMonitor extends EventEmitter { + online_; + + static getInstance() { + return new OnlineMonitor(); + } + + constructor() { + super(['online']); + this.online_ = true; + + // We've had repeated complaints that Cordova apps can get stuck "offline", e.g. + // https://forum.ionicframework.com/t/firebase-connection-is-lost-and-never-come-back/43810 + // It would seem that the 'online' event does not always fire consistently. So we disable it + // for Cordova. + if (typeof window !== 'undefined' && + typeof window.addEventListener !== 'undefined' && + !isMobileCordova()) { + var self = this; + window.addEventListener('online', function() { + if (!self.online_) { + self.online_ = true; + self.trigger('online', true); + } + }, false); + + window.addEventListener('offline', function() { + if (self.online_) { + self.online_ = false; + self.trigger('online', false); + } + }, false); + } + } + + /** + * @param {!string} eventType + * @return {Array.} + */ + getInitialEvent(eventType) { + assert(eventType === 'online', 'Unknown event type: ' + eventType); + return [this.online_]; + } + + /** + * @return {boolean} + */ + currentlyOnline() { + return this.online_; + } +}; // end OnlineMonitor diff --git a/src/database/core/util/Path.ts b/src/database/core/util/Path.ts new file mode 100644 index 00000000000..12edb44a1bf --- /dev/null +++ b/src/database/core/util/Path.ts @@ -0,0 +1,322 @@ +import { nameCompare } from "./util"; +import { stringLength } from "../../../utils/utf8"; +/** + * An immutable object representing a parsed path. It's immutable so that you + * can pass them around to other functions without worrying about them changing + * it. + */ + +export class Path { + pieces_; + pieceNum_; + + /** + * Singleton to represent an empty path + * + * @const + */ + static get Empty() { + return new Path(''); + } + /** + * @param {string|Array.} pathOrString Path string to parse, + * or another path, or the raw tokens array + * @param {number=} opt_pieceNum + */ + constructor(pathOrString: string|string[], opt_pieceNum?) { + if (arguments.length == 1) { + this.pieces_ = (pathOrString).split('/'); + + // Remove empty pieces. 
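+ // e.g. '/foo//bar/' and 'foo/bar' both normalize to the pieces ['foo', 'bar'].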
+ var copyTo = 0; + for (var i = 0; i < this.pieces_.length; i++) { + if (this.pieces_[i].length > 0) { + this.pieces_[copyTo] = this.pieces_[i]; + copyTo++; + } + } + this.pieces_.length = copyTo; + + this.pieceNum_ = 0; + } else { + this.pieces_ = pathOrString; + this.pieceNum_ = opt_pieceNum; + } + } + + getFront() { + if (this.pieceNum_ >= this.pieces_.length) + return null; + + return this.pieces_[this.pieceNum_]; + } + + /** + * @return {number} The number of segments in this path + */ + getLength() { + return this.pieces_.length - this.pieceNum_; + } + + /** + * @return {!Path} + */ + popFront() { + var pieceNum = this.pieceNum_; + if (pieceNum < this.pieces_.length) { + pieceNum++; + } + return new Path(this.pieces_, pieceNum); + } + + /** + * @return {?string} + */ + getBack() { + if (this.pieceNum_ < this.pieces_.length) + return this.pieces_[this.pieces_.length - 1]; + + return null; + } + + toString() { + var pathString = ''; + for (var i = this.pieceNum_; i < this.pieces_.length; i++) { + if (this.pieces_[i] !== '') + pathString += '/' + this.pieces_[i]; + } + + return pathString || '/'; + } + + toUrlEncodedString() { + var pathString = ''; + for (var i = this.pieceNum_; i < this.pieces_.length; i++) { + if (this.pieces_[i] !== '') + pathString += '/' + encodeURIComponent(String(this.pieces_[i])); + } + + return pathString || '/'; + } + + /** + * Shallow copy of the parts of the path. + * + * @param {number=} opt_begin + * @return {!Array} + */ + slice(opt_begin) { + var begin = opt_begin || 0; + return this.pieces_.slice(this.pieceNum_ + begin); + } + + /** + * @return {?Path} + */ + parent() { + if (this.pieceNum_ >= this.pieces_.length) + return null; + + var pieces = []; + for (var i = this.pieceNum_; i < this.pieces_.length - 1; i++) + pieces.push(this.pieces_[i]); + + return new Path(pieces, 0); + } + + /** + * @param {string|!Path} childPathObj + * @return {!Path} + */ + child(childPathObj) { + var pieces = []; + for (var i = this.pieceNum_; i < this.pieces_.length; i++) + pieces.push(this.pieces_[i]); + + if (childPathObj instanceof Path) { + for (i = childPathObj.pieceNum_; i < childPathObj.pieces_.length; i++) { + pieces.push(childPathObj.pieces_[i]); + } + } else { + var childPieces = childPathObj.split('/'); + for (i = 0; i < childPieces.length; i++) { + if (childPieces[i].length > 0) + pieces.push(childPieces[i]); + } + } + + return new Path(pieces, 0); + } + + /** + * @return {boolean} True if there are no segments in this path + */ + isEmpty() { + return this.pieceNum_ >= this.pieces_.length; + } + + /** + * @param {!Path} outerPath + * @param {!Path} innerPath + * @return {!Path} The path from outerPath to innerPath + */ + static relativePath(outerPath, innerPath) { + var outer = outerPath.getFront(), inner = innerPath.getFront(); + if (outer === null) { + return innerPath; + } else if (outer === inner) { + return Path.relativePath(outerPath.popFront(), + innerPath.popFront()); + } else { + throw new Error('INTERNAL ERROR: innerPath (' + innerPath + ') is not within ' + + 'outerPath (' + outerPath + ')'); + } + } + /** + * @param {!Path} left + * @param {!Path} right + * @return {number} -1, 0, 1 if left is less, equal, or greater than the right. 
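+ * Segments are compared pairwise with nameCompare; if one path is a strict prefix of
+ * the other, the shorter path sorts first.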
+ */ + static comparePaths(left, right) { + var leftKeys = left.slice(); + var rightKeys = right.slice(); + for (var i = 0; i < leftKeys.length && i < rightKeys.length; i++) { + var cmp = nameCompare(leftKeys[i], rightKeys[i]); + if (cmp !== 0) return cmp; + } + if (leftKeys.length === rightKeys.length) return 0; + return (leftKeys.length < rightKeys.length) ? -1 : 1; + } + + /** + * + * @param {Path} other + * @return {boolean} true if paths are the same. + */ + equals(other) { + if (this.getLength() !== other.getLength()) { + return false; + } + + for (var i = this.pieceNum_, j = other.pieceNum_; i <= this.pieces_.length; i++, j++) { + if (this.pieces_[i] !== other.pieces_[j]) { + return false; + } + } + + return true; + } + + /** + * + * @param {!Path} other + * @return {boolean} True if this path is a parent (or the same as) other + */ + contains(other) { + var i = this.pieceNum_; + var j = other.pieceNum_; + if (this.getLength() > other.getLength()) { + return false; + } + while (i < this.pieces_.length) { + if (this.pieces_[i] !== other.pieces_[j]) { + return false; + } + ++i; + ++j; + } + return true; + } +} // end Path + +/** + * Dynamic (mutable) path used to count path lengths. + * + * This class is used to efficiently check paths for valid + * length (in UTF8 bytes) and depth (used in path validation). + * + * Throws Error exception if path is ever invalid. + * + * The definition of a path always begins with '/'. + */ +export class ValidationPath { + /** @type {!Array} */ + parts_; + /** @type {number} Initialize to number of '/' chars needed in path. */ + byteLength_; + /** @type {string} */ + errorPrefix_; + + /** + * @param {!Path} path Initial Path. + * @param {string} errorPrefix Prefix for any error messages. + */ + constructor(path, errorPrefix) { + /** @type {!Array} */ + this.parts_ = path.slice(); + /** @type {number} Initialize to number of '/' chars needed in path. */ + this.byteLength_ = Math.max(1, this.parts_.length); + /** @type {string} */ + this.errorPrefix_ = errorPrefix; + + for (var i = 0; i < this.parts_.length; i++) { + this.byteLength_ += stringLength(this.parts_[i]); + } + this.checkValid_(); + } + /** @const {number} Maximum key depth. */ + static get MAX_PATH_DEPTH() { + return 32; + } + + /** @const {number} Maximum number of (UTF8) bytes in a Firebase path. */ + static get MAX_PATH_LENGTH_BYTES() { + return 768 + } + + /** @param {string} child */ + push(child) { + // Count the needed '/' + if (this.parts_.length > 0) { + this.byteLength_ += 1; + } + this.parts_.push(child); + this.byteLength_ += stringLength(child); + this.checkValid_(); + } + + pop() { + var last = this.parts_.pop(); + this.byteLength_ -= stringLength(last); + // Un-count the previous '/' + if (this.parts_.length > 0) { + this.byteLength_ -= 1; + } + } + + checkValid_() { + if (this.byteLength_ > ValidationPath.MAX_PATH_LENGTH_BYTES) { + throw new Error(this.errorPrefix_ + 'has a key path longer than ' + + ValidationPath.MAX_PATH_LENGTH_BYTES + ' bytes (' + + this.byteLength_ + ').'); + } + if (this.parts_.length > ValidationPath.MAX_PATH_DEPTH) { + throw new Error(this.errorPrefix_ + 'path specified exceeds the maximum depth that can be written (' + + ValidationPath.MAX_PATH_DEPTH + + ') or object contains a cycle ' + this.toErrorString()); + } + } + + /** + * String for use in error messages - uses '.' notation for path. 
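+ * e.g. for parts ['users', 'alice', 'name'] this returns "in property 'users.alice.name'";
+ * an empty path yields the empty string.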
+ * + * @return {string} + */ + toErrorString() { + if (this.parts_.length == 0) { + return ''; + } + return 'in property \'' + this.parts_.join('.') + '\''; + } + +}; // end fb.core.util.validation.ValidationPath diff --git a/src/database/core/util/ServerValues.ts b/src/database/core/util/ServerValues.ts new file mode 100644 index 00000000000..89a9b52362b --- /dev/null +++ b/src/database/core/util/ServerValues.ts @@ -0,0 +1,87 @@ +import { assert } from "../../../utils/assert"; +import { Path } from "./Path"; +import { SparseSnapshotTree } from "../SparseSnapshotTree"; +import { LeafNode } from "../snap/LeafNode"; +import { nodeFromJSON } from "../snap/nodeFromJSON"; +import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; +/** + * Generate placeholders for deferred values. + * @param {?Object} values + * @return {!Object} + */ +export const generateWithValues = function(values) { + values = values || {}; + values['timestamp'] = values['timestamp'] || new Date().getTime(); + return values; +}; + + +/** + * Value to use when firing local events. When writing server values, fire + * local events with an approximate value, otherwise return value as-is. + * @param {(Object|string|number|boolean)} value + * @param {!Object} serverValues + * @return {!(string|number|boolean)} + */ +export const resolveDeferredValue = function(value, serverValues) { + if (!value || (typeof value !== 'object')) { + return /** @type {(string|number|boolean)} */ (value); + } else { + assert('.sv' in value, 'Unexpected leaf node or priority contents'); + return serverValues[value['.sv']]; + } +}; + + +/** + * Recursively replace all deferred values and priorities in the tree with the + * specified generated replacement values. + * @param {!SparseSnapshotTree} tree + * @param {!Object} serverValues + * @return {!SparseSnapshotTree} + */ +export const resolveDeferredValueTree = function(tree, serverValues) { + var resolvedTree = new SparseSnapshotTree(); + tree.forEachTree(new Path(''), function(path, node) { + resolvedTree.remember(path, resolveDeferredValueSnapshot(node, serverValues)); + }); + return resolvedTree; +}; + + +/** + * Recursively replace all deferred values and priorities in the node with the + * specified generated replacement values. If there are no server values in the node, + * it'll be returned as-is. 
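+ * For example, a leaf whose value is {'.sv': 'timestamp'} resolves to
+ * serverValues['timestamp'], the locally generated timestamp supplied by
+ * generateWithValues() above.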
+ * @param {!fb.core.snap.Node} node + * @param {!Object} serverValues + * @return {!fb.core.snap.Node} + */ +export const resolveDeferredValueSnapshot = function(node, serverValues) { + var rawPri = /** @type {Object|boolean|null|number|string} */ (node.getPriority().val()), + priority = resolveDeferredValue(rawPri, serverValues), + newNode; + + if (node.isLeafNode()) { + var leafNode = /** @type {!LeafNode} */ (node); + var value = resolveDeferredValue(leafNode.getValue(), serverValues); + if (value !== leafNode.getValue() || priority !== leafNode.getPriority().val()) { + return new LeafNode(value, nodeFromJSON(priority)); + } else { + return node; + } + } else { + var childrenNode = /** @type {!fb.core.snap.ChildrenNode} */ (node); + newNode = childrenNode; + if (priority !== childrenNode.getPriority().val()) { + newNode = newNode.updatePriority(new LeafNode(priority)); + } + childrenNode.forEachChild(PRIORITY_INDEX, function(childName, childNode) { + var newChildNode = resolveDeferredValueSnapshot(childNode, serverValues); + if (newChildNode !== childNode) { + newNode = newNode.updateImmediateChild(childName, newChildNode); + } + }); + return newNode; + } +}; diff --git a/src/database/core/util/SortedMap.ts b/src/database/core/util/SortedMap.ts new file mode 100644 index 00000000000..55dbf5b4da8 --- /dev/null +++ b/src/database/core/util/SortedMap.ts @@ -0,0 +1,748 @@ +/** + * @fileoverview Implementation of an immutable SortedMap using a Left-leaning + * Red-Black Tree, adapted from the implementation in Mugs + * (http://mads379.github.com/mugs/) by Mads Hartmann Jensen + * (mads379@gmail.com). + * + * Original paper on Left-leaning Red-Black Trees: + * http://www.cs.princeton.edu/~rs/talks/LLRB/LLRB.pdf + * + * Invariant 1: No red node has a red child + * Invariant 2: Every leaf path has the same number of black nodes + * Invariant 3: Only the left child can be red (left leaning) + */ + + +// TODO: There are some improvements I'd like to make to improve memory / perf: +// * Create two prototypes, LLRedNode and LLBlackNode, instead of storing a +// color property in every node. +// TODO: It would also be good (and possibly necessary) to create a base +// interface for LLRBNode and LLRBEmptyNode. + + +/** + * An iterator over an LLRBNode. + */ +export class SortedMapIterator { + /** @private + * @type {?function(!K, !V): T} + */ + resultGenerator_; + isReverse_; + + /** @private + * @type {Array.} + */ + nodeStack_: Array; + + /** + * @template K, V, T + * @param {LLRBNode|LLRBEmptyNode} node Node to iterate. + * @param {?K} startKey + * @param {function(K, K): number} comparator + * @param {boolean} isReverse Whether or not to iterate in reverse + * @param {(function(K, V):T)=} opt_resultGenerator + */ + constructor(node, startKey, comparator, isReverse, opt_resultGenerator?) { + /** @private + * @type {?function(!K, !V): T} + */ + this.resultGenerator_ = opt_resultGenerator || null; + this.isReverse_ = isReverse; + + /** @private + * @type {Array.} + */ + this.nodeStack_ = []; + + var cmp = 1; + while (!node.isEmpty()) { + cmp = startKey ? comparator(node.key, startKey) : 1; + // flip the comparison if we're going in reverse + if (isReverse) cmp *= -1; + + if (cmp < 0) { + // This node is less than our start key. ignore it + if (this.isReverse_) { + node = node.left; + } else { + node = node.right; + } + } else if (cmp === 0) { + // This node is exactly equal to our start key. 
Push it on the stack, but stop iterating; + this.nodeStack_.push(node); + break; + } else { + // This node is greater than our start key, add it to the stack and move to the next one + this.nodeStack_.push(node); + if (this.isReverse_) { + node = node.right; + } else { + node = node.left; + } + } + } + } + + getNext() { + if (this.nodeStack_.length === 0) + return null; + + var node = this.nodeStack_.pop(), result; + if (this.resultGenerator_) + result = this.resultGenerator_(node.key, node.value); + else + result = {key: node.key, value: node.value}; + + if (this.isReverse_) { + node = node.left; + while (!node.isEmpty()) { + this.nodeStack_.push(node); + node = node.right; + } + } else { + node = node.right; + while (!node.isEmpty()) { + this.nodeStack_.push(node); + node = node.left; + } + } + + return result; + } + + hasNext() { + return this.nodeStack_.length > 0; + } + + peek() { + if (this.nodeStack_.length === 0) + return null; + + var node = this.nodeStack_[this.nodeStack_.length - 1]; + if (this.resultGenerator_) { + return this.resultGenerator_(node.key, node.value); + } else { + return { key: node.key, value: node.value }; + } + } +}; // end SortedMapIterator + + +/** + * Represents a node in a Left-leaning Red-Black tree. + */ +export class LLRBNode { + key; + value; + color; + left; + right; + + /** + * @template K, V + * @param {!K} key Key associated with this node. + * @param {!V} value Value associated with this node. + * @param {?boolean} color Whether this node is red. + * @param {?(LLRBNode|LLRBEmptyNode)=} opt_left Left child. + * @param {?(LLRBNode|LLRBEmptyNode)=} opt_right Right child. + */ + constructor(key, value, color, opt_left?, opt_right?) { + this.key = key; + this.value = value; + this.color = color != null ? color : LLRBNode.RED; + this.left = opt_left != null ? opt_left : SortedMap.EMPTY_NODE_; + this.right = opt_right != null ? opt_right : SortedMap.EMPTY_NODE_; + } + + static RED = true; + static BLACK = false; + + /** + * Returns a copy of the current node, optionally replacing pieces of it. + * + * @param {?K} key New key for the node, or null. + * @param {?V} value New value for the node, or null. + * @param {?boolean} color New color for the node, or null. + * @param {?LLRBNode|LLRBEmptyNode} left New left child for the node, or null. + * @param {?LLRBNode|LLRBEmptyNode} right New right child for the node, or null. + * @return {!LLRBNode} The node copy. + */ + copy(key, value, color, left, right) { + return new LLRBNode( + (key != null) ? key : this.key, + (value != null) ? value : this.value, + (color != null) ? color : this.color, + (left != null) ? left : this.left, + (right != null) ? right : this.right); + } + + /** + * @return {number} The total number of nodes in the tree. + */ + count() { + return this.left.count() + 1 + this.right.count(); + } + + /** + * @return {boolean} True if the tree is empty. + */ + isEmpty() { + return false; + } + + /** + * Traverses the tree in key order and calls the specified action function + * for each node. + * + * @param {function(!K, !V):*} action Callback function to be called for each + * node. If it returns true, traversal is aborted. + * @return {*} The first truthy value returned by action, or the last falsey + * value returned by action + */ + inorderTraversal(action) { + return this.left.inorderTraversal(action) || + action(this.key, this.value) || + this.right.inorderTraversal(action); + } + + /** + * Traverses the tree in reverse key order and calls the specified action function + * for each node. 
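 + * (Illustrative: for a tree holding keys 1, 2 and 3, action is invoked with 3, then 2, then 1.)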
+ * + * @param {function(!Object, !Object)} action Callback function to be called for each + * node. If it returns true, traversal is aborted. + * @return {*} True if traversal was aborted. + */ + reverseTraversal(action) { + return this.right.reverseTraversal(action) || + action(this.key, this.value) || + this.left.reverseTraversal(action); + } + + /** + * @return {!Object} The minimum node in the tree. + * @private + */ + min_() { + if (this.left.isEmpty()) { + return this; + } else { + return this.left.min_(); + } + } + + /** + * @return {!K} The maximum key in the tree. + */ + minKey() { + return this.min_().key; + } + + /** + * @return {!K} The maximum key in the tree. + */ + maxKey() { + if (this.right.isEmpty()) { + return this.key; + } else { + return this.right.maxKey(); + } + } + + /** + * + * @param {!Object} key Key to insert. + * @param {!Object} value Value to insert. + * @param {fb.Comparator} comparator Comparator. + * @return {!LLRBNode} New tree, with the key/value added. + */ + insert(key, value, comparator) { + var cmp, n; + n = this; + cmp = comparator(key, n.key); + if (cmp < 0) { + n = n.copy(null, null, null, n.left.insert(key, value, comparator), null); + } else if (cmp === 0) { + n = n.copy(null, value, null, null, null); + } else { + n = n.copy(null, null, null, null, n.right.insert(key, value, comparator)); + } + return n.fixUp_(); + } + + /** + * @private + * @return {!LLRBNode|LLRBEmptyNode} New tree, with the minimum key removed. + */ + removeMin_() { + var n; + if (this.left.isEmpty()) { + return SortedMap.EMPTY_NODE_; + } + n = this; + if (!n.left.isRed_() && !n.left.left.isRed_()) + n = n.moveRedLeft_(); + n = n.copy(null, null, null, n.left.removeMin_(), null); + return n.fixUp_(); + } + + /** + * @param {!Object} key The key of the item to remove. + * @param {fb.Comparator} comparator Comparator. + * @return {!LLRBNode|LLRBEmptyNode} New tree, with the specified item removed. + */ + remove(key, comparator) { + var n, smallest; + n = this; + if (comparator(key, n.key) < 0) { + if (!n.left.isEmpty() && !n.left.isRed_() && !n.left.left.isRed_()) { + n = n.moveRedLeft_(); + } + n = n.copy(null, null, null, n.left.remove(key, comparator), null); + } else { + if (n.left.isRed_()) n = n.rotateRight_(); + if (!n.right.isEmpty() && !n.right.isRed_() && !n.right.left.isRed_()) { + n = n.moveRedRight_(); + } + if (comparator(key, n.key) === 0) { + if (n.right.isEmpty()) { + return SortedMap.EMPTY_NODE_; + } else { + smallest = n.right.min_(); + n = n.copy(smallest.key, smallest.value, null, null, + n.right.removeMin_()); + } + } + n = n.copy(null, null, null, null, n.right.remove(key, comparator)); + } + return n.fixUp_(); + } + + /** + * @private + * @return {boolean} Whether this is a RED node. + */ + isRed_() { + return this.color; + } + + /** + * @private + * @return {!LLRBNode} New tree after performing any needed rotations. + */ + fixUp_() { + var n = this; + if (n.right.isRed_() && !n.left.isRed_()) n = n.rotateLeft_(); + if (n.left.isRed_() && n.left.left.isRed_()) n = n.rotateRight_(); + if (n.left.isRed_() && n.right.isRed_()) n = n.colorFlip_(); + return n; + } + + /** + * @private + * @return {!LLRBNode} New tree, after moveRedLeft. + */ + moveRedLeft_() { + var n = this.colorFlip_(); + if (n.right.left.isRed_()) { + n = n.copy(null, null, null, null, n.right.rotateRight_()); + n = n.rotateLeft_(); + n = n.colorFlip_(); + } + return n; + } + + /** + * @private + * @return {!LLRBNode} New tree, after moveRedRight. 
+ */ + moveRedRight_() { + var n = this.colorFlip_(); + if (n.left.left.isRed_()) { + n = n.rotateRight_(); + n = n.colorFlip_(); + } + return n; + } + + /** + * @private + * @return {!LLRBNode} New tree, after rotateLeft. + */ + rotateLeft_() { + var nl; + nl = this.copy(null, null, LLRBNode.RED, null, this.right.left); + return this.right.copy(null, null, this.color, nl, null); + } + + /** + * @private + * @return {!LLRBNode} New tree, after rotateRight. + */ + rotateRight_() { + var nr; + nr = this.copy(null, null, LLRBNode.RED, this.left.right, null); + return this.left.copy(null, null, this.color, null, nr); + } + + /** + * @private + * @return {!LLRBNode} New tree, after colorFlip. + */ + colorFlip_() { + var left, right; + left = this.left.copy(null, null, !this.left.color, null, null); + right = this.right.copy(null, null, !this.right.color, null, null); + return this.copy(null, null, !this.color, left, right); + } + + /** + * For testing. + * + * @private + * @return {boolean} True if all is well. + */ + checkMaxDepth_() { + var blackDepth; + blackDepth = this.check_(); + if (Math.pow(2.0, blackDepth) <= this.count() + 1) { + return true; + } else { + return false; + } + } + + /** + * @private + * @return {number} Not sure what this returns exactly. :-). + */ + check_() { + var blackDepth; + if (this.isRed_() && this.left.isRed_()) { + throw new Error('Red node has red child(' + this.key + ',' + + this.value + ')'); + } + if (this.right.isRed_()) { + throw new Error('Right child of (' + this.key + ',' + + this.value + ') is red'); + } + blackDepth = this.left.check_(); + if (blackDepth !== this.right.check_()) { + throw new Error('Black depths differ'); + } else { + return blackDepth + (this.isRed_() ? 0 : 1); + } + } +}; // end LLRBNode + + +/** + * Represents an empty node (a leaf node in the Red-Black Tree). + */ +export class LLRBEmptyNode { + /** + * @template K, V + */ + constructor() {} + + /** + * Returns a copy of the current node. + * + * @return {!LLRBEmptyNode} The node copy. + */ + copy() { + return this; + } + + /** + * Returns a copy of the tree, with the specified key/value added. + * + * @param {!K} key Key to be added. + * @param {!V} value Value to be added. + * @param {fb.Comparator} comparator Comparator. + * @return {!LLRBNode} New tree, with item added. + */ + insert(key, value, comparator) { + return new LLRBNode(key, value, null); + } + + /** + * Returns a copy of the tree, with the specified key removed. + * + * @param {!K} key The key to remove. + * @return {!LLRBEmptyNode} New tree, with item removed. + */ + remove(key, comparator) { + return this; + } + + /** + * @return {number} The total number of nodes in the tree. + */ + count() { + return 0; + } + + /** + * @return {boolean} True if the tree is empty. + */ + isEmpty() { + return true; + } + + /** + * Traverses the tree in key order and calls the specified action function + * for each node. + * + * @param {function(!K, !V)} action Callback function to be called for each + * node. If it returns true, traversal is aborted. + * @return {boolean} True if traversal was aborted. + */ + inorderTraversal(action) { + return false; + } + + /** + * Traverses the tree in reverse key order and calls the specified action function + * for each node. + * + * @param {function(!K, !V)} action Callback function to be called for each + * node. If it returns true, traversal is aborted. + * @return {boolean} True if traversal was aborted. 
+ */ + reverseTraversal(action) { + return false; + } + + /** + * @return {null} + */ + minKey() { + return null; + } + + /** + * @return {null} + */ + maxKey() { + return null; + } + + /** + * @private + * @return {number} Not sure what this returns exactly. :-). + */ + check_() { return 0; } + + /** + * @private + * @return {boolean} Whether this node is red. + */ + isRed_() { return false; } +}; // end LLRBEmptyNode + +/** + * An immutable sorted map implementation, based on a Left-leaning Red-Black + * tree. + */ +export class SortedMap { + /** @private */ + comparator_; + + /** @private */ + root_; + + /** + * Always use the same empty node, to reduce memory. + * @private + * @const + */ + static EMPTY_NODE_ = new LLRBEmptyNode(); + + /** + * @template K, V + * @param {function(K, K):number} comparator Key comparator. + * @param {LLRBNode=} opt_root (Optional) Root node for the map. + */ + constructor(comparator, opt_root?) { + /** @private */ + this.comparator_ = comparator; + + /** @private */ + this.root_ = opt_root ? opt_root : SortedMap.EMPTY_NODE_; + } + + /** + * Returns a copy of the map, with the specified key/value added or replaced. + * (TODO: We should perhaps rename this method to 'put') + * + * @param {!K} key Key to be added. + * @param {!V} value Value to be added. + * @return {!SortedMap.} New map, with item added. + */ + insert(key, value) { + return new SortedMap( + this.comparator_, + this.root_.insert(key, value, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null)); + } + + /** + * Returns a copy of the map, with the specified key removed. + * + * @param {!K} key The key to remove. + * @return {!SortedMap.} New map, with item removed. + */ + remove(key) { + return new SortedMap( + this.comparator_, + this.root_.remove(key, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null)); + } + + /** + * Returns the value of the node with the given key, or null. + * + * @param {!K} key The key to look up. + * @return {?V} The value of the node with the given key, or null if the + * key doesn't exist. + */ + get(key) { + var cmp; + var node = this.root_; + while (!node.isEmpty()) { + cmp = this.comparator_(key, node.key); + if (cmp === 0) { + return node.value; + } else if (cmp < 0) { + node = node.left; + } else if (cmp > 0) { + node = node.right; + } + } + return null; + } + + /** + * Returns the key of the item *before* the specified key, or null if key is the first item. + * @param {K} key The key to find the predecessor of + * @return {?K} The predecessor key. + */ + getPredecessorKey(key) { + var cmp, node = this.root_, rightParent = null; + while (!node.isEmpty()) { + cmp = this.comparator_(key, node.key); + if (cmp === 0) { + if (!node.left.isEmpty()) { + node = node.left; + while (!node.right.isEmpty()) + node = node.right; + return node.key; + } else if (rightParent) { + return rightParent.key; + } else { + return null; // first item. + } + } else if (cmp < 0) { + node = node.left; + } else if (cmp > 0) { + rightParent = node; + node = node.right; + } + } + + throw new Error('Attempted to find predecessor key for a nonexistent key. What gives?'); + } + + /** + * @return {boolean} True if the map is empty. + */ + isEmpty() { + return this.root_.isEmpty(); + } + + /** + * @return {number} The total number of nodes in the map. + */ + count() { + return this.root_.count(); + } + + /** + * @return {?K} The minimum key in the map. + */ + minKey() { + return this.root_.minKey(); + } + + /** + * @return {?K} The maximum key in the map. 
+ */ + maxKey() { + return this.root_.maxKey(); + } + + /** + * Traverses the map in key order and calls the specified action function + * for each key/value pair. + * + * @param {function(!K, !V):*} action Callback function to be called + * for each key/value pair. If action returns true, traversal is aborted. + * @return {*} The first truthy value returned by action, or the last falsey + * value returned by action + */ + inorderTraversal(action) { + return this.root_.inorderTraversal(action); + } + + /** + * Traverses the map in reverse key order and calls the specified action function + * for each key/value pair. + * + * @param {function(!Object, !Object)} action Callback function to be called + * for each key/value pair. If action returns true, traversal is aborted. + * @return {*} True if the traversal was aborted. + */ + reverseTraversal(action) { + return this.root_.reverseTraversal(action); + } + + /** + * Returns an iterator over the SortedMap. + * @template T + * @param {(function(K, V):T)=} opt_resultGenerator + * @return {SortedMapIterator.} The iterator. + */ + getIterator(opt_resultGenerator) { + return new SortedMapIterator(this.root_, + null, + this.comparator_, + false, + opt_resultGenerator); + } + + getIteratorFrom(key, opt_resultGenerator) { + return new SortedMapIterator(this.root_, + key, + this.comparator_, + false, + opt_resultGenerator); + } + + getReverseIteratorFrom(key, opt_resultGenerator) { + return new SortedMapIterator(this.root_, + key, + this.comparator_, + true, + opt_resultGenerator); + } + + getReverseIterator(opt_resultGenerator) { + return new SortedMapIterator(this.root_, + null, + this.comparator_, + true, + opt_resultGenerator); + } +}; // end SortedMap \ No newline at end of file diff --git a/src/database/core/util/Tree.ts b/src/database/core/util/Tree.ts new file mode 100644 index 00000000000..75caba7ed7c --- /dev/null +++ b/src/database/core/util/Tree.ts @@ -0,0 +1,230 @@ +import { assert } from "../../../utils/assert"; +import { Path } from "./Path"; +import { forEach, contains, safeGet } from '../../../utils/obj' + +/** + * Node in a Tree. + */ +export class TreeNode { + children; + childCount; + value; + + constructor() { + // TODO: Consider making accessors that create children and value lazily or + // separate Internal / Leaf 'types'. + this.children = { }; + this.childCount = 0; + this.value = null; + } +}; // end TreeNode + + +/** + * A light-weight tree, traversable by path. Nodes can have both values and children. + * Nodes are not enumerated (by forEachChild) unless they have a value or non-empty + * children. + */ +export class Tree { + name_; + parent_; + node_; + + /** + * @template T + * @param {string=} opt_name Optional name of the node. + * @param {Tree=} opt_parent Optional parent node. + * @param {TreeNode=} opt_node Optional node to wrap. + */ + constructor(opt_name?, opt_parent?, opt_node?) { + this.name_ = opt_name ? opt_name : ''; + this.parent_ = opt_parent ? opt_parent : null; + this.node_ = opt_node ? opt_node : new TreeNode(); + } + + /** + * Returns a sub-Tree for the given path. + * + * @param {!(string|Path)} pathObj Path to look up. + * @return {!Tree.} Tree for path. + */ + subTree(pathObj) { + // TODO: Require pathObj to be Path? + var path = (pathObj instanceof Path) ? 
+ pathObj : new Path(pathObj); + var child = this, next; + while ((next = path.getFront()) !== null) { + var childNode = safeGet(child.node_.children, next) || new TreeNode(); + child = new Tree(next, child, childNode); + path = path.popFront(); + } + + return child; + } + + /** + * Returns the data associated with this tree node. + * + * @return {?T} The data or null if no data exists. + */ + getValue() { + return this.node_.value; + } + + /** + * Sets data to this tree node. + * + * @param {!T} value Value to set. + */ + setValue(value) { + assert(typeof value !== 'undefined', 'Cannot set value to undefined'); + this.node_.value = value; + this.updateParents_(); + } + + /** + * Clears the contents of the tree node (its value and all children). + */ + clear() { + this.node_.value = null; + this.node_.children = { }; + this.node_.childCount = 0; + this.updateParents_(); + } + + /** + * @return {boolean} Whether the tree has any children. + */ + hasChildren() { + return this.node_.childCount > 0; + } + + /** + * @return {boolean} Whether the tree is empty (no value or children). + */ + isEmpty() { + return this.getValue() === null && !this.hasChildren(); + } + + /** + * Calls action for each child of this tree node. + * + * @param {function(!Tree.)} action Action to be called for each child. + */ + forEachChild(action) { + var self = this; + forEach(this.node_.children, function(child, childTree) { + action(new Tree(child, self, childTree)); + }); + } + + /** + * Does a depth-first traversal of this node's descendants, calling action for each one. + * + * @param {function(!Tree.)} action Action to be called for each child. + * @param {boolean=} opt_includeSelf Whether to call action on this node as well. Defaults to + * false. + * @param {boolean=} opt_childrenFirst Whether to call action on children before calling it on + * parent. + */ + forEachDescendant(action, opt_includeSelf, opt_childrenFirst) { + if (opt_includeSelf && !opt_childrenFirst) + action(this); + + this.forEachChild(function(child) { + child.forEachDescendant(action, /*opt_includeSelf=*/true, opt_childrenFirst); + }); + + if (opt_includeSelf && opt_childrenFirst) + action(this); + } + + /** + * Calls action on each ancestor node. + * + * @param {function(!Tree.)} action Action to be called on each parent; return + * true to abort. + * @param {boolean=} opt_includeSelf Whether to call action on this node as well. + * @return {boolean} true if the action callback returned true. + */ + forEachAncestor(action, opt_includeSelf) { + var node = opt_includeSelf ? this : this.parent(); + while (node !== null) { + if (action(node)) { + return true; + } + node = node.parent(); + } + return false; + } + + /** + * Does a depth-first traversal of this node's descendants. When a descendant with a value + * is found, action is called on it and traversal does not continue inside the node. + * Action is *not* called on this node. + * + * @param {function(!Tree.)} action Action to be called for each child. + */ + forEachImmediateDescendantWithValue(action) { + this.forEachChild(function(child) { + if (child.getValue() !== null) + action(child); + else + child.forEachImmediateDescendantWithValue(action); + }); + } + + /** + * @return {!Path} The path of this tree node, as a Path. + */ + path() { + return new Path(this.parent_ === null ? + this.name_ : this.parent_.path() + '/' + this.name_); + } + + /** + * @return {string} The name of the tree node. 
+ */ + name() { + return this.name_; + } + + /** + * @return {?Tree} The parent tree node, or null if this is the root of the tree. + */ + parent() { + return this.parent_; + } + + /** + * Adds or removes this child from its parent based on whether it's empty or not. + * + * @private + */ + updateParents_() { + if (this.parent_ !== null) + this.parent_.updateChild_(this.name_, this); + } + + /** + * Adds or removes the passed child to this tree node, depending on whether it's empty. + * + * @param {string} childName The name of the child to update. + * @param {!Tree.} child The child to update. + * @private + */ + updateChild_(childName, child) { + var childEmpty = child.isEmpty(); + var childExists = contains(this.node_.children, childName); + if (childEmpty && childExists) { + delete (this.node_.children[childName]); + this.node_.childCount--; + this.updateParents_(); + } + else if (!childEmpty && !childExists) { + this.node_.children[childName] = child.node_; + this.node_.childCount++; + this.updateParents_(); + } + } +}; // end Tree diff --git a/src/database/core/util/VisibilityMonitor.ts b/src/database/core/util/VisibilityMonitor.ts new file mode 100644 index 00000000000..42d489650c0 --- /dev/null +++ b/src/database/core/util/VisibilityMonitor.ts @@ -0,0 +1,60 @@ +import { EventEmitter } from "./EventEmitter"; +import { assert } from "../../../utils/assert"; + +/** + * @extends {fb.core.util.EventEmitter} + */ +export class VisibilityMonitor extends EventEmitter { + visible_; + + static getInstance() { + return new VisibilityMonitor(); + } + + constructor() { + super(['visible']); + var hidden, visibilityChange; + if (typeof document !== 'undefined' && typeof document.addEventListener !== 'undefined') { + if (typeof document['hidden'] !== 'undefined') { + // Opera 12.10 and Firefox 18 and later support + visibilityChange = 'visibilitychange'; + hidden = 'hidden'; + } else if (typeof document['mozHidden'] !== 'undefined') { + visibilityChange = 'mozvisibilitychange'; + hidden = 'mozHidden'; + } else if (typeof document['msHidden'] !== 'undefined') { + visibilityChange = 'msvisibilitychange'; + hidden = 'msHidden'; + } else if (typeof document['webkitHidden'] !== 'undefined') { + visibilityChange = 'webkitvisibilitychange'; + hidden = 'webkitHidden'; + } + } + + // Initially, we always assume we are visible. This ensures that in browsers + // without page visibility support or in cases where we are never visible + // (e.g. chrome extension), we act as if we are visible, i.e. 
don't delay + // reconnects + this.visible_ = true; + + if (visibilityChange) { + var self = this; + document.addEventListener(visibilityChange, function() { + var visible = !document[hidden]; + if (visible !== self.visible_) { + self.visible_ = visible; + self.trigger('visible', visible); + } + }, false); + } + } + + /** + * @param {!string} eventType + * @return {Array.} + */ + getInitialEvent(eventType) { + assert(eventType === 'visible', 'Unknown event type: ' + eventType); + return [this.visible_]; + } +}; // end VisibilityMonitor \ No newline at end of file diff --git a/src/database/core/util/libs/parser.ts b/src/database/core/util/libs/parser.ts new file mode 100644 index 00000000000..593bc6e3826 --- /dev/null +++ b/src/database/core/util/libs/parser.ts @@ -0,0 +1,111 @@ +import { Path } from "../Path"; +import { RepoInfo } from "../../RepoInfo"; +import { warnIfPageIsSecure, fatal } from "../util"; + +/** + * @param {!string} pathString + * @return {string} + */ +function decodePath(pathString) { + var pathStringDecoded = ''; + var pieces = pathString.split('/'); + for (var i = 0; i < pieces.length; i++) { + if (pieces[i].length > 0) { + var piece = pieces[i]; + try { + piece = decodeURIComponent(piece.replace(/\+/g, " ")); + } catch (e) {} + pathStringDecoded += '/' + piece; + } + } + return pathStringDecoded; +}; + +/** + * + * @param {!string} dataURL + * @return {{repoInfo: !RepoInfo, path: !Path}} + */ +export const parseRepoInfo = function(dataURL) { + var parsedUrl = parseURL(dataURL), + namespace = parsedUrl.subdomain; + + if (parsedUrl.domain === 'firebase') { + fatal(parsedUrl.host + + ' is no longer supported. ' + + 'Please use .firebaseio.com instead'); + } + + // Catch common error of uninitialized namespace value. + if (!namespace || namespace == 'undefined') { + fatal('Cannot parse Firebase url. Please use https://.firebaseio.com'); + } + + if (!parsedUrl.secure) { + warnIfPageIsSecure(); + } + + var webSocketOnly = (parsedUrl.scheme === 'ws') || (parsedUrl.scheme === 'wss'); + + return { + repoInfo: new RepoInfo(parsedUrl.host, parsedUrl.secure, namespace, webSocketOnly), + path: new Path(parsedUrl.pathString) + }; +}; + +/** + * + * @param {!string} dataURL + * @return {{host: string, port: number, domain: string, subdomain: string, secure: boolean, scheme: string, pathString: string}} + */ +export const parseURL = function(dataURL) { + // Default to empty strings in the event of a malformed string. + var host = '', domain = '', subdomain = '', pathString = ''; + + // Always default to SSL, unless otherwise specified. + var secure = true, scheme = 'https', port = 443; + + // Don't do any validation here. The caller is responsible for validating the result of parsing. + if (typeof dataURL === 'string') { + // Parse scheme. + var colonInd = dataURL.indexOf('//'); + if (colonInd >= 0) { + scheme = dataURL.substring(0, colonInd - 1); + dataURL = dataURL.substring(colonInd + 2); + } + + // Parse host and path. + var slashInd = dataURL.indexOf('/'); + if (slashInd === -1) { + slashInd = dataURL.length; + } + host = dataURL.substring(0, slashInd); + pathString = decodePath(dataURL.substring(slashInd)); + + var parts = host.split('.'); + if (parts.length === 3) { + // Normalize namespaces to lowercase to share storage / connection. + domain = parts[1]; + subdomain = parts[0].toLowerCase(); + } else if (parts.length === 2) { + domain = parts[0]; + } + + // If we have a port, use scheme for determining if it's secure. 
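 + // e.g. 'ws://localhost:9000/foo' ends up with scheme 'ws', port 9000 and secure === false.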
+ colonInd = host.indexOf(':'); + if (colonInd >= 0) { + secure = (scheme === 'https') || (scheme === 'wss'); + port = parseInt(host.substring(colonInd + 1), 10); + } + } + + return { + host: host, + port: port, + domain: domain, + subdomain: subdomain, + secure: secure, + scheme: scheme, + pathString: pathString + }; +}; \ No newline at end of file diff --git a/src/database/core/util/util.ts b/src/database/core/util/util.ts new file mode 100644 index 00000000000..2d64004305f --- /dev/null +++ b/src/database/core/util/util.ts @@ -0,0 +1,693 @@ +declare const Windows; + +import { assert } from '../../../utils/assert'; +import { forEach } from '../../../utils/obj'; +import { base64 } from '../../../utils/crypt'; +import { Sha1 } from '../../../utils/Sha1'; +import { + assert as _assert, + assertionError as _assertError +} from "../../../utils/assert"; +import { stringToByteArray } from "../../../utils/utf8"; +import { stringify } from "../../../utils/json"; +import { SessionStorage } from "../storage/storage"; +import { RepoInfo } from "../RepoInfo"; +import { isNodeSdk } from "../../../utils/environment"; + +/** + * Returns a locally-unique ID (generated by just incrementing up from 0 each time its called). + * @type {function(): number} Generated ID. + */ +export const LUIDGenerator = (function() { + var id = 1; + return function() { + return id++; + }; +})(); + +/** + * Same as fb.util.assert(), but forcefully logs instead of throws. + * @param {*} assertion The assertion to be tested for falsiness + * @param {!string} message The message to be logged on failure + */ +export const assertWeak = function(assertion, message) { + if (!assertion) { + error(message); + } +}; + + +/** + * URL-safe base64 encoding + * @param {!string} str + * @return {!string} + */ +export const base64Encode = function(str) { + var utf8Bytes = stringToByteArray(str); + return base64.encodeByteArray(utf8Bytes, /*useWebSafe=*/true); +}; + + +/** + * URL-safe base64 decoding + * + * NOTE: DO NOT use the global atob() function - it does NOT support the + * base64Url variant encoding. + * + * @param {string} str To be decoded + * @return {?string} Decoded result, if possible + */ +export const base64Decode = function(str) { + try { + if (isNodeSdk()) { + return new Buffer(str, 'base64').toString('utf8'); + } else { + return base64.decodeString(str, /*useWebSafe=*/true); + } + } catch (e) { + log('base64Decode failed: ', e); + } + return null; +}; + + +/** + * Sha1 hash of the input string + * @param {!string} str The string to hash + * @return {!string} The resulting hash + */ +export const sha1 = function(str) { + var utf8Bytes = stringToByteArray(str); + var sha1 = new Sha1(); + sha1.update(utf8Bytes); + var sha1Bytes = sha1.digest(); + return base64.encodeByteArray(sha1Bytes); +}; + + +/** + * @param {...*} var_args + * @return {string} + * @private + */ +export const buildLogMessage_ = function(var_args) { + var message = ''; + for (var i = 0; i < arguments.length; i++) { + if (Array.isArray(arguments[i]) || + (arguments[i] && typeof arguments[i] === 'object' && typeof arguments[i].length === 'number')) { + message += buildLogMessage_.apply(null, arguments[i]); + } + else if (typeof arguments[i] === 'object') { + message += stringify(arguments[i]); + } + else { + message += arguments[i]; + } + message += ' '; + } + + return message; +}; + + +/** + * Use this for all debug messages in Firebase. 
+ * @type {?function(string)} + */ +export var logger = console.log.bind(console); + + +/** + * Flag to check for log availability on first log message + * @type {boolean} + * @private + */ +export var firstLog_ = true; + + +/** + * The implementation of Firebase.enableLogging (defined here to break dependencies) + * @param {boolean|?function(string)} logger A flag to turn on logging, or a custom logger + * @param {boolean=} opt_persistent Whether or not to persist logging settings across refreshes + */ +export const enableLogging = function(logger, opt_persistent?) { + assert(!opt_persistent || (logger === true || logger === false), "Can't turn on custom loggers persistently."); + if (logger === true) { + if (typeof console !== 'undefined') { + if (typeof console.log === 'function') { + logger = console.log.bind(console); + } else if (typeof console.log === 'object') { + // IE does this. + logger = function(message) { console.log(message); }; + } + } + if (opt_persistent) + SessionStorage.set('logging_enabled', true); + } + else if (typeof logger === 'function') { + logger = logger; + } else { + logger = null; + SessionStorage.remove('logging_enabled'); + } +}; + + +/** + * + * @param {...(string|Arguments)} var_args + */ +export const log = function(...var_args) { + if (firstLog_ === true) { + firstLog_ = false; + if (logger === null && SessionStorage.get('logging_enabled') === true) + enableLogging(true); + } + + if (logger) { + var message = buildLogMessage_.apply(null, arguments); + logger(message); + } +}; + + +/** + * @param {!string} prefix + * @return {function(...[*])} + */ +export const logWrapper = function(prefix) { + return function() { + log(prefix, arguments); + }; +}; + + +/** + * @param {...string} var_args + */ +export const error = function(var_args) { + if (typeof console !== 'undefined') { + var message = 'FIREBASE INTERNAL ERROR: ' + + buildLogMessage_.apply(null, arguments); + if (typeof console.error !== 'undefined') { + console.error(message); + } else { + console.log(message); + } + } +}; + + +/** + * @param {...string} var_args + */ +export const fatal = function(var_args) { + var message = buildLogMessage_.apply(null, arguments); + throw new Error('FIREBASE FATAL ERROR: ' + message); +}; + + +/** + * @param {...*} var_args + */ +export const warn = function(...var_args) { + if (typeof console !== 'undefined') { + var message = 'FIREBASE WARNING: ' + buildLogMessage_.apply(null, arguments); + if (typeof console.warn !== 'undefined') { + console.warn(message); + } else { + console.log(message); + } + } +}; + + +/** + * Logs a warning if the containing page uses https. Called when a call to new Firebase + * does not use https. + */ +export const warnIfPageIsSecure = function() { + // Be very careful accessing browser globals. Who knows what may or may not exist. + if (typeof window !== 'undefined' && window.location && window.location.protocol && + window.location.protocol.indexOf('https:') !== -1) { + warn('Insecure Firebase access from a secure page. ' + + 'Please use https in calls to new Firebase().'); + } +}; + + +/** + * @param {!String} methodName + */ +export const warnAboutUnsupportedMethod = function(methodName) { + warn(methodName + + ' is unsupported and will likely change soon. ' + + 'Please do not use.'); +}; + + +/** + * Returns true if data is NaN, or +/- Infinity. 
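 + * e.g. isInvalidJSONNumber(NaN) and isInvalidJSONNumber(1 / 0) are true, while isInvalidJSONNumber(0) is false.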
+ * @param {*} data + * @return {boolean} + */ +export const isInvalidJSONNumber = function(data) { + return typeof data === 'number' && + (data != data || // NaN + data == Number.POSITIVE_INFINITY || + data == Number.NEGATIVE_INFINITY); +}; + + +/** + * @param {function()} fn + */ +export const executeWhenDOMReady = function(fn) { + if (isNodeSdk() || document.readyState === 'complete') { + fn(); + } else { + // Modeled after jQuery. Try DOMContentLoaded and onreadystatechange (which + // fire before onload), but fall back to onload. + + var called = false; + let wrappedFn = function() { + if (!document.body) { + setTimeout(wrappedFn, Math.floor(10)); + return; + } + + if (!called) { + called = true; + fn(); + } + }; + + if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', wrappedFn, false); + // fallback to onload. + window.addEventListener('load', wrappedFn, false); + } else if ((document as any).attachEvent) { + // IE. + (document as any).attachEvent('onreadystatechange', + function() { + if (document.readyState === 'complete') + wrappedFn(); + } + ); + // fallback to onload. + (window as any).attachEvent('onload', wrappedFn); + + // jQuery has an extra hack for IE that we could employ (based on + // http://javascript.nwbox.com/IEContentLoaded/) But it looks really old. + // I'm hoping we don't need it. + } + } +}; + + +/** + * Minimum key name. Invalid for actual data, used as a marker to sort before any valid names + * @type {!string} + */ +export const MIN_NAME = '[MIN_NAME]'; + + +/** + * Maximum key name. Invalid for actual data, used as a marker to sort above any valid names + * @type {!string} + */ +export const MAX_NAME = '[MAX_NAME]'; + + +/** + * Compares valid Firebase key names, plus min and max name + * @param {!string} a + * @param {!string} b + * @return {!number} + */ +export const nameCompare = function(a, b) { + if (a === b) { + return 0; + } else if (a === MIN_NAME || b === MAX_NAME) { + return -1; + } else if (b === MIN_NAME || a === MAX_NAME) { + return 1; + } else { + var aAsInt = tryParseInt(a), + bAsInt = tryParseInt(b); + + if (aAsInt !== null) { + if (bAsInt !== null) { + return (aAsInt - bAsInt) == 0 ? (a.length - b.length) : (aAsInt - bAsInt); + } else { + return -1; + } + } else if (bAsInt !== null) { + return 1; + } else { + return (a < b) ? -1 : 1; + } + } +}; + + +/** + * @param {!string} a + * @param {!string} b + * @return {!number} comparison result. + */ +export const stringCompare = function(a, b) { + if (a === b) { + return 0; + } else if (a < b) { + return -1; + } else { + return 1; + } +}; + + +/** + * @param {string} key + * @param {Object} obj + * @return {*} + */ +export const requireKey = function(key, obj) { + if (obj && (key in obj)) { + return obj[key]; + } else { + throw new Error('Missing required key (' + key + ') in object: ' + stringify(obj)); + } +}; + + +/** + * @param {*} obj + * @return {string} + */ +export const ObjectToUniqueKey = function(obj) { + if (typeof obj !== 'object' || obj === null) + return stringify(obj); + + var keys = []; + for (var k in obj) { + keys.push(k); + } + + // Export as json, but with the keys sorted. 
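 + // e.g. {b: 2, a: 1} and {a: 1, b: 2} both produce the key '{"a":1,"b":2}'.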
+ keys.sort(); + var key = '{'; + for (var i = 0; i < keys.length; i++) { + if (i !== 0) + key += ','; + key += stringify(keys[i]); + key += ':'; + key += ObjectToUniqueKey(obj[keys[i]]); + } + + key += '}'; + return key; +}; + + +/** + * Splits a string into a number of smaller segments of maximum size + * @param {!string} str The string + * @param {!number} segsize The maximum number of chars in the string. + * @return {Array.} The string, split into appropriately-sized chunks + */ +export const splitStringBySize = function(str, segsize) { + if (str.length <= segsize) { + return [str]; + } + + var dataSegs = []; + for (var c = 0; c < str.length; c += segsize) { + if (c + segsize > str) { + dataSegs.push(str.substring(c, str.length)); + } + else { + dataSegs.push(str.substring(c, c + segsize)); + } + } + return dataSegs; +}; + + +/** + * Apply a function to each (key, value) pair in an object or + * apply a function to each (index, value) pair in an array + * @param {!(Object|Array)} obj The object or array to iterate over + * @param {function(?, ?)} fn The function to apply + */ +export const each = function(obj, fn) { + if (Array.isArray(obj)) { + for (var i = 0; i < obj.length; ++i) { + fn(i, obj[i]); + } + } else { + /** + * in the conversion of code we removed the goog.object.forEach + * function which did a value,key callback. We standardized on + * a single impl that does a key, value callback. So we invert + * to not have to touch the `each` code points + */ + forEach(obj, (key, val) => fn(val, key)); + } +}; + + +/** + * Like goog.bind, but doesn't bother to create a closure if opt_context is null/undefined. + * @param {function(*)} callback Callback function. + * @param {?Object=} opt_context Optional context to bind to. + * @return {function(*)} + */ +export const bindCallback = function(callback, opt_context) { + return opt_context ? callback.bind(opt_context) : callback; +}; + + +/** + * Borrowed from http://hg.secondlife.com/llsd/src/tip/js/typedarray.js (MIT License) + * I made one modification at the end and removed the NaN / Infinity + * handling (since it seemed broken [caused an overflow] and we don't need it). See MJL comments. + * @param {!number} v A double + * @return {string} + */ +export const doubleToIEEE754String = function(v) { + assert(!isInvalidJSONNumber(v), 'Invalid JSON number'); // MJL + + var ebits = 11, fbits = 52; + var bias = (1 << (ebits - 1)) - 1, + s, e, f, ln, + i, bits, str, bytes; + + // Compute sign, exponent, fraction + // Skip NaN / Infinity handling --MJL. + if (v === 0) { + e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0; + } + else { + s = v < 0; + v = Math.abs(v); + + if (v >= Math.pow(2, 1 - bias)) { + // Normalized + ln = Math.min(Math.floor(Math.log(v) / Math.LN2), bias); + e = ln + bias; + f = Math.round(v * Math.pow(2, fbits - ln) - Math.pow(2, fbits)); + } + else { + // Denormalized + e = 0; + f = Math.round(v / Math.pow(2, 1 - bias - fbits)); + } + } + + // Pack sign, exponent, fraction + bits = []; + for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = Math.floor(f / 2); } + for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = Math.floor(e / 2); } + bits.push(s ? 1 : 0); + bits.reverse(); + str = bits.join(''); + + // Return the data as a hex string. 
--MJL + var hexByteString = ''; + for (i = 0; i < 64; i += 8) { + var hexByte = parseInt(str.substr(i, 8), 2).toString(16); + if (hexByte.length === 1) + hexByte = '0' + hexByte; + hexByteString = hexByteString + hexByte; + } + return hexByteString.toLowerCase(); +}; + + +/** + * Used to detect if we're in a Chrome content script (which executes in an + * isolated environment where long-polling doesn't work). + * @return {boolean} + */ +export const isChromeExtensionContentScript = function() { + return !!(typeof window === 'object' && + window['chrome'] && + window['chrome']['extension'] && + !/^chrome/.test(window.location.href) + ); +}; + + +/** + * Used to detect if we're in a Windows 8 Store app. + * @return {boolean} + */ +export const isWindowsStoreApp = function() { + // Check for the presence of a couple WinRT globals + return typeof Windows === 'object' && typeof Windows.UI === 'object'; +}; + + +/** + * Converts a server error code to a Javascript Error + * @param {!string} code + * @param {!fb.api.Query} query + * @return {Error} + */ +export const errorForServerCode = function(code, query) { + var reason = 'Unknown Error'; + if (code === 'too_big') { + reason = 'The data requested exceeds the maximum size ' + + 'that can be accessed with a single request.'; + } else if (code == 'permission_denied') { + reason = "Client doesn't have permission to access the desired data."; + } else if (code == 'unavailable') { + reason = 'The service is unavailable'; + } + + var error = new Error(code + ' at ' + query.path.toString() + ': ' + reason); + (error as any).code = code.toUpperCase(); + return error; +}; + + +/** + * Used to test for integer-looking strings + * @type {RegExp} + * @private + */ +export const INTEGER_REGEXP_ = new RegExp('^-?\\d{1,10}$'); + + +/** + * If the string contains a 32-bit integer, return it. Else return null. + * @param {!string} str + * @return {?number} + */ +export const tryParseInt = function(str) { + if (INTEGER_REGEXP_.test(str)) { + var intVal = Number(str); + if (intVal >= -2147483648 && intVal <= 2147483647) { + return intVal; + } + } + return null; +}; + + +/** + * Helper to run some code but catch any exceptions and re-throw them later. + * Useful for preventing user callbacks from breaking internal code. + * + * Re-throwing the exception from a setTimeout is a little evil, but it's very + * convenient (we don't have to try to figure out when is a safe point to + * re-throw it), and the behavior seems reasonable: + * + * * If you aren't pausing on exceptions, you get an error in the console with + * the correct stack trace. + * * If you're pausing on all exceptions, the debugger will pause on your + * exception and then again when we rethrow it. + * * If you're only pausing on uncaught exceptions, the debugger will only pause + * on us re-throwing it. + * + * @param {!function()} fn The code to guard. + */ +export const exceptionGuard = function(fn) { + try { + fn(); + } catch (e) { + // Re-throw exception when it's safe. + setTimeout(function() { + // It used to be that "throw e" would result in a good console error with + // relevant context, but as of Chrome 39, you just get the firebase.js + // file/line number where we re-throw it, which is useless. So we log + // e.stack explicitly. + var stack = e.stack || ''; + warn('Exception was thrown by user callback.', stack); + throw e; + }, Math.floor(0)); + } +}; + + +/** + * Helper function to safely call opt_callback with the specified arguments. It: + * 1. 
Turns into a no-op if opt_callback is null or undefined. + * 2. Wraps the call inside exceptionGuard to prevent exceptions from breaking our state. + * + * @param {?Function=} opt_callback Optional onComplete callback. + * @param {...*} var_args Arbitrary args to be passed to opt_onComplete + */ +export const callUserCallback = function(opt_callback, var_args) { + if (typeof opt_callback === 'function') { + var args = Array.prototype.slice.call(arguments, 1); + var newArgs = args.slice(); + exceptionGuard(function() { + opt_callback.apply(null, newArgs); + }); + } +}; + + +/** + * @return {boolean} true if we think we're currently being crawled. +*/ +export const beingCrawled = function() { + var userAgent = (typeof window === 'object' && window['navigator'] && window['navigator']['userAgent']) || ''; + + // For now we whitelist the most popular crawlers. We should refine this to be the set of crawlers we + // believe to support JavaScript/AJAX rendering. + // NOTE: Google Webmaster Tools doesn't really belong, but their "This is how a visitor to your website + // would have seen the page" is flaky if we don't treat it as a crawler. + return userAgent.search(/googlebot|google webmaster tools|bingbot|yahoo! slurp|baiduspider|yandexbot|duckduckbot/i) >= + 0; +}; + +/** + * Export a property of an object using a getter function. + * + * @param {!Object} object + * @param {string} name + * @param {!function(): *} fnGet + */ +export const exportPropGetter = function(object, name, fnGet) { + Object.defineProperty(object, name, {get: fnGet}); +}; + +/** + * Same as setTimeout() except on Node.JS it will /not/ prevent the process from exiting. + * + * It is removed with clearTimeout() as normal. + * + * @param fn {Function} Function to run. + * @param time {number} Milliseconds to wait before running. + * @return {number|Object} The setTimeout() return value. + */ +export const setTimeoutNonBlocking = function(fn, time) { + var timeout = setTimeout(fn, time); + if (typeof timeout === 'object' && timeout['unref']) { + timeout['unref'](); + } + return timeout; +}; diff --git a/src/database/core/util/validation.ts b/src/database/core/util/validation.ts new file mode 100644 index 00000000000..73305e471d0 --- /dev/null +++ b/src/database/core/util/validation.ts @@ -0,0 +1,380 @@ +import { Path, ValidationPath } from "./Path"; +import { forEach, contains, safeGet } from "../../../utils/obj"; +import { isInvalidJSONNumber } from "./util"; +import { errorPrefix as errorPrefixFxn } from "../../../utils/validation"; +import { stringLength } from "../../../utils/utf8"; + +/** + * True for invalid Firebase keys + * @type {RegExp} + * @private + */ +export const INVALID_KEY_REGEX_ = /[\[\].#$\/\u0000-\u001F\u007F]/; + +/** + * True for invalid Firebase paths. + * Allows '/' in paths. 
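 + * e.g. it does not match 'users/alice', but it does match 'users.alice' because of the '.'.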
+ * @type {RegExp} + * @private + */ +export const INVALID_PATH_REGEX_ = /[\[\].#$\u0000-\u001F\u007F]/; + +/** + * Maximum number of characters to allow in leaf value + * @type {number} + * @private + */ +export const MAX_LEAF_SIZE_ = 10 * 1024 * 1024; + + +/** + * @param {*} key + * @return {boolean} + */ +export const isValidKey = function(key) { + return typeof key === 'string' && key.length !== 0 && + !INVALID_KEY_REGEX_.test(key); +} + +/** + * @param {string} pathString + * @return {boolean} + */ +export const isValidPathString = function(pathString) { + return typeof pathString === 'string' && pathString.length !== 0 && + !INVALID_PATH_REGEX_.test(pathString); +} + +/** + * @param {string} pathString + * @return {boolean} + */ +export const isValidRootPathString = function(pathString) { + if (pathString) { + // Allow '/.info/' at the beginning. + pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); + } + + return isValidPathString(pathString); +} + +/** + * @param {*} priority + * @return {boolean} + */ +export const isValidPriority = function(priority) { + return priority === null || + typeof priority === 'string' || + (typeof priority === 'number' && !isInvalidJSONNumber(priority)) || + ((priority && typeof priority === 'object') && contains(priority, '.sv')); +} + +/** + * Pre-validate a datum passed as an argument to Firebase function. + * + * @param {string} fnName + * @param {number} argumentNumber + * @param {*} data + * @param {!Path} path + * @param {boolean} optional + */ +export const validateFirebaseDataArg = function(fnName, argumentNumber, data, path, optional) { + if (optional && data === undefined) + return; + + validateFirebaseData( + errorPrefixFxn(fnName, argumentNumber, optional), + data, path + ); +} + +/** + * Validate a data object client-side before sending to server. + * + * @param {string} errorPrefix + * @param {*} data + * @param {!Path|!ValidationPath} path + */ +export const validateFirebaseData = function(errorPrefix, data, path) { + if (path instanceof Path) { + path = new ValidationPath(path, errorPrefix); + } + + if (data === undefined) { + throw new Error(errorPrefix + 'contains undefined ' + path.toErrorString()); + } + if (typeof data === 'function') { + throw new Error(errorPrefix + 'contains a function ' + path.toErrorString() + + ' with contents = ' + data.toString()); + } + if (isInvalidJSONNumber(data)) { + throw new Error(errorPrefix + 'contains ' + data.toString() + ' ' + path.toErrorString()); + } + + // Check max leaf size, but try to avoid the utf8 conversion if we can. + if (typeof data === 'string' && + data.length > MAX_LEAF_SIZE_ / 3 && + stringLength(data) > MAX_LEAF_SIZE_) { + throw new Error(errorPrefix + 'contains a string greater than ' + + MAX_LEAF_SIZE_ + + ' utf8 bytes ' + path.toErrorString() + + " ('" + data.substring(0, 50) + "...')"); + } + + // TODO = Perf = Consider combining the recursive validation of keys into NodeFromJSON + // to save extra walking of large objects. + if ((data && typeof data === 'object')) { + var hasDotValue = false, hasActualChild = false; + forEach(data, function(key, value) { + if (key === '.value') { + hasDotValue = true; + } + else if (key !== '.priority' && key !== '.sv') { + hasActualChild = true; + if (!isValidKey(key)) { + throw new Error(errorPrefix + ' contains an invalid key (' + key + ') ' + + path.toErrorString() + + '. 
Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + } + } + + path.push(key); + validateFirebaseData(errorPrefix, value, path); + path.pop(); + }); + + if (hasDotValue && hasActualChild) { + throw new Error(errorPrefix + ' contains ".value" child ' + + path.toErrorString() + + ' in addition to actual children.'); + } + } +} + +/** + * Pre-validate paths passed in the firebase function. + * + * @param {string} errorPrefix + * @param {Array} mergePaths + */ +export const validateFirebaseMergePaths = function(errorPrefix, mergePaths) { + var i, curPath; + for (i = 0; i < mergePaths.length; i++) { + curPath = mergePaths[i]; + var keys = curPath.slice(); + for (var j = 0; j < keys.length; j++) { + if (keys[j] === '.priority' && j === (keys.length - 1)) { + // .priority is OK + } else if (!isValidKey(keys[j])) { + throw new Error(errorPrefix + 'contains an invalid key (' + keys[j] + ') in path ' + + curPath.toString() + + '. Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + } + } + } + + // Check that update keys are not descendants of each other. + // We rely on the property that sorting guarantees that ancestors come + // right before descendants. + mergePaths.sort(Path.comparePaths); + var prevPath = null; + for (i = 0; i < mergePaths.length; i++) { + curPath = mergePaths[i]; + if (prevPath !== null && prevPath.contains(curPath)) { + throw new Error(errorPrefix + 'contains a path ' + prevPath.toString() + + ' that is ancestor of another path ' + curPath.toString()); + } + prevPath = curPath; + } +} + +/** + * pre-validate an object passed as an argument to firebase function ( + * must be an object - e.g. for firebase.update()). + * + * @param {string} fnName + * @param {number} argumentNumber + * @param {*} data + * @param {!Path} path + * @param {boolean} optional + */ +export const validateFirebaseMergeDataArg = function(fnName, argumentNumber, data, path, optional) { + if (optional && data === undefined) + return; + + var errorPrefix = errorPrefixFxn(fnName, argumentNumber, optional); + + if (!(data && typeof data === 'object') || Array.isArray(data)) { + throw new Error(errorPrefix + ' must be an object containing the children to replace.'); + } + + var mergePaths = []; + forEach(data, function(key, value) { + var curPath = new Path(key); + validateFirebaseData(errorPrefix, value, path.child(curPath)); + if (curPath.getBack() === '.priority') { + if (!isValidPriority(value)) { + throw new Error( + errorPrefix + 'contains an invalid value for \'' + curPath.toString() + '\', which must be a valid ' + + 'Firebase priority (a string, finite number, server value, or null).'); + } + } + mergePaths.push(curPath); + }); + validateFirebaseMergePaths(errorPrefix, mergePaths); +} + +export const validatePriority = function(fnName, argumentNumber, priority, optional) { + if (optional && priority === undefined) + return; + if (isInvalidJSONNumber(priority)) + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'is ' + priority.toString() + + ', but must be a valid Firebase priority (a string, finite number, ' + + 'server value, or null).'); + // Special case to allow importing data with a .sv. 
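 + // (isValidPriority treats an object containing a '.sv' key, e.g. {'.sv': 'timestamp'}, as a valid priority.)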
+ if (!isValidPriority(priority)) + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid Firebase priority ' + + '(a string, finite number, server value, or null).'); +} + +export const validateEventType = function(fnName, argumentNumber, eventType, optional) { + if (optional && eventType === undefined) + return; + + switch (eventType) { + case 'value': + case 'child_added': + case 'child_removed': + case 'child_changed': + case 'child_moved': + break; + default: + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid event type = "value", "child_added", "child_removed", ' + + '"child_changed", or "child_moved".'); + } +} + +export const validateKey = function(fnName, argumentNumber, key, optional) { + if (optional && key === undefined) + return; + if (!isValidKey(key)) + throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + + 'was an invalid key = "' + key + + '". Firebase keys must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "/", "[", or "]").'); +} + +export const validatePathString = function(fnName, argumentNumber, pathString, optional) { + if (optional && pathString === undefined) + return; + + if (!isValidPathString(pathString)) + throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + + 'was an invalid path = "' + + pathString + + '". Paths must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "[", or "]"'); +} + +export const validateRootPathString = function(fnName, argumentNumber, pathString, optional) { + if (pathString) { + // Allow '/.info/' at the beginning. + pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); + } + + validatePathString(fnName, argumentNumber, pathString, optional); +} + +export const validateWritablePath = function(fnName, path) { + if (path.getFront() === '.info') { + throw new Error(fnName + ' failed = Can\'t modify data under /.info/'); + } +} + +export const validateUrl = function(fnName, argumentNumber, parsedUrl) { + // TODO = Validate server better. 
+ var pathString = parsedUrl.path.toString(); + if (!(typeof parsedUrl.repoInfo.host === 'string') || parsedUrl.repoInfo.host.length === 0 || + !isValidKey(parsedUrl.repoInfo.namespace) || + (pathString.length !== 0 && !isValidRootPathString(pathString))) { + throw new Error(errorPrefixFxn(fnName, argumentNumber, false) + + 'must be a valid firebase URL and ' + + 'the path can\'t contain ".", "#", "$", "[", or "]".'); + } +} + +export const validateCredential = function(fnName, argumentNumber, cred, optional) { + if (optional && cred === undefined) + return; + if (!(typeof cred === 'string')) + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid credential (a string).'); +} + +export const validateBoolean = function(fnName, argumentNumber, bool, optional) { + if (optional && bool === undefined) + return; + if (typeof bool !== 'boolean') + throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a boolean.'); +} + +export const validateString = function(fnName, argumentNumber, string, optional) { + if (optional && string === undefined) + return; + if (!(typeof string === 'string')) { + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid string.'); + } +} + +export const validateObject = function(fnName, argumentNumber, obj, optional) { + if (optional && obj === undefined) + return; + if (!(obj && typeof obj === 'object') || obj === null) { + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid object.'); + } +} + +export const validateObjectContainsKey = function(fnName, argumentNumber, obj, key, optional, opt_type) { + var objectContainsKey = ((obj && typeof obj === 'object') && contains(obj, key)); + + if (!objectContainsKey) { + if (optional) { + return; + } else { + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must contain the key "' + key + '"'); + } + } + + if (opt_type) { + var val = safeGet(obj, key); + if ((opt_type === 'number' && !(typeof val === 'number')) || + (opt_type === 'string' && !(typeof val === 'string')) || + (opt_type === 'boolean' && !(typeof val === 'boolean')) || + (opt_type === 'function' && !(typeof val === 'function')) || + (opt_type === 'object' && !(typeof val === 'object') && val)) { + if (optional) { + throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + + 'contains invalid value for key "' + key + '" (must be of type "' + opt_type + '")'); + } else { + throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + + 'must contain the key "' + key + '" with type "' + opt_type + '"'); + } + } + } +} diff --git a/src/database/core/view/CacheNode.ts b/src/database/core/view/CacheNode.ts new file mode 100644 index 00000000000..db24e74a7c7 --- /dev/null +++ b/src/database/core/view/CacheNode.ts @@ -0,0 +1,66 @@ +import { Node } from '../snap/Node'; +import { Path } from '../util/Path'; + +/** + * A cache node only stores complete children. Additionally it holds a flag whether the node can be considered fully + * initialized in the sense that we know at one point in time this represented a valid state of the world, e.g. + * initialized with data from the server, or a complete overwrite by the client. The filtered flag also tracks + * whether a node potentially had children removed due to a filter. 
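+ *
+ * A minimal sketch of how the two flags interact, assuming `serverNode` is a
+ * complete Node received from the server (illustrative only):
+ *
+ * @example
+ *   // fully initialized and unfiltered: complete for any path
+ *   const complete = new CacheNode(serverNode, true, false);
+ *   complete.isCompleteForPath(new Path('foo/bar')); // true
+ *
+ *   // filtered (e.g. by a limit): only complete for children still present
+ *   const limited = new CacheNode(serverNode, true, true);
+ *   limited.isCompleteForChild('foo'); // true only if 'foo' survived the filter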
+ */ +export class CacheNode { + /** + * @param {!Node} node_ + * @param {boolean} fullyInitialized_ + * @param {boolean} filtered_ + */ + constructor(private node_: Node, + private fullyInitialized_: boolean, + private filtered_: boolean) { + + } + + /** + * Returns whether this node was fully initialized with either server data or a complete overwrite by the client + * @return {boolean} + */ + isFullyInitialized(): boolean { + return this.fullyInitialized_; + } + + /** + * Returns whether this node is potentially missing children due to a filter applied to the node + * @return {boolean} + */ + isFiltered(): boolean { + return this.filtered_; + } + + /** + * @param {!Path} path + * @return {boolean} + */ + isCompleteForPath(path: Path): boolean { + if (path.isEmpty()) { + return this.isFullyInitialized() && !this.filtered_; + } + + const childKey = path.getFront(); + return this.isCompleteForChild(childKey); + } + + /** + * @param {!string} key + * @return {boolean} + */ + isCompleteForChild(key: string): boolean { + return (this.isFullyInitialized() && !this.filtered_) || this.node_.hasChild(key); + } + + /** + * @return {!Node} + */ + getNode(): Node { + return this.node_; + } + +} diff --git a/src/database/core/view/Change.ts b/src/database/core/view/Change.ts new file mode 100644 index 00000000000..8f9396849a4 --- /dev/null +++ b/src/database/core/view/Change.ts @@ -0,0 +1,81 @@ +import { Node } from '../snap/Node'; + +/** + * @constructor + * @struct + * @param {!string} type The event type + * @param {!Node} snapshotNode The data + * @param {string=} childName The name for this child, if it's a child event + * @param {Node=} oldSnap Used for intermediate processing of child changed events + * @param {string=} prevName The name for the previous child, if applicable + */ +export class Change { + constructor(public type: string, + public snapshotNode: Node, + public childName?: string, + public oldSnap?: Node, + public prevName?: string) { + }; + + /** + * @param {!Node} snapshot + * @return {!Change} + */ + static valueChange(snapshot: Node): Change { + return new Change(Change.VALUE, snapshot); + }; + + /** + * @param {string} childKey + * @param {!Node} snapshot + * @return {!Change} + */ + static childAddedChange(childKey: string, snapshot: Node): Change { + return new Change(Change.CHILD_ADDED, snapshot, childKey); + }; + + /** + * @param {string} childKey + * @param {!Node} snapshot + * @return {!Change} + */ + static childRemovedChange(childKey: string, snapshot: Node): Change { + return new Change(Change.CHILD_REMOVED, snapshot, childKey); + }; + + /** + * @param {string} childKey + * @param {!Node} newSnapshot + * @param {!Node} oldSnapshot + * @return {!Change} + */ + static childChangedChange(childKey: string, newSnapshot: Node, oldSnapshot: Node): Change { + return new Change(Change.CHILD_CHANGED, newSnapshot, childKey, oldSnapshot); + }; + + /** + * @param {string} childKey + * @param {!Node} snapshot + * @return {!Change} + */ + static childMovedChange(childKey: string, snapshot: Node): Change { + return new Change(Change.CHILD_MOVED, snapshot, childKey); + }; + + //event types + /** Event type for a child added */ + static CHILD_ADDED = 'child_added'; + + /** Event type for a child removed */ + static CHILD_REMOVED = 'child_removed'; + + /** Event type for a child changed */ + static CHILD_CHANGED = 'child_changed'; + + /** Event type for a child moved */ + static CHILD_MOVED = 'child_moved'; + + /** Event type for a value change */ + static VALUE = 'value'; +} + diff 
--git a/src/database/core/view/ChildChangeAccumulator.ts b/src/database/core/view/ChildChangeAccumulator.ts new file mode 100644 index 00000000000..ae082bf2c2f --- /dev/null +++ b/src/database/core/view/ChildChangeAccumulator.ts @@ -0,0 +1,53 @@ +import { getValues, safeGet } from '../../../utils/obj'; +import { Change } from "./Change"; +import { assert, assertionError } from "../../../utils/assert"; + +/** + * @constructor + */ +export class ChildChangeAccumulator { + changeMap_ = {}; + + /** + * @param {!Change} change + */ + trackChildChange(change: Change) { + const type = change.type; + const childKey = /** @type {!string} */ (change.childName); + assert(type == Change.CHILD_ADDED || + type == Change.CHILD_CHANGED || + type == Change.CHILD_REMOVED, 'Only child changes supported for tracking'); + assert(childKey !== '.priority', 'Only non-priority child changes can be tracked.'); + const oldChange = safeGet(this.changeMap_, childKey); + if (oldChange) { + const oldType = oldChange.type; + if (type == Change.CHILD_ADDED && oldType == Change.CHILD_REMOVED) { + this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, oldChange.snapshotNode); + } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_ADDED) { + delete this.changeMap_[childKey]; + } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_CHANGED) { + this.changeMap_[childKey] = Change.childRemovedChange(childKey, + /** @type {!Node} */ (oldChange.oldSnap)); + } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_ADDED) { + this.changeMap_[childKey] = Change.childAddedChange(childKey, change.snapshotNode); + } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_CHANGED) { + this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, + /** @type {!Node} */ (oldChange.oldSnap)); + } else { + throw assertionError('Illegal combination of changes: ' + change + ' occurred after ' + oldChange); + } + } else { + this.changeMap_[childKey] = change; + } + }; + + + /** + * @return {!Array.} + */ + getChanges(): Change[] { + return getValues(this.changeMap_); + }; +} + + diff --git a/src/database/core/view/CompleteChildSource.ts b/src/database/core/view/CompleteChildSource.ts new file mode 100644 index 00000000000..18f5602614d --- /dev/null +++ b/src/database/core/view/CompleteChildSource.ts @@ -0,0 +1,110 @@ +import { CacheNode } from './CacheNode'; +import { NamedNode, Node } from '../snap/Node'; +import { Index } from '../snap/indexes/Index'; +import { WriteTreeRef } from '../WriteTree'; +import { ViewCache } from './ViewCache'; + +/** + * Since updates to filtered nodes might require nodes to be pulled in from "outside" the node, this interface + * can help to get complete children that can be pulled in. + * A class implementing this interface takes potentially multiple sources (e.g. user writes, server data from + * other views etc.) to try it's best to get a complete child that might be useful in pulling into the view. 
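+ *
+ * A minimal sketch of an implementation (the concrete ones used by the SDK,
+ * NoCompleteChildSource_ and WriteTreeCompleteChildSource, follow below; the
+ * class name here is illustrative):
+ *
+ * @example
+ *   class EmptySource implements CompleteChildSource {
+ *     getCompleteChild(childKey: string) { return null; }  // nothing to pull in
+ *     getChildAfterChild(index: Index, child: NamedNode, reverse: boolean) { return null; }
+ *   }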
+ * + * @interface + */ +export interface CompleteChildSource { + /** + * @param {!string} childKey + * @return {?Node} + */ + getCompleteChild(childKey: string): Node | null; + + /** + * @param {!Index} index + * @param {!NamedNode} child + * @param {boolean} reverse + * @return {?NamedNode} + */ + getChildAfterChild(index: Index, child: NamedNode, reverse: boolean): NamedNode | null; +} + + +/** + * An implementation of CompleteChildSource that never returns any additional children + * + * @private + * @constructor + * @implements CompleteChildSource + */ +export class NoCompleteChildSource_ implements CompleteChildSource { + + /** + * @inheritDoc + */ + getCompleteChild() { + return null; + } + + /** + * @inheritDoc + */ + getChildAfterChild() { + return null; + } +} + + +/** + * Singleton instance. + * @const + * @type {!CompleteChildSource} + */ +export const NO_COMPLETE_CHILD_SOURCE = new NoCompleteChildSource_(); + + +/** + * An implementation of CompleteChildSource that uses a WriteTree in addition to any other server data or + * old event caches available to calculate complete children. + * + * + * @implements CompleteChildSource + */ +export class WriteTreeCompleteChildSource implements CompleteChildSource { + /** + * @param {!WriteTreeRef} writes_ + * @param {!ViewCache} viewCache_ + * @param {?Node} optCompleteServerCache_ + */ + constructor(private writes_: WriteTreeRef, + private viewCache_: ViewCache, + private optCompleteServerCache_: Node | null = null) { + } + + /** + * @inheritDoc + */ + getCompleteChild(childKey) { + const node = this.viewCache_.getEventCache(); + if (node.isCompleteForChild(childKey)) { + return node.getNode().getImmediateChild(childKey); + } else { + const serverNode = this.optCompleteServerCache_ != null ? + new CacheNode(this.optCompleteServerCache_, true, false) : this.viewCache_.getServerCache(); + return this.writes_.calcCompleteChild(childKey, serverNode); + } + } + + /** + * @inheritDoc + */ + getChildAfterChild(index, child, reverse) { + const completeServerData = this.optCompleteServerCache_ != null ? this.optCompleteServerCache_ : + this.viewCache_.getCompleteServerSnap(); + const nodes = this.writes_.calcIndexedSlice(completeServerData, child, 1, reverse, index); + if (nodes.length === 0) { + return null; + } else { + return nodes[0]; + } + } +} diff --git a/src/database/core/view/Event.ts b/src/database/core/view/Event.ts new file mode 100644 index 00000000000..a18729e1538 --- /dev/null +++ b/src/database/core/view/Event.ts @@ -0,0 +1,124 @@ +import { stringify } from '../../../utils/json'; +import { Path } from '../util/Path'; +import { EventRegistration } from './EventRegistration'; +import { DataSnapshot } from '../../api/DataSnapshot'; + +/** + * Encapsulates the data needed to raise an event + * @interface + */ +export interface Event { + /** + * @return {!Path} + */ + getPath(): Path; + + /** + * @return {!string} + */ + getEventType(): string; + + /** + * @return {!function()} + */ + getEventRunner(): () => void; + + /** + * @return {!string} + */ + toString(): string; +} + + +/** + * Encapsulates the data needed to raise an event + * @implements {Event} + */ +export class DataEvent implements Event { + /** + * @param {!string} eventType One of: value, child_added, child_changed, child_moved, child_removed + * @param {!EventRegistration} eventRegistration The function to call to with the event data. 
User provided + * @param {!DataSnapshot} snapshot The data backing the event + * @param {?string=} prevName Optional, the name of the previous child for child_* events. + */ + constructor(public eventType: 'value' | ' child_added' | ' child_changed' | ' child_moved' | ' child_removed', + public eventRegistration: EventRegistration, + public snapshot: DataSnapshot, + public prevName?: string | null) { + } + + /** + * @inheritDoc + */ + getPath(): Path { + const ref = this.snapshot.getRef(); + if (this.eventType === 'value') { + return ref.path; + } else { + return ref.getParent().path; + } + } + + /** + * @inheritDoc + */ + getEventType(): string { + return this.eventType; + } + + /** + * @inheritDoc + */ + getEventRunner(): () => void { + return this.eventRegistration.getEventRunner(this); + } + + /** + * @inheritDoc + */ + toString(): string { + return this.getPath().toString() + ':' + this.eventType + ':' + + stringify(this.snapshot.exportVal()); + } +} + + +export class CancelEvent implements Event { + /** + * @param {EventRegistration} eventRegistration + * @param {Error} error + * @param {!Path} path + */ + constructor(public eventRegistration: EventRegistration, + public error: Error, + public path: Path) { + } + + /** + * @inheritDoc + */ + getPath(): Path { + return this.path; + } + + /** + * @inheritDoc + */ + getEventType(): string { + return 'cancel'; + } + + /** + * @inheritDoc + */ + getEventRunner(): () => any { + return this.eventRegistration.getEventRunner(this); + } + + /** + * @inheritDoc + */ + toString(): string { + return this.path.toString() + ':cancel'; + } +} diff --git a/src/database/core/view/EventGenerator.ts b/src/database/core/view/EventGenerator.ts new file mode 100644 index 00000000000..9ef6061a4a5 --- /dev/null +++ b/src/database/core/view/EventGenerator.ts @@ -0,0 +1,117 @@ +import { NamedNode, Node } from '../snap/Node'; +import { Change } from "./Change"; +import { assertionError } from "../../../utils/assert"; +import { Query } from '../../api/Query'; +import { Index } from '../snap/indexes/Index'; +import { EventRegistration } from './EventRegistration'; +import { Event } from './Event'; + +/** + * An EventGenerator is used to convert "raw" changes (Change) as computed by the + * CacheDiffer into actual events (Event) that can be raised. See generateEventsForChanges() + * for details. + * + * @param {!Query} query + * @constructor + */ +export class EventGenerator { + private index_: Index; + + constructor(private query_: Query) { + /** + * @private + * @type {!Index} + */ + this.index_ = this.query_.getQueryParams().getIndex(); + } + + /** + * Given a set of raw changes (no moved events and prevName not specified yet), and a set of + * EventRegistrations that should be notified of these changes, generate the actual events to be raised. + * + * Notes: + * - child_moved events will be synthesized at this time for any child_changed events that affect + * our index. + * - prevName will be calculated based on the index ordering. 
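+ *
+ * A sketch of the expected usage (here `query`, `changes`, `eventCache` and
+ * `eventRegistrations` are assumed to be supplied by the owning View):
+ *
+ * @example
+ *   const generator = new EventGenerator(query);
+ *   // events come back ordered: child_removed, child_added, child_moved,
+ *   // child_changed, then value, each group sorted by the query's index
+ *   const events = generator.generateEventsForChanges(changes, eventCache, eventRegistrations);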
+ * + * @param {!Array.} changes + * @param {!Node} eventCache + * @param {!Array.} eventRegistrations + * @return {!Array.} + */ + generateEventsForChanges(changes: Change[], eventCache: Node, eventRegistrations: EventRegistration[]): Event[] { + const events = []; + const moves = []; + + changes.forEach((change) => { + if (change.type === Change.CHILD_CHANGED && + this.index_.indexedValueChanged(/** @type {!Node} */ (change.oldSnap), change.snapshotNode)) { + moves.push(Change.childMovedChange(/** @type {!string} */ (change.childName), change.snapshotNode)); + } + }); + + this.generateEventsForType_(events, Change.CHILD_REMOVED, changes, eventRegistrations, eventCache); + this.generateEventsForType_(events, Change.CHILD_ADDED, changes, eventRegistrations, eventCache); + this.generateEventsForType_(events, Change.CHILD_MOVED, moves, eventRegistrations, eventCache); + this.generateEventsForType_(events, Change.CHILD_CHANGED, changes, eventRegistrations, eventCache); + this.generateEventsForType_(events, Change.VALUE, changes, eventRegistrations, eventCache); + + return events; + } + + /** + * Given changes of a single change type, generate the corresponding events. + * + * @param {!Array.} events + * @param {!string} eventType + * @param {!Array.} changes + * @param {!Array.} registrations + * @param {!Node} eventCache + * @private + */ + private generateEventsForType_(events: Event[], eventType: string, changes: Change[], + registrations: EventRegistration[], eventCache: Node) { + const filteredChanges = changes.filter((change) => change.type === eventType); + + filteredChanges.sort(this.compareChanges_.bind(this)); + filteredChanges.forEach((change) => { + const materializedChange = this.materializeSingleChange_(change, eventCache); + registrations.forEach((registration) => { + if (registration.respondsTo(change.type)) { + events.push(registration.createEvent(materializedChange, this.query_)); + } + }); + }); + } + + /** + * @param {!Change} change + * @param {!Node} eventCache + * @return {!Change} + * @private + */ + private materializeSingleChange_(change: Change, eventCache: Node): Change { + if (change.type === 'value' || change.type === 'child_removed') { + return change; + } else { + change.prevName = eventCache.getPredecessorChildName(/** @type {!string} */ (change.childName), change.snapshotNode, + this.index_); + return change; + } + } + + /** + * @param {!Change} a + * @param {!Change} b + * @return {number} + * @private + */ + private compareChanges_(a: Change, b: Change) { + if (a.childName == null || b.childName == null) { + throw assertionError('Should only compare child_ events.'); + } + const aWrapped = new NamedNode(a.childName, a.snapshotNode); + const bWrapped = new NamedNode(b.childName, b.snapshotNode); + return this.index_.compare(aWrapped, bWrapped); + } +} diff --git a/src/database/core/view/EventQueue.ts b/src/database/core/view/EventQueue.ts new file mode 100644 index 00000000000..2fd51ddb91a --- /dev/null +++ b/src/database/core/view/EventQueue.ts @@ -0,0 +1,165 @@ +import { Path } from '../util/Path'; +import { log, logger, exceptionGuard } from '../util/util'; +import { Event } from './Event'; + +/** + * The event queue serves a few purposes: + * 1. It ensures we maintain event order in the face of event callbacks doing operations that result in more + * events being queued. + * 2. raiseQueuedEvents() handles being called reentrantly nicely. 
That is, if in the course of raising events, + * raiseQueuedEvents() is called again, the "inner" call will pick up raising events where the "outer" call + * left off, ensuring that the events are still raised synchronously and in order. + * 3. You can use raiseEventsAtPath and raiseEventsForChangedPath to ensure only relevant previously-queued + * events are raised synchronously. + * + * NOTE: This can all go away if/when we move to async events. + * + * @constructor + */ +export class EventQueue { + /** + * @private + * @type {!Array.} + */ + private eventLists_: EventList[] = []; + + /** + * Tracks recursion depth of raiseQueuedEvents_, for debugging purposes. + * @private + * @type {!number} + */ + private recursionDepth_ = 0; + + + /** + * @param {!Array.} eventDataList The new events to queue. + */ + queueEvents(eventDataList: Event[]) { + // We group events by path, storing them in a single EventList, to make it easier to skip over them quickly. + let currList = null; + for (let i = 0; i < eventDataList.length; i++) { + const eventData = eventDataList[i]; + const eventPath = eventData.getPath(); + if (currList !== null && !eventPath.equals(currList.getPath())) { + this.eventLists_.push(currList); + currList = null; + } + + if (currList === null) { + currList = new EventList(eventPath); + } + + currList.add(eventData); + } + if (currList) { + this.eventLists_.push(currList); + } + } + + /** + * Queues the specified events and synchronously raises all events (including previously queued ones) + * for the specified path. + * + * It is assumed that the new events are all for the specified path. + * + * @param {!Path} path The path to raise events for. + * @param {!Array.} eventDataList The new events to raise. + */ + raiseEventsAtPath(path: Path, eventDataList: Event[]) { + this.queueEvents(eventDataList); + this.raiseQueuedEventsMatchingPredicate_((eventPath: Path) => eventPath.equals(path)); + } + + /** + * Queues the specified events and synchronously raises all events (including previously queued ones) for + * locations related to the specified change path (i.e. all ancestors and descendants). + * + * It is assumed that the new events are all related (ancestor or descendant) to the specified path. + * + * @param {!Path} changedPath The path to raise events for. 
+ * @param {!Array.} eventDataList The events to raise + */ + raiseEventsForChangedPath(changedPath: Path, eventDataList: Event[]) { + this.queueEvents(eventDataList); + + this.raiseQueuedEventsMatchingPredicate_((eventPath: Path) => { + return eventPath.contains(changedPath) || changedPath.contains(eventPath); + }); + }; + + /** + * @param {!function(!Path):boolean} predicate + * @private + */ + private raiseQueuedEventsMatchingPredicate_(predicate: (path: Path) => boolean) { + this.recursionDepth_++; + + let sentAll = true; + for (let i = 0; i < this.eventLists_.length; i++) { + const eventList = this.eventLists_[i]; + if (eventList) { + const eventPath = eventList.getPath(); + if (predicate(eventPath)) { + this.eventLists_[i].raise(); + this.eventLists_[i] = null; + } else { + sentAll = false; + } + } + } + + if (sentAll) { + this.eventLists_ = []; + } + + this.recursionDepth_--; + } +} + + +/** + * @param {!Path} path + * @constructor + */ +export class EventList { + /** + * @type {!Array.} + * @private + */ + private events_: Event[] = []; + + constructor(private readonly path_: Path) { + } + + /** + * @param {!Event} eventData + */ + add(eventData: Event) { + this.events_.push(eventData); + } + + /** + * Iterates through the list and raises each event + */ + raise() { + for (let i = 0; i < this.events_.length; i++) { + const eventData = this.events_[i]; + if (eventData !== null) { + this.events_[i] = null; + const eventFn = eventData.getEventRunner(); + if (logger) { + log('event: ' + eventData.toString()); + } + exceptionGuard(eventFn); + } + } + } + + /** + * @return {!Path} + */ + getPath(): Path { + return this.path_; + } +} + diff --git a/src/database/core/view/EventRegistration.ts b/src/database/core/view/EventRegistration.ts new file mode 100644 index 00000000000..39febc024f1 --- /dev/null +++ b/src/database/core/view/EventRegistration.ts @@ -0,0 +1,260 @@ +import { DataSnapshot } from '../../api/DataSnapshot'; +import { DataEvent, CancelEvent, Event } from './Event'; +import { contains, getCount, getAnyKey, every } from '../../../utils/obj'; +import { assert } from '../../../utils/assert'; +import { Path } from '../util/Path'; +import { Change } from './Change'; +import { Query } from '../../api/Query'; + +/** + * An EventRegistration is basically an event type ('value', 'child_added', etc.) and a callback + * to be notified of that type of event. + * + * That said, it can also contain a cancel callback to be notified if the event is canceled. And + * currently, this code is organized around the idea that you would register multiple child_ callbacks + * together, as a single EventRegistration. Though currently we don't do that. 
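+ *
+ * A rough sketch of how a registration is consumed (the `change` and `query`
+ * values are assumed to be supplied by the View):
+ *
+ * @example
+ *   const registration: EventRegistration =
+ *       new ValueEventRegistration(snap => console.log(snap.val()), null, null);
+ *   if (registration.respondsTo('value')) {
+ *     const event = registration.createEvent(change, query);
+ *     event.getEventRunner()(); // invokes the user callback with a DataSnapshot
+ *   }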
+ */ +export interface EventRegistration { + /** + * True if this container has a callback to trigger for this event type + * @param {!string} eventType + * @return {boolean} + */ + respondsTo(eventType: string): boolean; + + /** + * @param {!Change} change + * @param {!Query} query + * @return {!Event} + */ + createEvent(change: Change, query: Query): Event; + + /** + * Given event data, return a function to trigger the user's callback + * @param {!Event} eventData + * @return {function()} + */ + getEventRunner(eventData: Event): () => any; + + /** + * @param {!Error} error + * @param {!Path} path + * @return {?CancelEvent} + */ + createCancelEvent(error: Error, path: Path): CancelEvent | null; + + /** + * @param {!EventRegistration} other + * @return {boolean} + */ + matches(other: EventRegistration): boolean; + + /** + * False basically means this is a "dummy" callback container being used as a sentinel + * to remove all callback containers of a particular type. (e.g. if the user does + * ref.off('value') without specifying a specific callback). + * + * (TODO: Rework this, since it's hacky) + * + * @return {boolean} + */ + hasAnyCallback(): boolean; +} + + +/** + * Represents registration for 'value' events. + */ +export class ValueEventRegistration implements EventRegistration { + /** + * @param {?function(!DataSnapshot)} callback_ + * @param {?function(Error)} cancelCallback_ + * @param {?Object} context_ + */ + constructor(private callback_: ((d: DataSnapshot) => any) | null, + private cancelCallback_: ((e: Error) => any) | null, + private context_: Object | null) { + } + + /** + * @inheritDoc + */ + respondsTo(eventType: string): boolean { + return eventType === 'value'; + } + + /** + * @inheritDoc + */ + createEvent(change: Change, query: Query): DataEvent { + const index = query.getQueryParams().getIndex(); + return new DataEvent('value', this, new DataSnapshot(change.snapshotNode, query.getRef(), index)); + } + + /** + * @inheritDoc + */ + getEventRunner(eventData: CancelEvent | DataEvent): () => void { + const ctx = this.context_; + if (eventData.getEventType() === 'cancel') { + assert(this.cancelCallback_, 'Raising a cancel event on a listener with no cancel callback'); + const cancelCB = this.cancelCallback_; + return function () { + // We know that error exists, we checked above that this is a cancel event + cancelCB.call(ctx, (eventData).error); + }; + } else { + const cb = this.callback_; + return function () { + cb.call(ctx, (eventData).snapshot); + }; + } + } + + /** + * @inheritDoc + */ + createCancelEvent(error: Error, path: Path): CancelEvent | null { + if (this.cancelCallback_) { + return new CancelEvent(this, error, path); + } else { + return null; + } + } + + /** + * @inheritDoc + */ + matches(other: EventRegistration): boolean { + if (!(other instanceof ValueEventRegistration)) { + return false; + } else if (!other.callback_ || !this.callback_) { + // If no callback specified, we consider it to match any callback. + return true; + } else { + return other.callback_ === this.callback_ && other.context_ === this.context_; + } + } + + /** + * @inheritDoc + */ + hasAnyCallback(): boolean { + return this.callback_ !== null; + } +} + +/** + * Represents the registration of 1 or more child_xxx events. + * + * Currently, it is always exactly 1 child_xxx event, but the idea is we might let you + * register a group of callbacks together in the future. 
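+ *
+ * A sketch of the callbacks map it wraps (illustrative only):
+ *
+ * @example
+ *   const registration = new ChildEventRegistration(
+ *     { 'child_added': (snap, prevName) => console.log(snap.val(), prevName) },
+ *     null,   // no cancel callback
+ *     null);  // no context
+ *   registration.respondsTo('child_added');   // true
+ *   registration.respondsTo('child_removed'); // false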
+ * + * @constructor + * @implements {EventRegistration} + */ +export class ChildEventRegistration implements EventRegistration { + /** + * @param {?Object.} callbacks_ + * @param {?function(Error)} cancelCallback_ + * @param {Object=} context_ + */ + constructor(private callbacks_: ({ [k: string]: (d: DataSnapshot, s?: string | null) => any }) | null, + private cancelCallback_: ((e: Error) => any) | null, + private context_: Object) { + } + + /** + * @inheritDoc + */ + respondsTo(eventType): boolean { + let eventToCheck = eventType === 'children_added' ? 'child_added' : eventType; + eventToCheck = eventToCheck === 'children_removed' ? 'child_removed' : eventToCheck; + return contains(this.callbacks_, eventToCheck); + } + + /** + * @inheritDoc + */ + createCancelEvent(error: Error, path: Path): CancelEvent | null { + if (this.cancelCallback_) { + return new CancelEvent(this, error, path); + } else { + return null; + } + } + + /** + * @inheritDoc + */ + createEvent(change: Change, query: Query): DataEvent { + assert(change.childName != null, 'Child events should have a childName.'); + const ref = query.getRef().child(/** @type {!string} */ (change.childName)); + const index = query.getQueryParams().getIndex(); + return new DataEvent(change.type, this, new DataSnapshot(change.snapshotNode, ref, index), + change.prevName); + } + + /** + * @inheritDoc + */ + getEventRunner(eventData: CancelEvent | DataEvent): () => void { + const ctx = this.context_; + if (eventData.getEventType() === 'cancel') { + assert(this.cancelCallback_, 'Raising a cancel event on a listener with no cancel callback'); + const cancelCB = this.cancelCallback_; + return function () { + // We know that error exists, we checked above that this is a cancel event + cancelCB.call(ctx, (eventData).error); + }; + } else { + const cb = this.callbacks_[(eventData).eventType]; + return function () { + cb.call(ctx, (eventData).snapshot, (eventData).prevName); + } + } + } + + /** + * @inheritDoc + */ + matches(other: EventRegistration): boolean { + if (other instanceof ChildEventRegistration) { + if (!this.callbacks_ || !other.callbacks_) { + return true; + } else if (this.context_ === other.context_) { + const otherCount = getCount(other.callbacks_); + const thisCount = getCount(this.callbacks_); + if (otherCount === thisCount) { + // If count is 1, do an exact match on eventType, if either is defined but null, it's a match. + // If event types don't match, not a match + // If count is not 1, exact match across all + + if (otherCount === 1) { + const otherKey = /** @type {!string} */ (getAnyKey(other.callbacks_)); + const thisKey = /** @type {!string} */ (getAnyKey(this.callbacks_)); + return (thisKey === otherKey && ( + !other.callbacks_[otherKey] || + !this.callbacks_[thisKey] || + other.callbacks_[otherKey] === this.callbacks_[thisKey] + ) + ); + } else { + // Exact match on each key. 
+ return every(this.callbacks_, (eventType, cb) => other.callbacks_[eventType] === cb); + } + } + } + } + + return false; + } + + /** + * @inheritDoc + */ + hasAnyCallback(): boolean { + return (this.callbacks_ !== null); + } +} + diff --git a/src/database/core/view/QueryParams.ts b/src/database/core/view/QueryParams.ts new file mode 100644 index 00000000000..a2c8b320a65 --- /dev/null +++ b/src/database/core/view/QueryParams.ts @@ -0,0 +1,425 @@ +import { assert } from "../../../utils/assert"; +import { + MIN_NAME, + MAX_NAME +} from "../util/util"; +import { KEY_INDEX } from "../snap/indexes/KeyIndex"; +import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; +import { VALUE_INDEX } from "../snap/indexes/ValueIndex"; +import { PathIndex } from "../snap/indexes/PathIndex"; +import { IndexedFilter } from "./filter/IndexedFilter"; +import { LimitedFilter } from "./filter/LimitedFilter"; +import { RangedFilter } from "./filter/RangedFilter"; +import { stringify } from "../../../utils/json"; + +/** + * This class is an immutable-from-the-public-api struct containing a set of query parameters defining a + * range to be returned for a particular location. It is assumed that validation of parameters is done at the + * user-facing API level, so it is not done here. + * @constructor + */ +export class QueryParams { + endNameSet_ + endSet_ + index_ + indexEndName_ + indexEndValue_ + indexStartName_ + indexStartValue_ + limit_ + limitSet_ + startEndSet_ + startNameSet_ + startSet_ + viewFrom_ + + constructor() { + this.limitSet_ = false; + this.startSet_ = false; + this.startNameSet_ = false; + this.endSet_ = false; + this.endNameSet_ = false; + + this.limit_ = 0; + this.viewFrom_ = ''; + this.indexStartValue_ = null; + this.indexStartName_ = ''; + this.indexEndValue_ = null; + this.indexEndName_ = ''; + + this.index_ = PRIORITY_INDEX; + }; + /** + * Wire Protocol Constants + * @const + * @enum {string} + * @private + */ + private static WIRE_PROTOCOL_CONSTANTS_ = { + INDEX_START_VALUE: 'sp', + INDEX_START_NAME: 'sn', + INDEX_END_VALUE: 'ep', + INDEX_END_NAME: 'en', + LIMIT: 'l', + VIEW_FROM: 'vf', + VIEW_FROM_LEFT: 'l', + VIEW_FROM_RIGHT: 'r', + INDEX: 'i' + }; + + /** + * REST Query Constants + * @const + * @enum {string} + * @private + */ + private static REST_QUERY_CONSTANTS_ = { + ORDER_BY: 'orderBy', + PRIORITY_INDEX: '$priority', + VALUE_INDEX: '$value', + KEY_INDEX: '$key', + START_AT: 'startAt', + END_AT: 'endAt', + LIMIT_TO_FIRST: 'limitToFirst', + LIMIT_TO_LAST: 'limitToLast' + }; + + /** + * Default, empty query parameters + * @type {!QueryParams} + * @const + */ + static DEFAULT = new QueryParams(); + + /** + * @return {boolean} + */ + hasStart() { + return this.startSet_; + }; + + /** + * @return {boolean} True if it would return from left. + */ + isViewFromLeft() { + if (this.viewFrom_ === '') { + // limit(), rather than limitToFirst or limitToLast was called. + // This means that only one of startSet_ and endSet_ is true. Use them + // to calculate which side of the view to anchor to. If neither is set, + // anchor to the end. + return this.startSet_; + } else { + return this.viewFrom_ === QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT; + } + }; + + /** + * Only valid to call if hasStart() returns true + * @return {*} + */ + getIndexStartValue() { + assert(this.startSet_, 'Only valid if start has been set'); + return this.indexStartValue_; + }; + + /** + * Only valid to call if hasStart() returns true. 
+ * Returns the starting key name for the range defined by these query parameters + * @return {!string} + */ + getIndexStartName() { + assert(this.startSet_, 'Only valid if start has been set'); + if (this.startNameSet_) { + return this.indexStartName_; + } else { + return MIN_NAME; + } + }; + + /** + * @return {boolean} + */ + hasEnd() { + return this.endSet_; + }; + + /** + * Only valid to call if hasEnd() returns true. + * @return {*} + */ + getIndexEndValue() { + assert(this.endSet_, 'Only valid if end has been set'); + return this.indexEndValue_; + }; + + /** + * Only valid to call if hasEnd() returns true. + * Returns the end key name for the range defined by these query parameters + * @return {!string} + */ + getIndexEndName() { + assert(this.endSet_, 'Only valid if end has been set'); + if (this.endNameSet_) { + return this.indexEndName_; + } else { + return MAX_NAME; + } + }; + + /** + * @return {boolean} + */ + hasLimit() { + return this.limitSet_; + }; + + /** + * @return {boolean} True if a limit has been set and it has been explicitly anchored + */ + hasAnchoredLimit() { + return this.limitSet_ && this.viewFrom_ !== ''; + }; + + /** + * Only valid to call if hasLimit() returns true + * @return {!number} + */ + getLimit() { + assert(this.limitSet_, 'Only valid if limit has been set'); + return this.limit_; + }; + + /** + * @return {!Index} + */ + getIndex() { + return this.index_; + }; + + /** + * @return {!QueryParams} + * @private + */ + copy_() { + var copy = new QueryParams(); + copy.limitSet_ = this.limitSet_; + copy.limit_ = this.limit_; + copy.startSet_ = this.startSet_; + copy.indexStartValue_ = this.indexStartValue_; + copy.startNameSet_ = this.startNameSet_; + copy.indexStartName_ = this.indexStartName_; + copy.endSet_ = this.endSet_; + copy.indexEndValue_ = this.indexEndValue_; + copy.endNameSet_ = this.endNameSet_; + copy.indexEndName_ = this.indexEndName_; + copy.index_ = this.index_; + copy.viewFrom_ = this.viewFrom_; + return copy; + }; + + /** + * @param {!number} newLimit + * @return {!QueryParams} + */ + limit(newLimit) { + var newParams = this.copy_(); + newParams.limitSet_ = true; + newParams.limit_ = newLimit; + newParams.viewFrom_ = ''; + return newParams; + }; + + /** + * @param {!number} newLimit + * @return {!QueryParams} + */ + limitToFirst(newLimit) { + var newParams = this.copy_(); + newParams.limitSet_ = true; + newParams.limit_ = newLimit; + newParams.viewFrom_ = QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT; + return newParams; + }; + + /** + * @param {!number} newLimit + * @return {!QueryParams} + */ + limitToLast(newLimit) { + var newParams = this.copy_(); + newParams.limitSet_ = true; + newParams.limit_ = newLimit; + newParams.viewFrom_ = QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_RIGHT; + return newParams; + }; + + /** + * @param {*} indexValue + * @param {?string=} key + * @return {!QueryParams} + */ + startAt(indexValue, key) { + var newParams = this.copy_(); + newParams.startSet_ = true; + if (!(indexValue !== undefined)) { + indexValue = null; + } + newParams.indexStartValue_ = indexValue; + if (key != null) { + newParams.startNameSet_ = true; + newParams.indexStartName_ = key; + } else { + newParams.startNameSet_ = false; + newParams.indexStartName_ = ''; + } + return newParams; + }; + + /** + * @param {*} indexValue + * @param {?string=} key + * @return {!QueryParams} + */ + endAt(indexValue, key) { + var newParams = this.copy_(); + newParams.endSet_ = true; + if (!(indexValue !== undefined)) { + indexValue = null; + } + 
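+    // An omitted value was normalized to null above; an omitted key leaves the end name unset.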
newParams.indexEndValue_ = indexValue; + if ((key !== undefined)) { + newParams.endNameSet_ = true; + newParams.indexEndName_ = key; + } else { + newParams.startEndSet_ = false; + newParams.indexEndName_ = ''; + } + return newParams; + }; + + /** + * @param {!Index} index + * @return {!QueryParams} + */ + orderBy(index) { + var newParams = this.copy_(); + newParams.index_ = index; + return newParams; + }; + + /** + * @return {!Object} + */ + getQueryObject() { + var WIRE_PROTOCOL_CONSTANTS = QueryParams.WIRE_PROTOCOL_CONSTANTS_; + var obj = {}; + if (this.startSet_) { + obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_VALUE] = this.indexStartValue_; + if (this.startNameSet_) { + obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_NAME] = this.indexStartName_; + } + } + if (this.endSet_) { + obj[WIRE_PROTOCOL_CONSTANTS.INDEX_END_VALUE] = this.indexEndValue_; + if (this.endNameSet_) { + obj[WIRE_PROTOCOL_CONSTANTS.INDEX_END_NAME] = this.indexEndName_; + } + } + if (this.limitSet_) { + obj[WIRE_PROTOCOL_CONSTANTS.LIMIT] = this.limit_; + var viewFrom = this.viewFrom_; + if (viewFrom === '') { + if (this.isViewFromLeft()) { + viewFrom = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_LEFT; + } else { + viewFrom = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_RIGHT; + } + } + obj[WIRE_PROTOCOL_CONSTANTS.VIEW_FROM] = viewFrom; + } + // For now, priority index is the default, so we only specify if it's some other index + if (this.index_ !== PRIORITY_INDEX) { + obj[WIRE_PROTOCOL_CONSTANTS.INDEX] = this.index_.toString(); + } + return obj; + }; + + /** + * @return {boolean} + */ + loadsAllData() { + return !(this.startSet_ || this.endSet_ || this.limitSet_); + }; + + /** + * @return {boolean} + */ + isDefault() { + return this.loadsAllData() && this.index_ == PRIORITY_INDEX; + }; + + /** + * @return {!NodeFilter} + */ + getNodeFilter() { + if (this.loadsAllData()) { + return new IndexedFilter(this.getIndex()); + } else if (this.hasLimit()) { + return new LimitedFilter(this); + } else { + return new RangedFilter(this); + } + }; + + + /** + * Returns a set of REST query string parameters representing this query. 
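+ *
+ * For example, a sketch of the expected shape (not an exhaustive mapping):
+ *
+ * @example
+ *   QueryParams.DEFAULT.limitToFirst(10).toRestQueryStringParameters();
+ *   // => { orderBy: '"$priority"', limitToFirst: 10 }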
+ * + * @return {!Object.} query string parameters + */ + toRestQueryStringParameters() { + var REST_CONSTANTS = QueryParams.REST_QUERY_CONSTANTS_; + var qs = { }; + + if (this.isDefault()) { + return qs; + } + + var orderBy; + if (this.index_ === PRIORITY_INDEX) { + orderBy = REST_CONSTANTS.PRIORITY_INDEX; + } else if (this.index_ === VALUE_INDEX) { + orderBy = REST_CONSTANTS.VALUE_INDEX; + } else if (this.index_ === KEY_INDEX) { + orderBy = REST_CONSTANTS.KEY_INDEX; + } else { + assert(this.index_ instanceof PathIndex, 'Unrecognized index type!'); + orderBy = this.index_.toString(); + } + qs[REST_CONSTANTS.ORDER_BY] = stringify(orderBy); + + if (this.startSet_) { + qs[REST_CONSTANTS.START_AT] = stringify(this.indexStartValue_); + if (this.startNameSet_) { + qs[REST_CONSTANTS.START_AT] += ',' + stringify(this.indexStartName_); + } + } + + if (this.endSet_) { + qs[REST_CONSTANTS.END_AT] = stringify(this.indexEndValue_); + if (this.endNameSet_) { + qs[REST_CONSTANTS.END_AT] += ',' + stringify(this.indexEndName_); + } + } + + if (this.limitSet_) { + if (this.isViewFromLeft()) { + qs[REST_CONSTANTS.LIMIT_TO_FIRST] = this.limit_; + } else { + qs[REST_CONSTANTS.LIMIT_TO_LAST] = this.limit_; + } + } + + return qs; + }; +} diff --git a/src/database/core/view/View.ts b/src/database/core/view/View.ts new file mode 100644 index 00000000000..3703c8613f8 --- /dev/null +++ b/src/database/core/view/View.ts @@ -0,0 +1,223 @@ +import { IndexedFilter } from "./filter/IndexedFilter"; +import { ViewProcessor } from "./ViewProcessor"; +import { ChildrenNode } from "../snap/ChildrenNode"; +import { CacheNode } from "./CacheNode"; +import { ViewCache } from "./ViewCache"; +import { EventGenerator } from "./EventGenerator"; +import { assert } from "../../../utils/assert"; +import { OperationType } from "../operation/Operation"; +import { Change } from "./Change"; +import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; +import { Query } from "../../api/Query"; + +/** + * A view represents a specific location and query that has 1 or more event registrations. + * + * It does several things: + * - Maintains the list of event registrations for this location/query. + * - Maintains a cache of the data visible for this location/query. + * - Applies new operations (via applyOperation), updates the cache, and based on the event + * registrations returns the set of events to be raised. 
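+ *
+ * A rough sketch of the lifecycle (in the SDK these calls are driven by the
+ * sync layer rather than by user code; `query`, `registration`, `operation`,
+ * `writesCache` and `serverCache` are assumed here):
+ *
+ * @example
+ *   const view = new View(query, ViewCache.Empty);
+ *   view.addEventRegistration(registration);
+ *   const initialEvents = view.getInitialEvents(registration);
+ *   // for each incoming operation:
+ *   const events = view.applyOperation(operation, writesCache, serverCache);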
+ * + * @param {!fb.api.Query} query + * @param {!ViewCache} initialViewCache + * @constructor + */ +export class View { + query_: Query + processor_ + viewCache_ + eventRegistrations_ + eventGenerator_ + constructor(query, initialViewCache) { + /** + * @type {!fb.api.Query} + * @private + */ + this.query_ = query; + var params = query.getQueryParams(); + + var indexFilter = new IndexedFilter(params.getIndex()); + var filter = params.getNodeFilter(); + + /** + * @type {ViewProcessor} + * @private + */ + this.processor_ = new ViewProcessor(filter); + + var initialServerCache = initialViewCache.getServerCache(); + var initialEventCache = initialViewCache.getEventCache(); + + // Don't filter server node with other filter than index, wait for tagged listen + var serverSnap = indexFilter.updateFullNode(ChildrenNode.EMPTY_NODE, initialServerCache.getNode(), null); + var eventSnap = filter.updateFullNode(ChildrenNode.EMPTY_NODE, initialEventCache.getNode(), null); + var newServerCache = new CacheNode(serverSnap, initialServerCache.isFullyInitialized(), + indexFilter.filtersNodes()); + var newEventCache = new CacheNode(eventSnap, initialEventCache.isFullyInitialized(), + filter.filtersNodes()); + + /** + * @type {!ViewCache} + * @private + */ + this.viewCache_ = new ViewCache(newEventCache, newServerCache); + + /** + * @type {!Array.} + * @private + */ + this.eventRegistrations_ = []; + + /** + * @type {!EventGenerator} + * @private + */ + this.eventGenerator_ = new EventGenerator(query); + }; + /** + * @return {!fb.api.Query} + */ + getQuery() { + return this.query_; + }; + + /** + * @return {?fb.core.snap.Node} + */ + getServerCache() { + return this.viewCache_.getServerCache().getNode(); + }; + + /** + * @param {!Path} path + * @return {?fb.core.snap.Node} + */ + getCompleteServerCache(path) { + var cache = this.viewCache_.getCompleteServerSnap(); + if (cache) { + // If this isn't a "loadsAllData" view, then cache isn't actually a complete cache and + // we need to see if it contains the child we're interested in. + if (this.query_.getQueryParams().loadsAllData() || + (!path.isEmpty() && !cache.getImmediateChild(path.getFront()).isEmpty())) { + return cache.getChild(path); + } + } + return null; + }; + + /** + * @return {boolean} + */ + isEmpty() { + return this.eventRegistrations_.length === 0; + }; + + /** + * @param {!fb.core.view.EventRegistration} eventRegistration + */ + addEventRegistration(eventRegistration) { + this.eventRegistrations_.push(eventRegistration); + }; + + /** + * @param {?fb.core.view.EventRegistration} eventRegistration If null, remove all callbacks. + * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. + * @return {!Array.} Cancel events, if cancelError was provided. 
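+ *
+ * A sketch of both modes (here `view` and `registration` are assumed):
+ *
+ * @example
+ *   // remove one specific registration
+ *   view.removeEventRegistration(registration, undefined);
+ *   // or cancel all registrations, collecting cancel events for listeners
+ *   // that supplied a cancel callback
+ *   const cancelEvents = view.removeEventRegistration(null, new Error('permission denied'));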
+ */ + removeEventRegistration(eventRegistration, cancelError) { + var cancelEvents = []; + if (cancelError) { + assert(eventRegistration == null, 'A cancel should cancel all event registrations.'); + var path = this.query_.path; + this.eventRegistrations_.forEach(function(registration) { + cancelError = /** @type {!Error} */ (cancelError); + var maybeEvent = registration.createCancelEvent(cancelError, path); + if (maybeEvent) { + cancelEvents.push(maybeEvent); + } + }); + } + + if (eventRegistration) { + var remaining = []; + for (var i = 0; i < this.eventRegistrations_.length; ++i) { + var existing = this.eventRegistrations_[i]; + if (!existing.matches(eventRegistration)) { + remaining.push(existing); + } else if (eventRegistration.hasAnyCallback()) { + // We're removing just this one + remaining = remaining.concat(this.eventRegistrations_.slice(i + 1)); + break; + } + } + this.eventRegistrations_ = remaining; + } else { + this.eventRegistrations_ = []; + } + return cancelEvents; + }; + + /** + * Applies the given Operation, updates our cache, and returns the appropriate events. + * + * @param {!fb.core.Operation} operation + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteServerCache + * @return {!Array.} + */ + applyOperation(operation, writesCache, optCompleteServerCache) { + if (operation.type === OperationType.MERGE && + operation.source.queryId !== null) { + + assert(this.viewCache_.getCompleteServerSnap(), + 'We should always have a full cache before handling merges'); + assert(this.viewCache_.getCompleteEventSnap(), + 'Missing event cache, even though we have a server cache'); + } + + var oldViewCache = this.viewCache_; + var result = this.processor_.applyOperation(oldViewCache, operation, writesCache, optCompleteServerCache); + this.processor_.assertIndexed(result.viewCache); + + assert(result.viewCache.getServerCache().isFullyInitialized() || + !oldViewCache.getServerCache().isFullyInitialized(), + 'Once a server snap is complete, it should never go back'); + + this.viewCache_ = result.viewCache; + + return this.generateEventsForChanges_(result.changes, result.viewCache.getEventCache().getNode(), null); + }; + + /** + * @param {!fb.core.view.EventRegistration} registration + * @return {!Array.} + */ + getInitialEvents(registration) { + var eventSnap = this.viewCache_.getEventCache(); + var initialChanges = []; + if (!eventSnap.getNode().isLeafNode()) { + var eventNode = /** @type {!fb.core.snap.ChildrenNode} */ (eventSnap.getNode()); + eventNode.forEachChild(PRIORITY_INDEX, function(key, childNode) { + initialChanges.push(Change.childAddedChange(key, childNode)); + }); + } + if (eventSnap.isFullyInitialized()) { + initialChanges.push(Change.valueChange(eventSnap.getNode())); + } + return this.generateEventsForChanges_(initialChanges, eventSnap.getNode(), registration); + }; + + /** + * @private + * @param {!Array.} changes + * @param {!fb.core.snap.Node} eventCache + * @param {fb.core.view.EventRegistration=} opt_eventRegistration + * @return {!Array.} + */ + generateEventsForChanges_(changes, eventCache, opt_eventRegistration) { + var registrations = opt_eventRegistration ? 
[opt_eventRegistration] : this.eventRegistrations_; + return this.eventGenerator_.generateEventsForChanges(changes, eventCache, registrations); + }; +} + diff --git a/src/database/core/view/ViewCache.ts b/src/database/core/view/ViewCache.ts new file mode 100644 index 00000000000..a407582562a --- /dev/null +++ b/src/database/core/view/ViewCache.ts @@ -0,0 +1,99 @@ +import { ChildrenNode } from "../snap/ChildrenNode"; +import { CacheNode } from "./CacheNode"; + +/** + * Stores the data we have cached for a view. + * + * serverSnap is the cached server data, eventSnap is the cached event data (server data plus any local writes). + * + * @param {!CacheNode} eventCache + * @param {!CacheNode} serverCache + * @constructor + */ +export class ViewCache { + /** + * @const + * @type {!CacheNode} + * @private + */ + private eventCache_; + + /** + * @const + * @type {!CacheNode} + * @private + */ + private serverCache_; + constructor(eventCache, serverCache) { + /** + * @const + * @type {!CacheNode} + * @private + */ + this.eventCache_ = eventCache; + + /** + * @const + * @type {!CacheNode} + * @private + */ + this.serverCache_ = serverCache; + }; + /** + * @const + * @type {ViewCache} + */ + static Empty = new ViewCache( + new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false), + new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false) + ); + + /** + * @param {!fb.core.snap.Node} eventSnap + * @param {boolean} complete + * @param {boolean} filtered + * @return {!ViewCache} + */ + updateEventSnap(eventSnap, complete, filtered) { + return new ViewCache(new CacheNode(eventSnap, complete, filtered), this.serverCache_); + }; + + /** + * @param {!fb.core.snap.Node} serverSnap + * @param {boolean} complete + * @param {boolean} filtered + * @return {!ViewCache} + */ + updateServerSnap(serverSnap, complete, filtered) { + return new ViewCache(this.eventCache_, new CacheNode(serverSnap, complete, filtered)); + }; + + /** + * @return {!CacheNode} + */ + getEventCache() { + return this.eventCache_; + }; + + /** + * @return {?fb.core.snap.Node} + */ + getCompleteEventSnap() { + return (this.eventCache_.isFullyInitialized()) ? this.eventCache_.getNode() : null; + }; + + /** + * @return {!CacheNode} + */ + getServerCache() { + return this.serverCache_; + }; + + /** + * @return {?fb.core.snap.Node} + */ + getCompleteServerSnap() { + return this.serverCache_.isFullyInitialized() ? 
this.serverCache_.getNode() : null; + }; +} + diff --git a/src/database/core/view/ViewProcessor.ts b/src/database/core/view/ViewProcessor.ts new file mode 100644 index 00000000000..8dbeb57988c --- /dev/null +++ b/src/database/core/view/ViewProcessor.ts @@ -0,0 +1,571 @@ +import { OperationType } from "../operation/Operation"; +import { assert, assertionError } from "../../../utils/assert"; +import { ChildChangeAccumulator } from "./ChildChangeAccumulator"; +import { Change } from "./Change"; +import { ChildrenNode } from "../snap/ChildrenNode"; +import { KEY_INDEX } from "../snap/indexes/KeyIndex"; +import { ImmutableTree } from "../util/ImmutableTree"; +import { Path } from "../util/Path"; +import { WriteTreeCompleteChildSource, NO_COMPLETE_CHILD_SOURCE } from "./CompleteChildSource"; + +/** + * @param {!ViewCache} viewCache + * @param {!Array.} changes + * @constructor + * @struct + */ +export class ProcessorResult { + /** + * @const + * @type {!ViewCache} + */ + viewCache; + + /** + * @const + * @type {!Array.} + */ + changes; + + constructor(viewCache, changes) { + this.viewCache = viewCache; + this.changes = changes; + }; +} + +/** + * @param {!NodeFilter} filter + * @constructor + */ +export class ViewProcessor { + /** + * @type {!NodeFilter} + * @private + * @const + */ + private filter_; + constructor(filter) { + this.filter_ = filter; + }; + + /** + * @param {!ViewCache} viewCache + */ + assertIndexed(viewCache) { + assert(viewCache.getEventCache().getNode().isIndexed(this.filter_.getIndex()), 'Event snap not indexed'); + assert(viewCache.getServerCache().getNode().isIndexed(this.filter_.getIndex()), + 'Server snap not indexed'); + }; + + /** + * @param {!ViewCache} oldViewCache + * @param {!fb.core.Operation} operation + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteCache + * @return {!ProcessorResult} + */ + applyOperation(oldViewCache, operation, writesCache, optCompleteCache) { + var accumulator = new ChildChangeAccumulator(); + var newViewCache, filterServerNode; + if (operation.type === OperationType.OVERWRITE) { + var overwrite = /** @type {!fb.core.operation.Overwrite} */ (operation); + if (overwrite.source.fromUser) { + newViewCache = this.applyUserOverwrite_(oldViewCache, overwrite.path, overwrite.snap, + writesCache, optCompleteCache, accumulator); + } else { + assert(overwrite.source.fromServer, 'Unknown source.'); + // We filter the node if it's a tagged update or the node has been previously filtered and the + // update is not at the root in which case it is ok (and necessary) to mark the node unfiltered + // again + filterServerNode = overwrite.source.tagged || + (oldViewCache.getServerCache().isFiltered() && !overwrite.path.isEmpty()); + newViewCache = this.applyServerOverwrite_(oldViewCache, overwrite.path, overwrite.snap, writesCache, + optCompleteCache, filterServerNode, accumulator); + } + } else if (operation.type === OperationType.MERGE) { + var merge = /** @type {!fb.core.operation.Merge} */ (operation); + if (merge.source.fromUser) { + newViewCache = this.applyUserMerge_(oldViewCache, merge.path, merge.children, writesCache, + optCompleteCache, accumulator); + } else { + assert(merge.source.fromServer, 'Unknown source.'); + // We filter the node if it's a tagged update or the node has been previously filtered + filterServerNode = merge.source.tagged || oldViewCache.getServerCache().isFiltered(); + newViewCache = this.applyServerMerge_(oldViewCache, merge.path, merge.children, writesCache, optCompleteCache, + 
filterServerNode, accumulator); + } + } else if (operation.type === OperationType.ACK_USER_WRITE) { + var ackUserWrite = /** @type {!fb.core.operation.AckUserWrite} */ (operation); + if (!ackUserWrite.revert) { + newViewCache = this.ackUserWrite_(oldViewCache, ackUserWrite.path, ackUserWrite.affectedTree, writesCache, + optCompleteCache, accumulator); + } else { + newViewCache = this.revertUserWrite_(oldViewCache, ackUserWrite.path, writesCache, optCompleteCache, accumulator); + } + } else if (operation.type === OperationType.LISTEN_COMPLETE) { + newViewCache = this.listenComplete_(oldViewCache, operation.path, writesCache, optCompleteCache, accumulator); + } else { + throw assertionError('Unknown operation type: ' + operation.type); + } + var changes = accumulator.getChanges(); + this.maybeAddValueEvent_(oldViewCache, newViewCache, changes); + return new ProcessorResult(newViewCache, changes); + }; + + /** + * @param {!ViewCache} oldViewCache + * @param {!ViewCache} newViewCache + * @param {!Array.} accumulator + * @private + */ + maybeAddValueEvent_(oldViewCache, newViewCache, accumulator) { + var eventSnap = newViewCache.getEventCache(); + if (eventSnap.isFullyInitialized()) { + var isLeafOrEmpty = eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty(); + var oldCompleteSnap = oldViewCache.getCompleteEventSnap(); + if (accumulator.length > 0 || + !oldViewCache.getEventCache().isFullyInitialized() || + (isLeafOrEmpty && !eventSnap.getNode().equals(/** @type {!fb.core.snap.Node} */ (oldCompleteSnap))) || + !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority())) { + accumulator.push(Change.valueChange( + /** @type {!fb.core.snap.Node} */ (newViewCache.getCompleteEventSnap()))); + } + } + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} changePath + * @param {!fb.core.WriteTreeRef} writesCache + * @param {!fb.core.view.CompleteChildSource} source + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + generateEventCacheAfterServerEvent_(viewCache, changePath, writesCache, source, accumulator) { + var oldEventSnap = viewCache.getEventCache(); + if (writesCache.shadowingWrite(changePath) != null) { + // we have a shadowing write, ignore changes + return viewCache; + } else { + var newEventCache, serverNode; + if (changePath.isEmpty()) { + // TODO: figure out how this plays with "sliding ack windows" + assert(viewCache.getServerCache().isFullyInitialized(), + 'If change path is empty, we must have complete server data'); + if (viewCache.getServerCache().isFiltered()) { + // We need to special case this, because we need to only apply writes to complete children, or + // we might end up raising events for incomplete children. If the server data is filtered deep + // writes cannot be guaranteed to be complete + var serverCache = viewCache.getCompleteServerSnap(); + var completeChildren = (serverCache instanceof ChildrenNode) ? 
serverCache : + ChildrenNode.EMPTY_NODE; + var completeEventChildren = writesCache.calcCompleteEventChildren(completeChildren); + newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeEventChildren, + accumulator); + } else { + var completeNode = /** @type {!fb.core.snap.Node} */ + (writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap())); + newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeNode, accumulator); + } + } else { + var childKey = changePath.getFront(); + if (childKey == '.priority') { + assert(changePath.getLength() == 1, "Can't have a priority with additional path components"); + var oldEventNode = oldEventSnap.getNode(); + serverNode = viewCache.getServerCache().getNode(); + // we might have overwrites for this priority + var updatedPriority = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventNode, serverNode); + if (updatedPriority != null) { + newEventCache = this.filter_.updatePriority(oldEventNode, updatedPriority); + } else { + // priority didn't change, keep old node + newEventCache = oldEventSnap.getNode(); + } + } else { + var childChangePath = changePath.popFront(); + // update child + var newEventChild; + if (oldEventSnap.isCompleteForChild(childKey)) { + serverNode = viewCache.getServerCache().getNode(); + var eventChildUpdate = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventSnap.getNode(), + serverNode); + if (eventChildUpdate != null) { + newEventChild = oldEventSnap.getNode().getImmediateChild(childKey).updateChild(childChangePath, + eventChildUpdate); + } else { + // Nothing changed, just keep the old child + newEventChild = oldEventSnap.getNode().getImmediateChild(childKey); + } + } else { + newEventChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); + } + if (newEventChild != null) { + newEventCache = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newEventChild, childChangePath, + source, accumulator); + } else { + // no complete child available or no change + newEventCache = oldEventSnap.getNode(); + } + } + } + return viewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized() || changePath.isEmpty(), + this.filter_.filtersNodes()); + } + }; + + /** + * @param {!ViewCache} oldViewCache + * @param {!Path} changePath + * @param {!fb.core.snap.Node} changedSnap + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteCache + * @param {boolean} filterServerNode + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + applyServerOverwrite_(oldViewCache, changePath, changedSnap, + writesCache, optCompleteCache, filterServerNode, + accumulator) { + var oldServerSnap = oldViewCache.getServerCache(); + var newServerCache; + var serverFilter = filterServerNode ? 
this.filter_ : this.filter_.getIndexedFilter(); + if (changePath.isEmpty()) { + newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), changedSnap, null); + } else if (serverFilter.filtersNodes() && !oldServerSnap.isFiltered()) { + // we want to filter the server node, but we didn't filter the server node yet, so simulate a full update + var newServerNode = oldServerSnap.getNode().updateChild(changePath, changedSnap); + newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), newServerNode, null); + } else { + var childKey = changePath.getFront(); + if (!oldServerSnap.isCompleteForPath(changePath) && changePath.getLength() > 1) { + // We don't update incomplete nodes with updates intended for other listeners + return oldViewCache; + } + var childChangePath = changePath.popFront(); + var childNode = oldServerSnap.getNode().getImmediateChild(childKey); + var newChildNode = childNode.updateChild(childChangePath, changedSnap); + if (childKey == '.priority') { + newServerCache = serverFilter.updatePriority(oldServerSnap.getNode(), newChildNode); + } else { + newServerCache = serverFilter.updateChild(oldServerSnap.getNode(), childKey, newChildNode, childChangePath, + NO_COMPLETE_CHILD_SOURCE, null); + } + } + var newViewCache = oldViewCache.updateServerSnap(newServerCache, + oldServerSnap.isFullyInitialized() || changePath.isEmpty(), serverFilter.filtersNodes()); + var source = new WriteTreeCompleteChildSource(writesCache, newViewCache, optCompleteCache); + return this.generateEventCacheAfterServerEvent_(newViewCache, changePath, writesCache, source, accumulator); + }; + + /** + * @param {!ViewCache} oldViewCache + * @param {!Path} changePath + * @param {!fb.core.snap.Node} changedSnap + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + applyUserOverwrite_(oldViewCache, changePath, changedSnap, writesCache, + optCompleteCache, accumulator) { + var oldEventSnap = oldViewCache.getEventCache(); + var newViewCache, newEventCache; + var source = new WriteTreeCompleteChildSource(writesCache, oldViewCache, optCompleteCache); + if (changePath.isEmpty()) { + newEventCache = this.filter_.updateFullNode(oldViewCache.getEventCache().getNode(), changedSnap, accumulator); + newViewCache = oldViewCache.updateEventSnap(newEventCache, true, this.filter_.filtersNodes()); + } else { + var childKey = changePath.getFront(); + if (childKey === '.priority') { + newEventCache = this.filter_.updatePriority(oldViewCache.getEventCache().getNode(), changedSnap); + newViewCache = oldViewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized(), + oldEventSnap.isFiltered()); + } else { + var childChangePath = changePath.popFront(); + var oldChild = oldEventSnap.getNode().getImmediateChild(childKey); + var newChild; + if (childChangePath.isEmpty()) { + // Child overwrite, we can replace the child + newChild = changedSnap; + } else { + var childNode = source.getCompleteChild(childKey); + if (childNode != null) { + if (childChangePath.getBack() === '.priority' && + childNode.getChild(/** @type {!Path} */ (childChangePath.parent())).isEmpty()) { + // This is a priority update on an empty node. 
If this node exists on the server, the + // server will send down the priority in the update, so ignore for now + newChild = childNode; + } else { + newChild = childNode.updateChild(childChangePath, changedSnap); + } + } else { + // There is no complete child node available + newChild = ChildrenNode.EMPTY_NODE; + } + } + if (!oldChild.equals(newChild)) { + var newEventSnap = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newChild, childChangePath, + source, accumulator); + newViewCache = oldViewCache.updateEventSnap(newEventSnap, oldEventSnap.isFullyInitialized(), + this.filter_.filtersNodes()); + } else { + newViewCache = oldViewCache; + } + } + } + return newViewCache; + }; + + /** + * @param {!ViewCache} viewCache + * @param {string} childKey + * @return {boolean} + * @private + */ + static cacheHasChild_(viewCache, childKey) { + return viewCache.getEventCache().isCompleteForChild(childKey); + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} path + * @param {ImmutableTree.} changedChildren + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} serverCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + applyUserMerge_(viewCache, path, changedChildren, writesCache, + serverCache, accumulator) { + // HACK: In the case of a limit query, there may be some changes that bump things out of the + // window leaving room for new items. It's important we process these changes first, so we + // iterate the changes twice, first processing any that affect items currently in view. + // TODO: I consider an item "in view" if cacheHasChild is true, which checks both the server + // and event snap. I'm not sure if this will result in edge cases when a child is in one but + // not the other. + var self = this; + var curViewCache = viewCache; + changedChildren.foreach(function(relativePath, childNode) { + var writePath = path.child(relativePath); + if (ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { + curViewCache = self.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, + serverCache, accumulator); + } + }); + + changedChildren.foreach(function(relativePath, childNode) { + var writePath = path.child(relativePath); + if (!ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { + curViewCache = self.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, + serverCache, accumulator); + } + }); + + return curViewCache; + }; + + /** + * @param {!fb.core.snap.Node} node + * @param {ImmutableTree.} merge + * @return {!fb.core.snap.Node} + * @private + */ + applyMerge_(node, merge) { + merge.foreach(function(relativePath, childNode) { + node = node.updateChild(relativePath, childNode); + }); + return node; + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} path + * @param {!ImmutableTree.} changedChildren + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} serverCache + * @param {boolean} filterServerNode + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + applyServerMerge_(viewCache, path, changedChildren, writesCache, serverCache, filterServerNode, accumulator) { + // If we don't have a cache yet, this merge was intended for a previously listen in the same location. Ignore it and + // wait for the complete data update coming soon. 
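The two passes in applyUserMerge_ above (and again in applyServerMerge_ below) are easy to lose in the accumulator plumbing. A minimal sketch of the same idea, assuming plain arrays and a hypothetical inView predicate standing in for cacheHasChild_ (none of these names are the SDK's API):

function applyChangesInTwoPasses<T extends { key: string }>(
  changes: T[],
  inView: (key: string) => boolean,
  apply: (change: T) => void
): void {
  // Pass 1: changes for keys already in the view. On a limit query these can
  // evict items and open up slots in the window before anything new is admitted.
  for (const change of changes) {
    if (inView(change.key)) apply(change);
  }
  // Pass 2: everything else, now that the window has had a chance to shrink.
  for (const change of changes) {
    if (!inView(change.key)) apply(change);
  }
}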
+ if (viewCache.getServerCache().getNode().isEmpty() && !viewCache.getServerCache().isFullyInitialized()) { + return viewCache; + } + + // HACK: In the case of a limit query, there may be some changes that bump things out of the + // window leaving room for new items. It's important we process these changes first, so we + // iterate the changes twice, first processing any that affect items currently in view. + // TODO: I consider an item "in view" if cacheHasChild is true, which checks both the server + // and event snap. I'm not sure if this will result in edge cases when a child is in one but + // not the other. + var curViewCache = viewCache; + var viewMergeTree; + if (path.isEmpty()) { + viewMergeTree = changedChildren; + } else { + viewMergeTree = ImmutableTree.Empty.setTree(path, changedChildren); + } + var serverNode = viewCache.getServerCache().getNode(); + var self = this; + viewMergeTree.children.inorderTraversal(function(childKey, childTree) { + if (serverNode.hasChild(childKey)) { + var serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + var newChild = self.applyMerge_(serverChild, childTree); + curViewCache = self.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, + writesCache, serverCache, filterServerNode, accumulator); + } + }); + viewMergeTree.children.inorderTraversal(function(childKey, childMergeTree) { + var isUnknownDeepMerge = !viewCache.getServerCache().isCompleteForChild(childKey) && childMergeTree.value == null; + if (!serverNode.hasChild(childKey) && !isUnknownDeepMerge) { + var serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + var newChild = self.applyMerge_(serverChild, childMergeTree); + curViewCache = self.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, writesCache, + serverCache, filterServerNode, accumulator); + } + }); + + return curViewCache; + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} ackPath + * @param {!ImmutableTree} affectedTree + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + ackUserWrite_(viewCache, ackPath, affectedTree, writesCache, + optCompleteCache, accumulator) { + if (writesCache.shadowingWrite(ackPath) != null) { + return viewCache; + } + + // Only filter server node if it is currently filtered + var filterServerNode = viewCache.getServerCache().isFiltered(); + + // Essentially we'll just get our existing server cache for the affected paths and re-apply it as a server update + // now that it won't be shadowed. + var serverCache = viewCache.getServerCache(); + if (affectedTree.value != null) { + // This is an overwrite. + if ((ackPath.isEmpty() && serverCache.isFullyInitialized()) || serverCache.isCompleteForPath(ackPath)) { + return this.applyServerOverwrite_(viewCache, ackPath, serverCache.getNode().getChild(ackPath), + writesCache, optCompleteCache, filterServerNode, accumulator); + } else if (ackPath.isEmpty()) { + // This is a goofy edge case where we are acking data at this location but don't have full data. We + // should just re-apply whatever we have in our cache as a merge. 
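Condensing the branching above into plain booleans, a sketch of the decision only (assumed helper names; the real method operates on ViewCache and ImmutableTree):

type AckAction = 'reapply-as-overwrite' | 'reapply-as-merge' | 'ignore';

function ackOverwriteAction(
  ackAtRoot: boolean,
  serverFullyInitialized: boolean,
  serverCompleteAtAckPath: boolean
): AckAction {
  if ((ackAtRoot && serverFullyInitialized) || serverCompleteAtAckPath) {
    // We hold complete server data for the acked location: replay it as an overwrite.
    return 'reapply-as-overwrite';
  }
  if (ackAtRoot) {
    // Acking the root without full data: replay whatever children we do have
    // as a merge (the edge case described in the comment above).
    return 'reapply-as-merge';
  }
  // Incomplete data below the root: wait for the server to send the rest.
  return 'ignore';
}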
+ var changedChildren = /** @type {ImmutableTree} */ + (ImmutableTree.Empty); + serverCache.getNode().forEachChild(KEY_INDEX, function(name, node) { + changedChildren = changedChildren.set(new Path(name), node); + }); + return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, optCompleteCache, + filterServerNode, accumulator); + } else { + return viewCache; + } + } else { + // This is a merge. + var changedChildren = /** @type {ImmutableTree} */ + (ImmutableTree.Empty); + affectedTree.foreach(function(mergePath, value) { + var serverCachePath = ackPath.child(mergePath); + if (serverCache.isCompleteForPath(serverCachePath)) { + changedChildren = changedChildren.set(mergePath, serverCache.getNode().getChild(serverCachePath)); + } + }); + return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, optCompleteCache, + filterServerNode, accumulator); + } + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} path + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} optCompleteServerCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + revertUserWrite_(viewCache, path, writesCache, optCompleteServerCache, + accumulator) { + var complete; + if (writesCache.shadowingWrite(path) != null) { + return viewCache; + } else { + var source = new WriteTreeCompleteChildSource(writesCache, viewCache, optCompleteServerCache); + var oldEventCache = viewCache.getEventCache().getNode(); + var newEventCache; + if (path.isEmpty() || path.getFront() === '.priority') { + var newNode; + if (viewCache.getServerCache().isFullyInitialized()) { + newNode = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); + } else { + var serverChildren = viewCache.getServerCache().getNode(); + assert(serverChildren instanceof ChildrenNode, + 'serverChildren would be complete if leaf node'); + newNode = writesCache.calcCompleteEventChildren(/** @type {!ChildrenNode} */ (serverChildren)); + } + newNode = /** @type {!fb.core.snap.Node} newNode */ (newNode); + newEventCache = this.filter_.updateFullNode(oldEventCache, newNode, accumulator); + } else { + var childKey = path.getFront(); + var newChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); + if (newChild == null && viewCache.getServerCache().isCompleteForChild(childKey)) { + newChild = oldEventCache.getImmediateChild(childKey); + } + if (newChild != null) { + newEventCache = this.filter_.updateChild(oldEventCache, childKey, newChild, path.popFront(), source, + accumulator); + } else if (viewCache.getEventCache().getNode().hasChild(childKey)) { + // No complete child available, delete the existing one, if any + newEventCache = this.filter_.updateChild(oldEventCache, childKey, ChildrenNode.EMPTY_NODE, path.popFront(), + source, accumulator); + } else { + newEventCache = oldEventCache; + } + if (newEventCache.isEmpty() && viewCache.getServerCache().isFullyInitialized()) { + // We might have reverted all child writes. 
Maybe the old event was a leaf node + complete = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); + if (complete.isLeafNode()) { + newEventCache = this.filter_.updateFullNode(newEventCache, complete, accumulator); + } + } + } + complete = viewCache.getServerCache().isFullyInitialized() || + writesCache.shadowingWrite(Path.Empty) != null; + return viewCache.updateEventSnap(newEventCache, complete, this.filter_.filtersNodes()); + } + }; + + /** + * @param {!ViewCache} viewCache + * @param {!Path} path + * @param {!fb.core.WriteTreeRef} writesCache + * @param {?fb.core.snap.Node} serverCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + listenComplete_(viewCache, path, writesCache, serverCache, + accumulator) { + var oldServerNode = viewCache.getServerCache(); + var newViewCache = viewCache.updateServerSnap(oldServerNode.getNode(), + oldServerNode.isFullyInitialized() || path.isEmpty(), oldServerNode.isFiltered()); + return this.generateEventCacheAfterServerEvent_(newViewCache, path, writesCache, + NO_COMPLETE_CHILD_SOURCE, accumulator); + }; +} + diff --git a/src/database/core/view/filter/IndexedFilter.ts b/src/database/core/view/filter/IndexedFilter.ts new file mode 100644 index 00000000000..f8b627a0708 --- /dev/null +++ b/src/database/core/view/filter/IndexedFilter.ts @@ -0,0 +1,123 @@ +import { assert } from "../../../../utils/assert"; +import { Change } from "../Change"; +import { ChildrenNode } from "../../snap/ChildrenNode"; +import { PRIORITY_INDEX } from "../../snap/indexes/PriorityIndex"; +import { NodeFilter } from './NodeFilter'; +import { Index } from '../../snap/indexes/Index'; +import { Path } from '../../util/Path'; +import { CompleteChildSource } from '../CompleteChildSource'; +import { ChildChangeAccumulator } from '../ChildChangeAccumulator'; +import { Node } from '../../snap/Node'; + +/** + * Doesn't really filter nodes but applies an index to the node and keeps track of any changes + * + * @constructor + * @implements {NodeFilter} + * @param {!Index} index + */ +export class IndexedFilter implements NodeFilter { + constructor(private readonly index_: Index) { + } + + updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null): Node { + assert(snap.isIndexed(this.index_), 'A node must be indexed if only a child is updated'); + const oldChild = snap.getImmediateChild(key); + // Check if anything actually changed. + if (oldChild.getChild(affectedPath).equals(newChild.getChild(affectedPath))) { + // There's an edge case where a child can enter or leave the view because affectedPath was set to null. + // In this case, affectedPath will appear null in both the old and new snapshots. So we need + // to avoid treating these cases as "nothing changed." + if (oldChild.isEmpty() == newChild.isEmpty()) { + // Nothing changed. + + // This assert should be valid, but it's expensive (can dominate perf testing) so don't actually do it. 
+ //assert(oldChild.equals(newChild), 'Old and new snapshots should be equal.'); + return snap; + } + } + + if (optChangeAccumulator != null) { + if (newChild.isEmpty()) { + if (snap.hasChild(key)) { + optChangeAccumulator.trackChildChange(Change.childRemovedChange(key, oldChild)); + } else { + assert(snap.isLeafNode(), 'A child remove without an old child only makes sense on a leaf node'); + } + } else if (oldChild.isEmpty()) { + optChangeAccumulator.trackChildChange(Change.childAddedChange(key, newChild)); + } else { + optChangeAccumulator.trackChildChange(Change.childChangedChange(key, newChild, oldChild)); + } + } + if (snap.isLeafNode() && newChild.isEmpty()) { + return snap; + } else { + // Make sure the node is indexed + return snap.updateImmediateChild(key, newChild).withIndex(this.index_); + } + }; + + /** + * @inheritDoc + */ + updateFullNode(oldSnap: Node, newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null): Node { + if (optChangeAccumulator != null) { + if (!oldSnap.isLeafNode()) { + oldSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { + if (!newSnap.hasChild(key)) { + optChangeAccumulator.trackChildChange(Change.childRemovedChange(key, childNode)); + } + }); + } + if (!newSnap.isLeafNode()) { + newSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { + if (oldSnap.hasChild(key)) { + const oldChild = oldSnap.getImmediateChild(key); + if (!oldChild.equals(childNode)) { + optChangeAccumulator.trackChildChange(Change.childChangedChange(key, childNode, oldChild)); + } + } else { + optChangeAccumulator.trackChildChange(Change.childAddedChange(key, childNode)); + } + }); + } + } + return newSnap.withIndex(this.index_); + }; + + /** + * @inheritDoc + */ + updatePriority(oldSnap: Node, newPriority: Node): Node { + if (oldSnap.isEmpty()) { + return ChildrenNode.EMPTY_NODE; + } else { + return oldSnap.updatePriority(newPriority); + } + }; + + /** + * @inheritDoc + */ + filtersNodes(): boolean { + return false; + }; + + /** + * @inheritDoc + */ + getIndexedFilter(): IndexedFilter { + return this; + }; + + /** + * @inheritDoc + */ + getIndex(): Index { + return this.index_; + }; +} diff --git a/src/database/core/view/filter/LimitedFilter.ts b/src/database/core/view/filter/LimitedFilter.ts new file mode 100644 index 00000000000..9acc36cb7e1 --- /dev/null +++ b/src/database/core/view/filter/LimitedFilter.ts @@ -0,0 +1,268 @@ +import { RangedFilter } from "./RangedFilter"; +import { ChildrenNode } from "../../snap/ChildrenNode"; +import { Node, NamedNode } from "../../snap/Node"; +import { assert } from "../../../../utils/assert"; +import { Change } from "../Change"; +/** + * Applies a limit and a range to a node and uses RangedFilter to do the heavy lifting where possible + * + * @constructor + * @implements {NodeFilter} + * @param {!QueryParams} params + */ +export class LimitedFilter { + /** + * @const + * @type {RangedFilter} + * @private + */ + private rangedFilter_; + + /** + * @const + * @type {!Index} + * @private + */ + private index_; + + /** + * @const + * @type {number} + * @private + */ + private limit_; + + /** + * @const + * @type {boolean} + * @private + */ + private reverse_; + + constructor(params) { + /** + * @const + * @type {RangedFilter} + * @private + */ + this.rangedFilter_ = new RangedFilter(params); + + /** + * @const + * @type {!Index} + * @private + */ + this.index_ = params.getIndex(); + + /** + * @const + * @type {number} + * @private + */ + this.limit_ = params.getLimit(); + + /** + * @const + * @type {boolean} + * @private + 
*/ + this.reverse_ = !params.isViewFromLeft(); + }; + /** + * @inheritDoc + */ + updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator) { + if (!this.rangedFilter_.matches(new NamedNode(key, newChild))) { + newChild = ChildrenNode.EMPTY_NODE; + } + if (snap.getImmediateChild(key).equals(newChild)) { + // No change + return snap; + } else if (snap.numChildren() < this.limit_) { + return this.rangedFilter_.getIndexedFilter().updateChild(snap, key, newChild, affectedPath, source, + optChangeAccumulator); + } else { + return this.fullLimitUpdateChild_(snap, key, newChild, source, optChangeAccumulator); + } + }; + + /** + * @inheritDoc + */ + updateFullNode(oldSnap, newSnap, optChangeAccumulator) { + var filtered; + if (newSnap.isLeafNode() || newSnap.isEmpty()) { + // Make sure we have a children node with the correct index, not a leaf node; + filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); + } else { + if (this.limit_ * 2 < newSnap.numChildren() && newSnap.isIndexed(this.index_)) { + // Easier to build up a snapshot, since what we're given has more than twice the elements we want + filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); + // anchor to the startPost, endPost, or last element as appropriate + var iterator; + newSnap = /** @type {!ChildrenNode} */ (newSnap); + if (this.reverse_) { + iterator = newSnap.getReverseIteratorFrom(this.rangedFilter_.getEndPost(), this.index_); + } else { + iterator = newSnap.getIteratorFrom(this.rangedFilter_.getStartPost(), this.index_); + } + var count = 0; + while (iterator.hasNext() && count < this.limit_) { + var next = iterator.getNext(); + var inRange; + if (this.reverse_) { + inRange = this.index_.compare(this.rangedFilter_.getStartPost(), next) <= 0; + } else { + inRange = this.index_.compare(next, this.rangedFilter_.getEndPost()) <= 0; + } + if (inRange) { + filtered = filtered.updateImmediateChild(next.name, next.node); + count++; + } else { + // if we have reached the end post, we cannot keep adding elemments + break; + } + } + } else { + // The snap contains less than twice the limit. 
Faster to delete from the snap than build up a new one + filtered = newSnap.withIndex(this.index_); + // Don't support priorities on queries + filtered = /** @type {!ChildrenNode} */ (filtered.updatePriority(ChildrenNode.EMPTY_NODE)); + var startPost; + var endPost; + var cmp; + if (this.reverse_) { + iterator = filtered.getReverseIterator(this.index_); + startPost = this.rangedFilter_.getEndPost(); + endPost = this.rangedFilter_.getStartPost(); + var indexCompare = this.index_.getCompare(); + cmp = function(a, b) { return indexCompare(b, a); }; + } else { + iterator = filtered.getIterator(this.index_); + startPost = this.rangedFilter_.getStartPost(); + endPost = this.rangedFilter_.getEndPost(); + cmp = this.index_.getCompare(); + } + + count = 0; + var foundStartPost = false; + while (iterator.hasNext()) { + next = iterator.getNext(); + if (!foundStartPost && cmp(startPost, next) <= 0) { + // start adding + foundStartPost = true; + } + inRange = foundStartPost && count < this.limit_ && cmp(next, endPost) <= 0; + if (inRange) { + count++; + } else { + filtered = filtered.updateImmediateChild(next.name, ChildrenNode.EMPTY_NODE); + } + } + } + } + return this.rangedFilter_.getIndexedFilter().updateFullNode(oldSnap, filtered, optChangeAccumulator); + }; + + /** + * @inheritDoc + */ + updatePriority(oldSnap, newPriority) { + // Don't support priorities on queries + return oldSnap; + }; + + /** + * @inheritDoc + */ + filtersNodes() { + return true; + }; + + /** + * @inheritDoc + */ + getIndexedFilter() { + return this.rangedFilter_.getIndexedFilter(); + }; + + /** + * @inheritDoc + */ + getIndex() { + return this.index_; + }; + + /** + * @param {!Node} snap + * @param {string} childKey + * @param {!Node} childSnap + * @param {!CompleteChildSource} source + * @param {?ChildChangeAccumulator} optChangeAccumulator + * @return {!Node} + * @private + */ + fullLimitUpdateChild_(snap: Node, childKey: string, childSnap: Node, source, changeAccumulator?) { + // TODO: rename all cache stuff etc to general snap terminology + var cmp; + if (this.reverse_) { + var indexCmp = this.index_.getCompare(); + cmp = function(a, b) { return indexCmp(b, a); }; + } else { + cmp = this.index_.getCompare(); + } + var oldEventCache = snap; + assert(oldEventCache.numChildren() == this.limit_, ''); + var newChildNamedNode = new NamedNode(childKey, childSnap); + var windowBoundary = (this.reverse_ ? oldEventCache.getFirstChild(this.index_) : oldEventCache.getLastChild(this.index_)); + var inRange = this.rangedFilter_.matches(newChildNamedNode); + if (oldEventCache.hasChild(childKey)) { + var oldChildSnap = oldEventCache.getImmediateChild(childKey); + var nextChild = source.getChildAfterChild(this.index_, windowBoundary, this.reverse_); + while (nextChild != null && (nextChild.name == childKey || oldEventCache.hasChild(nextChild.name))) { + // There is a weird edge case where a node is updated as part of a merge in the write tree, but hasn't + // been applied to the limited filter yet. Ignore this next child which will be updated later in + // the limited filter... + nextChild = source.getChildAfterChild(this.index_, nextChild, this.reverse_); + } + var compareNext = nextChild == null ? 
1 : cmp(nextChild, newChildNamedNode); + var remainsInWindow = inRange && !childSnap.isEmpty() && compareNext >= 0; + if (remainsInWindow) { + if (changeAccumulator != null) { + changeAccumulator.trackChildChange(Change.childChangedChange(childKey, childSnap, oldChildSnap)); + } + return oldEventCache.updateImmediateChild(childKey, childSnap); + } else { + if (changeAccumulator != null) { + changeAccumulator.trackChildChange(Change.childRemovedChange(childKey, oldChildSnap)); + } + var newEventCache = oldEventCache.updateImmediateChild(childKey, ChildrenNode.EMPTY_NODE); + var nextChildInRange = nextChild != null && this.rangedFilter_.matches(nextChild); + if (nextChildInRange) { + if (changeAccumulator != null) { + changeAccumulator.trackChildChange(Change.childAddedChange(nextChild.name, nextChild.node)); + } + return newEventCache.updateImmediateChild(nextChild.name, nextChild.node); + } else { + return newEventCache; + } + } + } else if (childSnap.isEmpty()) { + // we're deleting a node, but it was not in the window, so ignore it + return snap; + } else if (inRange) { + if (cmp(windowBoundary, newChildNamedNode) >= 0) { + if (changeAccumulator != null) { + changeAccumulator.trackChildChange(Change.childRemovedChange(windowBoundary.name, windowBoundary.node)); + changeAccumulator.trackChildChange(Change.childAddedChange(childKey, childSnap)); + } + return oldEventCache.updateImmediateChild(childKey, childSnap).updateImmediateChild(windowBoundary.name, + ChildrenNode.EMPTY_NODE); + } else { + return snap; + } + } else { + return snap; + } + }; +} diff --git a/src/database/core/view/filter/NodeFilter.ts b/src/database/core/view/filter/NodeFilter.ts new file mode 100644 index 00000000000..eb0dea1cb52 --- /dev/null +++ b/src/database/core/view/filter/NodeFilter.ts @@ -0,0 +1,69 @@ +import { Node } from '../../snap/Node'; +import { Path } from '../../util/Path'; +import { CompleteChildSource } from '../CompleteChildSource'; +import { ChildChangeAccumulator } from '../ChildChangeAccumulator'; +import { Index } from '../../snap/indexes/Index'; + +/** + * NodeFilter is used to update nodes and complete children of nodes while applying queries on the fly and keeping + * track of any child changes. This class does not track value changes as value changes depend on more + * than just the node itself. Different kind of queries require different kind of implementations of this interface. + * @interface + */ +export interface NodeFilter { + + /** + * Update a single complete child in the snap. If the child equals the old child in the snap, this is a no-op. + * The method expects an indexed snap. + * + * @param {!Node} snap + * @param {string} key + * @param {!Node} newChild + * @param {!Path} affectedPath + * @param {!CompleteChildSource} source + * @param {?ChildChangeAccumulator} optChangeAccumulator + * @return {!Node} + */ + updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null): Node; + + /** + * Update a node in full and output any resulting change from this complete update. 
+ * + * @param {!Node} oldSnap + * @param {!Node} newSnap + * @param {?ChildChangeAccumulator} optChangeAccumulator + * @return {!Node} + */ + updateFullNode(oldSnap: Node, newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null): Node; + + /** + * Update the priority of the root node + * + * @param {!Node} oldSnap + * @param {!Node} newPriority + * @return {!Node} + */ + updatePriority(oldSnap: Node, newPriority: Node): Node; + + /** + * Returns true if children might be filtered due to query criteria + * + * @return {boolean} + */ + filtersNodes(): boolean; + + /** + * Returns the index filter that this filter uses to get a NodeFilter that doesn't filter any children. + * @return {!NodeFilter} + */ + getIndexedFilter(): NodeFilter; + + /** + * Returns the index that this filter uses + * @return {!Index} + */ + getIndex(): Index; +} diff --git a/src/database/core/view/filter/RangedFilter.ts b/src/database/core/view/filter/RangedFilter.ts new file mode 100644 index 00000000000..ea471486445 --- /dev/null +++ b/src/database/core/view/filter/RangedFilter.ts @@ -0,0 +1,156 @@ +import { IndexedFilter } from "./IndexedFilter"; +import { PRIORITY_INDEX } from "../../../core/snap/indexes/PriorityIndex"; +import { NamedNode } from "../../../core/snap/Node"; +import { ChildrenNode } from "../../../core/snap/ChildrenNode"; +/** + * Filters nodes by range and uses an IndexFilter to track any changes after filtering the node + * + * @constructor + * @implements {NodeFilter} + * @param {!fb.core.view.QueryParams} params + */ +export class RangedFilter { + /** + * @type {!IndexedFilter} + * @const + * @private + */ + private indexedFilter_: IndexedFilter; + + /** + * @const + * @type {!Index} + * @private + */ + private index_; + + /** + * @const + * @type {!NamedNode} + * @private + */ + private startPost_; + + /** + * @const + * @type {!NamedNode} + * @private + */ + private endPost_; + + constructor(params) { + this.indexedFilter_ = new IndexedFilter(params.getIndex()); + this.index_ = params.getIndex(); + this.startPost_ = this.getStartPost_(params); + this.endPost_ = this.getEndPost_(params); + }; + + /** + * @return {!NamedNode} + */ + getStartPost() { + return this.startPost_; + }; + + /** + * @return {!NamedNode} + */ + getEndPost() { + return this.endPost_; + }; + + /** + * @param {!NamedNode} node + * @return {boolean} + */ + matches(node) { + return (this.index_.compare(this.getStartPost(), node) <= 0 && this.index_.compare(node, this.getEndPost()) <= 0); + }; + + /** + * @inheritDoc + */ + updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator) { + if (!this.matches(new NamedNode(key, newChild))) { + newChild = ChildrenNode.EMPTY_NODE; + } + return this.indexedFilter_.updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator); + }; + + /** + * @inheritDoc + */ + updateFullNode(oldSnap, newSnap, optChangeAccumulator) { + if (newSnap.isLeafNode()) { + // Make sure we have a children node with the correct index, not a leaf node; + newSnap = ChildrenNode.EMPTY_NODE; + } + var filtered = newSnap.withIndex(this.index_); + // Don't support priorities on queries + filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE); + var self = this; + newSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { + if (!self.matches(new NamedNode(key, childNode))) { + filtered = filtered.updateImmediateChild(key, ChildrenNode.EMPTY_NODE); + } + }); + return this.indexedFilter_.updateFullNode(oldSnap, filtered, optChangeAccumulator); + }; + + /** + * 
@inheritDoc + */ + updatePriority(oldSnap, newPriority) { + // Don't support priorities on queries + return oldSnap; + }; + + /** + * @inheritDoc + */ + filtersNodes() { + return true; + }; + + /** + * @inheritDoc + */ + getIndexedFilter() { + return this.indexedFilter_; + }; + + /** + * @inheritDoc + */ + getIndex() { + return this.index_; + }; + + /** + * @param {!fb.core.view.QueryParams} params + * @return {!NamedNode} + * @private + */ + getStartPost_(params) { + if (params.hasStart()) { + var startName = params.getIndexStartName(); + return params.getIndex().makePost(params.getIndexStartValue(), startName); + } else { + return params.getIndex().minPost(); + } + }; + + /** + * @param {!fb.core.view.QueryParams} params + * @return {!NamedNode} + * @private + */ + getEndPost_(params) { + if (params.hasEnd()) { + var endName = params.getIndexEndName(); + return params.getIndex().makePost(params.getIndexEndValue(), endName); + } else { + return params.getIndex().maxPost(); + } + }; +} diff --git a/src/database/realtime/BrowserPollConnection.ts b/src/database/realtime/BrowserPollConnection.ts new file mode 100644 index 00000000000..94082b272a3 --- /dev/null +++ b/src/database/realtime/BrowserPollConnection.ts @@ -0,0 +1,700 @@ +import { + base64Encode, + executeWhenDOMReady, + isChromeExtensionContentScript, + isWindowsStoreApp, + log, + logWrapper, + LUIDGenerator, + splitStringBySize +} from "../core/util/util"; +import { CountedSet } from "../core/util/CountedSet"; +import { StatsManager } from "../core/stats/StatsManager"; +import { PacketReceiver } from "./polling/PacketReceiver"; +import { CONSTANTS } from "./Constants"; +import { stringify } from "../../utils/json"; +import { isNodeSdk } from "../../utils/environment"; +import { Transport } from './Transport'; +import { RepoInfo } from '../core/RepoInfo'; + +// URL query parameters associated with longpolling +const FIREBASE_LONGPOLL_START_PARAM = 'start'; +const FIREBASE_LONGPOLL_CLOSE_COMMAND = 'close'; +const FIREBASE_LONGPOLL_COMMAND_CB_NAME = 'pLPCommand'; +const FIREBASE_LONGPOLL_DATA_CB_NAME = 'pRTLPCB'; +const FIREBASE_LONGPOLL_ID_PARAM = 'id'; +const FIREBASE_LONGPOLL_PW_PARAM = 'pw'; +const FIREBASE_LONGPOLL_SERIAL_PARAM = 'ser'; +const FIREBASE_LONGPOLL_CALLBACK_ID_PARAM = 'cb'; +const FIREBASE_LONGPOLL_SEGMENT_NUM_PARAM = 'seg'; +const FIREBASE_LONGPOLL_SEGMENTS_IN_PACKET = 'ts'; +const FIREBASE_LONGPOLL_DATA_PARAM = 'd'; +const FIREBASE_LONGPOLL_DISCONN_FRAME_PARAM = 'disconn'; +const FIREBASE_LONGPOLL_DISCONN_FRAME_REQUEST_PARAM = 'dframe'; + +//Data size constants. +//TODO: Perf: the maximum length actually differs from browser to browser. +// We should check what browser we're on and set accordingly. +const MAX_URL_DATA_SIZE = 1870; +const SEG_HEADER_SIZE = 30; //ie: &seg=8299234&ts=982389123&d= +const MAX_PAYLOAD_SIZE = MAX_URL_DATA_SIZE - SEG_HEADER_SIZE; + +/** + * Keepalive period + * send a fresh request at minimum every 25 seconds. Opera has a maximum request + * length of 30 seconds that we can't exceed. + * @const + * @type {number} + */ +const KEEPALIVE_REQUEST_INTERVAL = 25000; + +/** + * How long to wait before aborting a long-polling connection attempt. + * @const + * @type {number} + */ +const LP_CONNECT_TIMEOUT = 30000; + +/** + * This class manages a single long-polling connection. + * + * @constructor + * @implements {Transport} + * @param {string} connId An identifier for this connection, used for logging + * @param {RepoInfo} repoInfo The info for the endpoint to send data to. 
+ * @param {string=} opt_transportSessionId Optional transportSessionid if we are reconnecting for an existing + * transport session + * @param {string=} opt_lastSessionId Optional lastSessionId if the PersistentConnection has already created a + * connection previously + */ +export class BrowserPollConnection implements Transport { + repoInfo; + bytesSent; + bytesReceived; + transportSessionId; + lastSessionId; + urlFn; + scriptTagHolder; + myDisconnFrame; + curSegmentNum; + myPacketOrderer; + id; + password; + private log_; + private stats_; + private everConnected_; + private connectTimeoutTimer_; + private onDisconnect_; + private isClosed_; + + constructor(public connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) { + this.log_ = logWrapper(connId); + this.repoInfo = repoInfo; + this.bytesSent = 0; + this.bytesReceived = 0; + this.stats_ = StatsManager.getCollection(repoInfo); + this.transportSessionId = transportSessionId; + this.everConnected_ = false; + this.lastSessionId = lastSessionId; + this.urlFn = (params) => repoInfo.connectionURL(CONSTANTS.LONG_POLLING, params); + }; + + /** + * + * @param {function(Object)} onMessage Callback when messages arrive + * @param {function()} onDisconnect Callback with connection lost. + */ + open(onMessage: (msg: Object) => any, onDisconnect: () => any) { + this.curSegmentNum = 0; + this.onDisconnect_ = onDisconnect; + this.myPacketOrderer = new PacketReceiver(onMessage); + this.isClosed_ = false; + + this.connectTimeoutTimer_ = setTimeout(() => { + this.log_('Timed out trying to connect.'); + // Make sure we clear the host cache + this.onClosed_(); + this.connectTimeoutTimer_ = null; + }, Math.floor(LP_CONNECT_TIMEOUT)); + + // Ensure we delay the creation of the iframe until the DOM is loaded. + executeWhenDOMReady(() => { + if (this.isClosed_) + return; + + //Set up a callback that gets triggered once a connection is set up. + this.scriptTagHolder = new FirebaseIFrameScriptHolder((command, arg1, arg2, arg3, arg4) => { + this.incrementIncomingBytes_(arguments); + if (!this.scriptTagHolder) + return; // we closed the connection. + + if (this.connectTimeoutTimer_) { + clearTimeout(this.connectTimeoutTimer_); + this.connectTimeoutTimer_ = null; + } + this.everConnected_ = true; + if (command == FIREBASE_LONGPOLL_START_PARAM) { + this.id = arg1; + this.password = arg2; + } else if (command === FIREBASE_LONGPOLL_CLOSE_COMMAND) { + // Don't clear the host cache. We got a response from the server, so we know it's reachable + if (arg1) { + // We aren't expecting any more data (other than what the server's already in the process of sending us + // through our already open polls), so don't send any more. + this.scriptTagHolder.sendNewPolls = false; + + // arg1 in this case is the last response number sent by the server. We should try to receive + // all of the responses up to this one before closing + this.myPacketOrderer.closeAfter(arg1, () => { this.onClosed_(); }); + } else { + this.onClosed_(); + } + } else { + throw new Error('Unrecognized command received: ' + command); + } + }, (pN, data) => { + this.incrementIncomingBytes_(arguments); + this.myPacketOrderer.handleResponse(pN, data); + }, () => { + this.onClosed_(); + }, this.urlFn); + + //Send the initial request to connect. The serial number is simply to keep the browser from pulling previous results + //from cache. 
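For reference, the start request described here boils down to a handful of query parameters. A condensed, hypothetical helper using only the literal parameter names declared at the top of this file (the version and session parameters are omitted because their names live in CONSTANTS):

function buildStartRequestParams(callbackId?: number): Record<string, string | number> {
  const params: Record<string, string | number> = {
    start: 't',                                 // FIREBASE_LONGPOLL_START_PARAM
    ser: Math.floor(Math.random() * 100000000), // cache-busting serial, as noted above
  };
  if (callbackId !== undefined) {
    params.cb = callbackId;                     // routes responses to this script holder
  }
  return params;
}

The resulting object is handed to urlFn, which appends it to the long-polling endpoint URL.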
+ const urlParams = {}; + urlParams[FIREBASE_LONGPOLL_START_PARAM] = 't'; + urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = Math.floor(Math.random() * 100000000); + if (this.scriptTagHolder.uniqueCallbackIdentifier) + urlParams[FIREBASE_LONGPOLL_CALLBACK_ID_PARAM] = this.scriptTagHolder.uniqueCallbackIdentifier; + urlParams[CONSTANTS.VERSION_PARAM] = CONSTANTS.PROTOCOL_VERSION; + if (this.transportSessionId) { + urlParams[CONSTANTS.TRANSPORT_SESSION_PARAM] = this.transportSessionId; + } + if (this.lastSessionId) { + urlParams[CONSTANTS.LAST_SESSION_PARAM] = this.lastSessionId; + } + if (!isNodeSdk() && + typeof location !== 'undefined' && + location.href && + location.href.indexOf(CONSTANTS.FORGE_DOMAIN) !== -1) { + urlParams[CONSTANTS.REFERER_PARAM] = CONSTANTS.FORGE_REF; + } + const connectURL = this.urlFn(urlParams); + this.log_('Connecting via long-poll to ' + connectURL); + this.scriptTagHolder.addTag(connectURL, () => { /* do nothing */ }); + }); + }; + + /** + * Call this when a handshake has completed successfully and we want to consider the connection established + */ + start() { + this.scriptTagHolder.startLongPoll(this.id, this.password); + this.addDisconnectPingFrame(this.id, this.password); + }; + + private static forceAllow_; + + /** + * Forces long polling to be considered as a potential transport + */ + static forceAllow() { + BrowserPollConnection.forceAllow_ = true; + }; + + private static forceDisallow_; + + /** + * Forces longpolling to not be considered as a potential transport + */ + static forceDisallow() { + BrowserPollConnection.forceDisallow_ = true; + }; + + // Static method, use string literal so it can be accessed in a generic way + static isAvailable() { + // NOTE: In React-Native there's normally no 'document', but if you debug a React-Native app in + // the Chrome debugger, 'document' is defined, but document.createElement is null (2015/06/08). + return BrowserPollConnection.forceAllow_ || ( + !BrowserPollConnection.forceDisallow_ && + typeof document !== 'undefined' && document.createElement != null && + !isChromeExtensionContentScript() && + !isWindowsStoreApp() && + !isNodeSdk() + ); + }; + + /** + * No-op for polling + */ + markConnectionHealthy() { }; + + /** + * Stops polling and cleans up the iframe + * @private + */ + private shutdown_() { + this.isClosed_ = true; + + if (this.scriptTagHolder) { + this.scriptTagHolder.close(); + this.scriptTagHolder = null; + } + + //remove the disconnect frame, which will trigger an XHR call to the server to tell it we're leaving. + if (this.myDisconnFrame) { + document.body.removeChild(this.myDisconnFrame); + this.myDisconnFrame = null; + } + + if (this.connectTimeoutTimer_) { + clearTimeout(this.connectTimeoutTimer_); + this.connectTimeoutTimer_ = null; + } + }; + + /** + * Triggered when this transport is closed + * @private + */ + private onClosed_() { + if (!this.isClosed_) { + this.log_('Longpoll is closing itself'); + this.shutdown_(); + + if (this.onDisconnect_) { + this.onDisconnect_(this.everConnected_); + this.onDisconnect_ = null; + } + } + }; + + /** + * External-facing close handler. RealTime has requested we shut down. Kill our connection and tell the server + * that we've left. + */ + close() { + if (!this.isClosed_) { + this.log_('Longpoll is being closed.'); + this.shutdown_(); + } + }; + + /** + * Send the JSON object down to the server. It will need to be stringified, base64 encoded, and then + * broken into chunks (since URLs have a small maximum length). 
+ * @param {!Object} data The JSON data to transmit. + */ + send(data: Object) { + const dataStr = stringify(data); + this.bytesSent += dataStr.length; + this.stats_.incrementCounter('bytes_sent', dataStr.length); + + //first, lets get the base64-encoded data + const base64data = base64Encode(dataStr); + + //We can only fit a certain amount in each URL, so we need to split this request + //up into multiple pieces if it doesn't fit in one request. + const dataSegs = splitStringBySize(base64data, MAX_PAYLOAD_SIZE); + + //Enqueue each segment for transmission. We assign each chunk a sequential ID and a total number + //of segments so that we can reassemble the packet on the server. + for (let i = 0; i < dataSegs.length; i++) { + this.scriptTagHolder.enqueueSegment(this.curSegmentNum, dataSegs.length, dataSegs[i]); + this.curSegmentNum++; + } + }; + + /** + * This is how we notify the server that we're leaving. + * We aren't able to send requests with DHTML on a window close event, but we can + * trigger XHR requests in some browsers (everything but Opera basically). + * @param {!string} id + * @param {!string} pw + */ + addDisconnectPingFrame(id: string, pw: string) { + if (isNodeSdk()) return; + this.myDisconnFrame = document.createElement('iframe'); + const urlParams = {}; + urlParams[FIREBASE_LONGPOLL_DISCONN_FRAME_REQUEST_PARAM] = 't'; + urlParams[FIREBASE_LONGPOLL_ID_PARAM] = id; + urlParams[FIREBASE_LONGPOLL_PW_PARAM] = pw; + this.myDisconnFrame.src = this.urlFn(urlParams); + this.myDisconnFrame.style.display = 'none'; + + document.body.appendChild(this.myDisconnFrame); + }; + + /** + * Used to track the bytes received by this client + * @param {*} args + * @private + */ + private incrementIncomingBytes_(args: any) { + // TODO: This is an annoying perf hit just to track the number of incoming bytes. Maybe it should be opt-in. + const bytesReceived = stringify(args).length; + this.bytesReceived += bytesReceived; + this.stats_.incrementCounter('bytes_received', bytesReceived); + }; +} + +interface IFrameElement extends HTMLIFrameElement { + doc: Document; +} + +/********************************************************************************************* + * A wrapper around an iframe that is used as a long-polling script holder. + * @constructor + * @param commandCB - The callback to be called when control commands are recevied from the server. + * @param onMessageCB - The callback to be triggered when responses arrive from the server. + * @param onDisconnect - The callback to be triggered when this tag holder is closed + * @param urlFn - A function that provides the URL of the endpoint to send data to. + *********************************************************************************************/ +class FirebaseIFrameScriptHolder { + //We maintain a count of all of the outstanding requests, because if we have too many active at once it can cause + //problems in some browsers. + /** + * @type {CountedSet.} + */ + outstandingRequests = new CountedSet(); + + //A queue of the pending segments waiting for transmission to the server. + pendingSegs = []; + + //A serial number. We use this for two things: + // 1) A way to ensure the browser doesn't cache responses to polls + // 2) A way to make the server aware when long-polls arrive in a different order than we started them. The + // server needs to release both polls in this case or it will cause problems in Opera since Opera can only execute + // JSONP code in the order it was added to the iframe. 
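Stepping back to send() above: a self-contained sketch of that segmenting scheme, simplified to per-packet segment numbers rather than the connection-wide curSegmentNum counter used by the real code:

interface Segment { seg: number; ts: number; d: string }

function splitIntoSegments(base64Data: string, maxSegmentSize: number): Segment[] {
  const total = Math.ceil(base64Data.length / maxSegmentSize);
  const segments: Segment[] = [];
  for (let i = 0; i < base64Data.length; i += maxSegmentSize) {
    segments.push({
      seg: segments.length,                      // position of this chunk
      ts: total,                                 // total chunks in the packet
      d: base64Data.slice(i, i + maxSegmentSize) // the chunk's payload
    });
  }
  return segments;
}

// The receiver can rebuild the packet once all `ts` chunks have arrived.
function reassemble(segments: Segment[]): string {
  return segments.slice().sort((a, b) => a.seg - b.seg).map(s => s.d).join('');
}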
+ currentSerial = Math.floor(Math.random() * 100000000); + + // This gets set to false when we're "closing down" the connection (e.g. we're switching transports but there's still + // incoming data from the server that we're waiting for). + sendNewPolls = true; + + uniqueCallbackIdentifier: number; + myIFrame: IFrameElement; + alive: boolean; + myID: string; + myPW: string; + commandCB; + onMessageCB; + + constructor(commandCB, onMessageCB, public onDisconnect, public urlFn) { + if (!isNodeSdk()) { + //Each script holder registers a couple of uniquely named callbacks with the window. These are called from the + //iframes where we put the long-polling script tags. We have two callbacks: + // 1) Command Callback - Triggered for control issues, like starting a connection. + // 2) Message Callback - Triggered when new data arrives. + this.uniqueCallbackIdentifier = LUIDGenerator(); + window[FIREBASE_LONGPOLL_COMMAND_CB_NAME + this.uniqueCallbackIdentifier] = commandCB; + window[FIREBASE_LONGPOLL_DATA_CB_NAME + this.uniqueCallbackIdentifier] = onMessageCB; + + //Create an iframe for us to add script tags to. + this.myIFrame = FirebaseIFrameScriptHolder.createIFrame_(); + + // Set the iframe's contents. + let script = ''; + // if we set a javascript url, it's IE and we need to set the document domain. The javascript url is sufficient + // for ie9, but ie8 needs to do it again in the document itself. + if (this.myIFrame.src && this.myIFrame.src.substr(0, 'javascript:'.length) === 'javascript:') { + const currentDomain = document.domain; + script = ''; + } + const iframeContents = '' + script + ''; + try { + this.myIFrame.doc.open(); + this.myIFrame.doc.write(iframeContents); + this.myIFrame.doc.close(); + } catch (e) { + log('frame writing exception'); + if (e.stack) { + log(e.stack); + } + log(e); + } + } else { + this.commandCB = commandCB; + this.onMessageCB = onMessageCB; + } + } + + /** + * Each browser has its own funny way to handle iframes. Here we mush them all together into one object that I can + * actually use. + * @private + * @return {Element} + */ + private static createIFrame_(): IFrameElement { + const iframe = document.createElement('iframe'); + iframe.style.display = 'none'; + + // This is necessary in order to initialize the document inside the iframe + if (document.body) { + document.body.appendChild(iframe); + try { + // If document.domain has been modified in IE, this will throw an error, and we need to set the + // domain of the iframe's document manually. We can do this via a javascript: url as the src attribute + // Also note that we must do this *after* the iframe has been appended to the page. Otherwise it doesn't work. + const a = iframe.contentWindow.document; + if (!a) { + // Apologies for the log-spam, I need to do something to keep closure from optimizing out the assignment above. + log('No IE domain setting required'); + } + } catch (e) { + const domain = document.domain; + iframe.src = 'javascript:void((function(){document.open();document.domain=\'' + domain + + '\';document.close();})())'; + } + } else { + // LongPollConnection attempts to delay initialization until the document is ready, so hopefully this + // never gets hit. + throw 'Document body has not initialized. Wait to initialize Firebase until after the document is ready.'; + } + + // Get the document of the iframe in a browser-specific way. 
+ if (iframe.contentDocument) { + (iframe as any).doc = iframe.contentDocument; // Firefox, Opera, Safari + } else if (iframe.contentWindow) { + (iframe as any).doc = iframe.contentWindow.document; // Internet Explorer + } else if ((iframe as any).document) { + (iframe as any).doc = (iframe as any).document; //others? + } + + return iframe; + } + + /** + * Cancel all outstanding queries and remove the frame. + */ + close() { + //Mark this iframe as dead, so no new requests are sent. + this.alive = false; + + if (this.myIFrame) { + //We have to actually remove all of the html inside this iframe before removing it from the + //window, or IE will continue loading and executing the script tags we've already added, which + //can lead to some errors being thrown. Setting innerHTML seems to be the easiest way to do this. + this.myIFrame.doc.body.innerHTML = ''; + setTimeout(() => { + if (this.myIFrame !== null) { + document.body.removeChild(this.myIFrame); + this.myIFrame = null; + } + }, Math.floor(0)); + } + + if (isNodeSdk() && this.myID) { + var urlParams = {}; + urlParams[FIREBASE_LONGPOLL_DISCONN_FRAME_PARAM] = 't'; + urlParams[FIREBASE_LONGPOLL_ID_PARAM] = this.myID; + urlParams[FIREBASE_LONGPOLL_PW_PARAM] = this.myPW; + var theURL = this.urlFn(urlParams); + (FirebaseIFrameScriptHolder).nodeRestRequest(theURL); + } + + // Protect from being called recursively. + const onDisconnect = this.onDisconnect; + if (onDisconnect) { + this.onDisconnect = null; + onDisconnect(); + } + } + + /** + * Actually start the long-polling session by adding the first script tag(s) to the iframe. + * @param {!string} id - The ID of this connection + * @param {!string} pw - The password for this connection + */ + startLongPoll(id: string, pw: string) { + this.myID = id; + this.myPW = pw; + this.alive = true; + + //send the initial request. If there are requests queued, make sure that we transmit as many as we are currently able to. + while (this.newRequest_()) {} + }; + + /** + * This is called any time someone might want a script tag to be added. It adds a script tag when there aren't + * too many outstanding requests and we are still alive. + * + * If there are outstanding packet segments to send, it sends one. If there aren't, it sends a long-poll anyways if + * needed. + */ + private newRequest_() { + // We keep one outstanding request open all the time to receive data, but if we need to send data + // (pendingSegs.length > 0) then we create a new request to send the data. The server will automatically + // close the old request. + if (this.alive && this.sendNewPolls && this.outstandingRequests.count() < (this.pendingSegs.length > 0 ? 2 : 1)) { + //construct our url + this.currentSerial++; + const urlParams = {}; + urlParams[FIREBASE_LONGPOLL_ID_PARAM] = this.myID; + urlParams[FIREBASE_LONGPOLL_PW_PARAM] = this.myPW; + urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = this.currentSerial; + let theURL = this.urlFn(urlParams); + //Now add as much data as we can. + let curDataString = ''; + let i = 0; + + while (this.pendingSegs.length > 0) { + //first, lets see if the next segment will fit. + const nextSeg = this.pendingSegs[0]; + if (nextSeg.d.length + SEG_HEADER_SIZE + curDataString.length <= MAX_URL_DATA_SIZE) { + //great, the segment will fit. Lets append it. 
+ const theSeg = this.pendingSegs.shift(); + curDataString = curDataString + '&' + FIREBASE_LONGPOLL_SEGMENT_NUM_PARAM + i + '=' + theSeg.seg + + '&' + FIREBASE_LONGPOLL_SEGMENTS_IN_PACKET + i + '=' + theSeg.ts + '&' + FIREBASE_LONGPOLL_DATA_PARAM + i + '=' + theSeg.d; + i++; + } else { + break; + } + } + + theURL = theURL + curDataString; + this.addLongPollTag_(theURL, this.currentSerial); + + return true; + } else { + return false; + } + }; + + /** + * Queue a packet for transmission to the server. + * @param segnum - A sequential id for this packet segment used for reassembly + * @param totalsegs - The total number of segments in this packet + * @param data - The data for this segment. + */ + enqueueSegment(segnum, totalsegs, data) { + //add this to the queue of segments to send. + this.pendingSegs.push({seg: segnum, ts: totalsegs, d: data}); + + //send the data immediately if there isn't already data being transmitted, unless + //startLongPoll hasn't been called yet. + if (this.alive) { + this.newRequest_(); + } + }; + + /** + * Add a script tag for a regular long-poll request. + * @param {!string} url - The URL of the script tag. + * @param {!number} serial - The serial number of the request. + * @private + */ + private addLongPollTag_(url: string, serial: number) { + //remember that we sent this request. + this.outstandingRequests.add(serial, 1); + + const doNewRequest = () => { + this.outstandingRequests.remove(serial); + this.newRequest_(); + }; + + // If this request doesn't return on its own accord (by the server sending us some data), we'll + // create a new one after the KEEPALIVE interval to make sure we always keep a fresh request open. + const keepaliveTimeout = setTimeout(doNewRequest, Math.floor(KEEPALIVE_REQUEST_INTERVAL)); + + const readyStateCB = () => { + // Request completed. Cancel the keepalive. + clearTimeout(keepaliveTimeout); + + // Trigger a new request so we can continue receiving data. + doNewRequest(); + }; + + this.addTag(url, readyStateCB); + }; + + /** + * Add an arbitrary script tag to the iframe. + * @param {!string} url - The URL for the script tag source. + * @param {!function()} loadCB - A callback to be triggered once the script has loaded. 
+ */ + addTag(url: string, loadCB: () => any) { + if (isNodeSdk()) { + (this).doNodeLongPoll(url, loadCB); + } else { + setTimeout(() => { + try { + // if we're already closed, don't add this poll + if (!this.sendNewPolls) return; + const newScript = this.myIFrame.doc.createElement('script'); + newScript.type = 'text/javascript'; + newScript.async = true; + newScript.src = url; + newScript.onload = (newScript).onreadystatechange = function () { + const rstate = (newScript).readyState; + if (!rstate || rstate === 'loaded' || rstate === 'complete') { + newScript.onload = (newScript).onreadystatechange = null; + if (newScript.parentNode) { + newScript.parentNode.removeChild(newScript); + } + loadCB(); + } + }; + newScript.onerror = () => { + log('Long-poll script failed to load: ' + url); + this.sendNewPolls = false; + this.close(); + }; + this.myIFrame.doc.body.appendChild(newScript); + } catch (e) { + // TODO: we should make this error visible somehow + } + }, Math.floor(1)); + } + } +} + +if (isNodeSdk()) { + /** + * @type {?function({url: string, forever: boolean}, function(Error, number, string))} + */ + (FirebaseIFrameScriptHolder as any).request = null; + + /** + * @param {{url: string, forever: boolean}} req + * @param {function(string)=} onComplete + */ + (FirebaseIFrameScriptHolder as any).nodeRestRequest = function(req, onComplete) { + if (!(FirebaseIFrameScriptHolder as any).request) + (FirebaseIFrameScriptHolder as any).request = + /** @type {function({url: string, forever: boolean}, function(Error, number, string))} */ (require('request')); + + (FirebaseIFrameScriptHolder as any).request(req, function(error, response, body) { + if (error) + throw 'Rest request for ' + req.url + ' failed.'; + + if (onComplete) + onComplete(body); + }); + }; + + /** + * @param {!string} url + * @param {function()} loadCB + */ + (FirebaseIFrameScriptHolder.prototype).doNodeLongPoll = function(url, loadCB) { + var self = this; + (FirebaseIFrameScriptHolder as any).nodeRestRequest({ url: url, forever: true }, function(body) { + self.evalBody(body); + loadCB(); + }); + }; + + /** + * Evaluates the string contents of a jsonp response. + * @param {!string} body + */ + (FirebaseIFrameScriptHolder.prototype).evalBody = function(body) { + var jsonpCB; + //jsonpCB is externed in firebase-extern.js + eval('jsonpCB = function(' + FIREBASE_LONGPOLL_COMMAND_CB_NAME + ', ' + FIREBASE_LONGPOLL_DATA_CB_NAME + ') {' + + body + + '}'); + jsonpCB(this.commandCB, this.onMessageCB); + }; +} \ No newline at end of file diff --git a/src/database/realtime/Connection.ts b/src/database/realtime/Connection.ts new file mode 100644 index 00000000000..1896ed2946a --- /dev/null +++ b/src/database/realtime/Connection.ts @@ -0,0 +1,541 @@ +import { + error, + logWrapper, + requireKey, + setTimeoutNonBlocking, + warn, +} from '../core/util/util'; +import { PersistentStorage } from '../core/storage/storage'; +import { CONSTANTS } from './Constants'; +import { TransportManager } from './TransportManager'; +import { RepoInfo } from '../core/RepoInfo'; + +// Abort upgrade attempt if it takes longer than 60s. +const UPGRADE_TIMEOUT = 60000; + +// For some transports (WebSockets), we need to "validate" the transport by exchanging a few requests and responses. +// If we haven't sent enough requests within 5s, we'll start sending noop ping requests. +const DELAY_BEFORE_SENDING_EXTRA_REQUESTS = 5000; + +// If the initial data sent triggers a lot of bandwidth (i.e. 
it's a large put or a listen for a large amount of data) +// then we may not be able to exchange our ping/pong requests within the healthy timeout. So if we reach the timeout +// but we've sent/received enough bytes, we don't cancel the connection. +const BYTES_SENT_HEALTHY_OVERRIDE = 10 * 1024; +const BYTES_RECEIVED_HEALTHY_OVERRIDE = 100 * 1024; + + +const REALTIME_STATE_CONNECTING = 0; +const REALTIME_STATE_CONNECTED = 1; +const REALTIME_STATE_DISCONNECTED = 2; + +const MESSAGE_TYPE = 't'; +const MESSAGE_DATA = 'd'; +const CONTROL_SHUTDOWN = 's'; +const CONTROL_RESET = 'r'; +const CONTROL_ERROR = 'e'; +const CONTROL_PONG = 'o'; +const SWITCH_ACK = 'a'; +const END_TRANSMISSION = 'n'; +const PING = 'p'; + +const SERVER_HELLO = 'h'; + +/** + * Creates a new real-time connection to the server using whichever method works + * best in the current browser. + * + * @constructor + * @param {!string} connId - an id for this connection + * @param {!RepoInfo} repoInfo - the info for the endpoint to connect to + * @param {function(Object)} onMessage - the callback to be triggered when a server-push message arrives + * @param {function(number, string)} onReady - the callback to be triggered when this connection is ready to send messages. + * @param {function()} onDisconnect - the callback to be triggered when a connection was lost + * @param {function(string)} onKill - the callback to be triggered when this connection has permanently shut down. + * @param {string=} lastSessionId - last session id in persistent connection. is used to clean up old session in real-time server + + */ +export class Connection { + connectionCount; + id; + lastSessionId; + pendingDataMessages; + sessionId; + + private conn_; + private healthyTimeout_; + private isHealthy_; + private log_; + private onDisconnect_; + private onKill_; + private onMessage_; + private onReady_; + private primaryResponsesRequired_; + private repoInfo_; + private rx_; + private secondaryConn_; + private secondaryResponsesRequired_; + private state_; + private transportManager_; + private tx_; + + constructor(connId: string, + repoInfo: RepoInfo, + onMessage: (a: Object) => any, + onReady: (a: number, b: string) => any, + onDisconnect: () => any, + onKill: (a: string) => any, + lastSessionId?: string) { + this.id = connId; + this.log_ = logWrapper('c:' + this.id + ':'); + this.onMessage_ = onMessage; + this.onReady_ = onReady; + this.onDisconnect_ = onDisconnect; + this.onKill_ = onKill; + this.repoInfo_ = repoInfo; + this.pendingDataMessages = []; + this.connectionCount = 0; + this.transportManager_ = new TransportManager(repoInfo); + this.state_ = REALTIME_STATE_CONNECTING; + this.lastSessionId = lastSessionId; + this.log_('Connection created'); + this.start_(); + } + + /** + * Starts a connection attempt + * @private + */ + private start_() { + const conn = this.transportManager_.initialTransport(); + this.conn_ = new conn(this.nextTransportId_(), this.repoInfo_, /*transportSessionId=*/undefined, this.lastSessionId); + + // For certain transports (WebSockets), we need to send and receive several messages back and forth before we + // can consider the transport healthy. 
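+    // (WebSocketConnection, for example, declares responsesRequiredToBeHealthy = 2,
+    // so two server responses (data messages or pongs) must arrive before the
+    // primary is considered healthy; transports without the property default to 0
+    // and are treated as healthy as soon as the connection is established.)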
+ this.primaryResponsesRequired_ = conn['responsesRequiredToBeHealthy'] || 0; + + const onMessageReceived = this.connReceiver_(this.conn_); + const onConnectionLost = this.disconnReceiver_(this.conn_); + this.tx_ = this.conn_; + this.rx_ = this.conn_; + this.secondaryConn_ = null; + this.isHealthy_ = false; + + const self = this; + /* + * Firefox doesn't like when code from one iframe tries to create another iframe by way of the parent frame. + * This can occur in the case of a redirect, i.e. we guessed wrong on what server to connect to and received a reset. + * Somehow, setTimeout seems to make this ok. That doesn't make sense from a security perspective, since you should + * still have the context of your originating frame. + */ + setTimeout(function () { + // self.conn_ gets set to null in some of the tests. Check to make sure it still exists before using it + self.conn_ && self.conn_.open(onMessageReceived, onConnectionLost); + }, Math.floor(0)); + + + const healthyTimeout_ms = conn['healthyTimeout'] || 0; + if (healthyTimeout_ms > 0) { + this.healthyTimeout_ = setTimeoutNonBlocking(function () { + self.healthyTimeout_ = null; + if (!self.isHealthy_) { + if (self.conn_ && self.conn_.bytesReceived > BYTES_RECEIVED_HEALTHY_OVERRIDE) { + self.log_('Connection exceeded healthy timeout but has received ' + self.conn_.bytesReceived + + ' bytes. Marking connection healthy.'); + self.isHealthy_ = true; + self.conn_.markConnectionHealthy(); + } else if (self.conn_ && self.conn_.bytesSent > BYTES_SENT_HEALTHY_OVERRIDE) { + self.log_('Connection exceeded healthy timeout but has sent ' + self.conn_.bytesSent + + ' bytes. Leaving connection alive.'); + // NOTE: We don't want to mark it healthy, since we have no guarantee that the bytes have made it to + // the server. 
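+            // (Hence the lower 10 KiB threshold for bytesSent versus 100 KiB for
+            // bytesReceived: outbound traffic on its own is a weaker signal that
+            // the connection is actually working.)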
+ } else { + self.log_('Closing unhealthy connection after timeout.'); + self.close(); + } + } + }, Math.floor(healthyTimeout_ms)); + } + }; + + /** + * @return {!string} + * @private + */ + private nextTransportId_() { + return 'c:' + this.id + ':' + this.connectionCount++; + }; + + private disconnReceiver_(conn) { + const self = this; + return function (everConnected) { + if (conn === self.conn_) { + self.onConnectionLost_(everConnected); + } else if (conn === self.secondaryConn_) { + self.log_('Secondary connection lost.'); + self.onSecondaryConnectionLost_(); + } else { + self.log_('closing an old connection'); + } + } + }; + + private connReceiver_(conn) { + const self = this; + return function (message) { + if (self.state_ != REALTIME_STATE_DISCONNECTED) { + if (conn === self.rx_) { + self.onPrimaryMessageReceived_(message); + } else if (conn === self.secondaryConn_) { + self.onSecondaryMessageReceived_(message); + } else { + self.log_('message on old connection'); + } + } + }; + }; + + /** + * + * @param {Object} dataMsg An arbitrary data message to be sent to the server + */ + sendRequest(dataMsg) { + // wrap in a data message envelope and send it on + const msg = {'t': 'd', 'd': dataMsg}; + this.sendData_(msg); + }; + + tryCleanupConnection() { + if (this.tx_ === this.secondaryConn_ && this.rx_ === this.secondaryConn_) { + this.log_('cleaning up and promoting a connection: ' + this.secondaryConn_.connId); + this.conn_ = this.secondaryConn_; + this.secondaryConn_ = null; + // the server will shutdown the old connection + } + }; + + private onSecondaryControl_(controlData) { + if (MESSAGE_TYPE in controlData) { + const cmd = controlData[MESSAGE_TYPE]; + if (cmd === SWITCH_ACK) { + this.upgradeIfSecondaryHealthy_(); + } else if (cmd === CONTROL_RESET) { + // Most likely the session wasn't valid. Abandon the switch attempt + this.log_('Got a reset on secondary, closing it'); + this.secondaryConn_.close(); + // If we were already using this connection for something, than we need to fully close + if (this.tx_ === this.secondaryConn_ || this.rx_ === this.secondaryConn_) { + this.close(); + } + } else if (cmd === CONTROL_PONG) { + this.log_('got pong on secondary.'); + this.secondaryResponsesRequired_--; + this.upgradeIfSecondaryHealthy_(); + } + } + }; + + private onSecondaryMessageReceived_(parsedData) { + const layer = requireKey('t', parsedData); + const data = requireKey('d', parsedData); + if (layer == 'c') { + this.onSecondaryControl_(data); + } else if (layer == 'd') { + // got a data message, but we're still second connection. Need to buffer it up + this.pendingDataMessages.push(data); + } else { + throw new Error('Unknown protocol layer: ' + layer); + } + }; + + private upgradeIfSecondaryHealthy_() { + if (this.secondaryResponsesRequired_ <= 0) { + this.log_('Secondary connection is healthy.'); + this.isHealthy_ = true; + this.secondaryConn_.markConnectionHealthy(); + this.proceedWithUpgrade_(); + } else { + // Send a ping to make sure the connection is healthy. 
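+      // On the wire this is the layered control envelope, e.g.
+      //   {"t":"c","d":{"t":"p","d":{}}}
+      // The matching pong comes back as a control frame with command 'o'
+      // (CONTROL_PONG) and is counted down in onSecondaryControl_().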
+ this.log_('sending ping on secondary.'); + this.secondaryConn_.send({'t': 'c', 'd': {'t': PING, 'd': {}}}); + } + }; + + private proceedWithUpgrade_() { + // tell this connection to consider itself open + this.secondaryConn_.start(); + // send ack + this.log_('sending client ack on secondary'); + this.secondaryConn_.send({'t': 'c', 'd': {'t': SWITCH_ACK, 'd': {}}}); + + // send end packet on primary transport, switch to sending on this one + // can receive on this one, buffer responses until end received on primary transport + this.log_('Ending transmission on primary'); + this.conn_.send({'t': 'c', 'd': {'t': END_TRANSMISSION, 'd': {}}}); + this.tx_ = this.secondaryConn_; + + this.tryCleanupConnection(); + }; + + private onPrimaryMessageReceived_(parsedData) { + // Must refer to parsedData properties in quotes, so closure doesn't touch them. + const layer = requireKey('t', parsedData); + const data = requireKey('d', parsedData); + if (layer == 'c') { + this.onControl_(data); + } else if (layer == 'd') { + this.onDataMessage_(data); + } + }; + + private onDataMessage_(message) { + this.onPrimaryResponse_(); + + // We don't do anything with data messages, just kick them up a level + this.onMessage_(message); + }; + + private onPrimaryResponse_() { + if (!this.isHealthy_) { + this.primaryResponsesRequired_--; + if (this.primaryResponsesRequired_ <= 0) { + this.log_('Primary connection is healthy.'); + this.isHealthy_ = true; + this.conn_.markConnectionHealthy(); + } + } + }; + + private onControl_(controlData) { + const cmd = requireKey(MESSAGE_TYPE, controlData); + if (MESSAGE_DATA in controlData) { + const payload = controlData[MESSAGE_DATA]; + if (cmd === SERVER_HELLO) { + this.onHandshake_(payload); + } else if (cmd === END_TRANSMISSION) { + this.log_('recvd end transmission on primary'); + this.rx_ = this.secondaryConn_; + for (let i = 0; i < this.pendingDataMessages.length; ++i) { + this.onDataMessage_(this.pendingDataMessages[i]); + } + this.pendingDataMessages = []; + this.tryCleanupConnection(); + } else if (cmd === CONTROL_SHUTDOWN) { + // This was previously the 'onKill' callback passed to the lower-level connection + // payload in this case is the reason for the shutdown. Generally a human-readable error + this.onConnectionShutdown_(payload); + } else if (cmd === CONTROL_RESET) { + // payload in this case is the host we should contact + this.onReset_(payload); + } else if (cmd === CONTROL_ERROR) { + error('Server Error: ' + payload); + } else if (cmd === CONTROL_PONG) { + this.log_('got pong on primary.'); + this.onPrimaryResponse_(); + this.sendPingOnPrimaryIfNecessary_(); + } else { + error('Unknown control packet command: ' + cmd); + } + } + }; + + /** + * + * @param {Object} handshake The handshake data returned from the server + * @private + */ + private onHandshake_(handshake) { + const timestamp = handshake['ts']; + const version = handshake['v']; + const host = handshake['h']; + this.sessionId = handshake['s']; + this.repoInfo_.updateHost(host); + // if we've already closed the connection, then don't bother trying to progress further + if (this.state_ == REALTIME_STATE_CONNECTING) { + this.conn_.start(); + this.onConnectionEstablished_(this.conn_, timestamp); + if (CONSTANTS.PROTOCOL_VERSION !== version) { + warn('Protocol version mismatch detected'); + } + // TODO: do we want to upgrade? when? maybe a delay? 
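+      // For now the upgrade is attempted immediately: startUpgrade_() opens the
+      // secondary transport and, once it has proven itself healthy (see
+      // upgradeIfSecondaryHealthy_()), traffic is switched over to it.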
+ this.tryStartUpgrade_(); + } + }; + + private tryStartUpgrade_() { + const conn = this.transportManager_.upgradeTransport(); + if (conn) { + this.startUpgrade_(conn); + } + }; + + private startUpgrade_(conn) { + this.secondaryConn_ = new conn(this.nextTransportId_(), + this.repoInfo_, this.sessionId); + // For certain transports (WebSockets), we need to send and receive several messages back and forth before we + // can consider the transport healthy. + this.secondaryResponsesRequired_ = conn['responsesRequiredToBeHealthy'] || 0; + + const onMessage = this.connReceiver_(this.secondaryConn_); + const onDisconnect = this.disconnReceiver_(this.secondaryConn_); + this.secondaryConn_.open(onMessage, onDisconnect); + + // If we haven't successfully upgraded after UPGRADE_TIMEOUT, give up and kill the secondary. + const self = this; + setTimeoutNonBlocking(function () { + if (self.secondaryConn_) { + self.log_('Timed out trying to upgrade.'); + self.secondaryConn_.close(); + } + }, Math.floor(UPGRADE_TIMEOUT)); + }; + + private onReset_(host) { + this.log_('Reset packet received. New host: ' + host); + this.repoInfo_.updateHost(host); + // TODO: if we're already "connected", we need to trigger a disconnect at the next layer up. + // We don't currently support resets after the connection has already been established + if (this.state_ === REALTIME_STATE_CONNECTED) { + this.close(); + } else { + // Close whatever connections we have open and start again. + this.closeConnections_(); + this.start_(); + } + }; + + private onConnectionEstablished_(conn, timestamp) { + this.log_('Realtime connection established.'); + this.conn_ = conn; + this.state_ = REALTIME_STATE_CONNECTED; + + if (this.onReady_) { + this.onReady_(timestamp, this.sessionId); + this.onReady_ = null; + } + + const self = this; + // If after 5 seconds we haven't sent enough requests to the server to get the connection healthy, + // send some pings. + if (this.primaryResponsesRequired_ === 0) { + this.log_('Primary connection is healthy.'); + this.isHealthy_ = true; + } else { + setTimeoutNonBlocking(function () { + self.sendPingOnPrimaryIfNecessary_(); + }, Math.floor(DELAY_BEFORE_SENDING_EXTRA_REQUESTS)); + } + }; + + private sendPingOnPrimaryIfNecessary_() { + // If the connection isn't considered healthy yet, we'll send a noop ping packet request. + if (!this.isHealthy_ && this.state_ === REALTIME_STATE_CONNECTED) { + this.log_('sending ping on primary.'); + this.sendData_({'t': 'c', 'd': {'t': PING, 'd': {}}}); + } + }; + + private onSecondaryConnectionLost_() { + const conn = this.secondaryConn_; + this.secondaryConn_ = null; + if (this.tx_ === conn || this.rx_ === conn) { + // we are relying on this connection already in some capacity. Therefore, a failure is real + this.close(); + } + }; + + /** + * + * @param {boolean} everConnected Whether or not the connection ever reached a server. Used to determine if + * we should flush the host cache + * @private + */ + private onConnectionLost_(everConnected) { + this.conn_ = null; + + // NOTE: IF you're seeing a Firefox error for this line, I think it might be because it's getting + // called on window close and REALTIME_STATE_CONNECTING is no longer defined. Just a guess. 
+ if (!everConnected && this.state_ === REALTIME_STATE_CONNECTING) { + this.log_('Realtime connection failed.'); + // Since we failed to connect at all, clear any cached entry for this namespace in case the machine went away + if (this.repoInfo_.isCacheableHost()) { + PersistentStorage.remove('host:' + this.repoInfo_.host); + // reset the internal host to what we would show the user, i.e. .firebaseio.com + this.repoInfo_.internalHost = this.repoInfo_.host; + } + } else if (this.state_ === REALTIME_STATE_CONNECTED) { + this.log_('Realtime connection lost.'); + } + + this.close(); + }; + + /** + * + * @param {string} reason + * @private + */ + private onConnectionShutdown_(reason) { + this.log_('Connection shutdown command received. Shutting down...'); + + if (this.onKill_) { + this.onKill_(reason); + this.onKill_ = null; + } + + // We intentionally don't want to fire onDisconnect (kill is a different case), + // so clear the callback. + this.onDisconnect_ = null; + + this.close(); + }; + + + private sendData_(data) { + if (this.state_ !== REALTIME_STATE_CONNECTED) { + throw 'Connection is not connected'; + } else { + this.tx_.send(data); + } + }; + + /** + * Cleans up this connection, calling the appropriate callbacks + */ + close() { + if (this.state_ !== REALTIME_STATE_DISCONNECTED) { + this.log_('Closing realtime connection.'); + this.state_ = REALTIME_STATE_DISCONNECTED; + + this.closeConnections_(); + + if (this.onDisconnect_) { + this.onDisconnect_(); + this.onDisconnect_ = null; + } + } + }; + + /** + * + * @private + */ + private closeConnections_() { + this.log_('Shutting down all connections'); + if (this.conn_) { + this.conn_.close(); + this.conn_ = null; + } + + if (this.secondaryConn_) { + this.secondaryConn_.close(); + this.secondaryConn_ = null; + } + + if (this.healthyTimeout_) { + clearTimeout(this.healthyTimeout_); + this.healthyTimeout_ = null; + } + }; +} + + diff --git a/src/database/realtime/Constants.ts b/src/database/realtime/Constants.ts new file mode 100644 index 00000000000..650ead1001c --- /dev/null +++ b/src/database/realtime/Constants.ts @@ -0,0 +1,20 @@ +export const CONSTANTS = { + + /** @const */ PROTOCOL_VERSION: '5', + + /** @const */ VERSION_PARAM: 'v', + + /** @const */ TRANSPORT_SESSION_PARAM: 's', + + /** @const */ REFERER_PARAM: 'r', + + /** @const */ FORGE_REF: 'f', + + /** @const */ FORGE_DOMAIN: 'firebaseio.com', + + /** @const */ LAST_SESSION_PARAM: 'ls', + + /** @const */ WEBSOCKET: 'websocket', + + /** @const */ LONG_POLLING: 'long_polling' +}; diff --git a/src/database/realtime/Transport.ts b/src/database/realtime/Transport.ts new file mode 100644 index 00000000000..b0d0b066738 --- /dev/null +++ b/src/database/realtime/Transport.ts @@ -0,0 +1,40 @@ +import { RepoInfo } from '../core/RepoInfo'; + +export abstract class Transport { + /** + * Bytes received since connection started. + * @type {number} + */ + abstract bytesReceived: number; + + /** + * Bytes sent since connection started. + * @type {number} + */ + abstract bytesSent: number; + + /** + * + * @param {string} connId An identifier for this connection, used for logging + * @param {RepoInfo} repoInfo The info for the endpoint to send data to. 
+ * @param {string=} transportSessionId Optional transportSessionId if this is connecting to an existing transport session + * @param {string=} lastSessionId Optional lastSessionId if there was a previous connection + * @interface + */ + constructor(connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) {} + + /** + * @param {function(Object)} onMessage Callback when messages arrive + * @param {function()} onDisconnect Callback with connection lost. + */ + abstract open(onMessage: (a: Object) => any, onDisconnect: () => any); + + abstract start(); + + abstract close(); + + /** + * @param {!Object} data The JSON data to transmit + */ + abstract send(data: Object); +} \ No newline at end of file diff --git a/src/database/realtime/TransportManager.ts b/src/database/realtime/TransportManager.ts new file mode 100644 index 00000000000..f9c7096488e --- /dev/null +++ b/src/database/realtime/TransportManager.ts @@ -0,0 +1,81 @@ +import { BrowserPollConnection } from "./BrowserPollConnection"; +import { WebSocketConnection } from "./WebSocketConnection"; +import { warn, each } from "../core/util/util"; + +/** + * Currently simplistic, this class manages what transport a Connection should use at various stages of its + * lifecycle. + * + * It starts with longpolling in a browser, and httppolling on node. It then upgrades to websockets if + * they are available. + * @constructor + * @param {!RepoInfo} repoInfo Metadata around the namespace we're connecting to + */ +export class TransportManager { + transports_: Array; + /** + * @const + * @type {!Array.} + */ + static get ALL_TRANSPORTS() { + return [ + BrowserPollConnection, + WebSocketConnection + ]; + } + constructor(repoInfo) { + this.initTransports_(repoInfo); + }; + + /** + * @param {!RepoInfo} repoInfo + * @private + */ + initTransports_(repoInfo) { + const isWebSocketsAvailable = WebSocketConnection && WebSocketConnection['isAvailable'](); + let isSkipPollConnection = isWebSocketsAvailable && !WebSocketConnection.previouslyFailed(); + + if (repoInfo.webSocketOnly) { + if (!isWebSocketsAvailable) + warn('wss:// URL used, but browser isn\'t known to support websockets. 
Trying anyway.'); + + isSkipPollConnection = true; + } + + if (isSkipPollConnection) { + this.transports_ = [WebSocketConnection]; + } else { + const transports = this.transports_ = []; + each(TransportManager.ALL_TRANSPORTS, function(i, transport) { + if (transport && transport['isAvailable']()) { + transports.push(transport); + } + }); + } + } + + /** + * @return {function(new:Transport, !string, !RepoInfo, string=, string=)} The constructor for the + * initial transport to use + */ + initialTransport() { + if (this.transports_.length > 0) { + return this.transports_[0]; + } else { + throw new Error('No transports available'); + } + } + + /** + * @return {?function(new:Transport, function(),function(), string=)} The constructor for the next + * transport, or null + */ + upgradeTransport() { + if (this.transports_.length > 1) { + return this.transports_[1]; + } else { + return null; + } + } +} + diff --git a/src/database/realtime/WebSocketConnection.ts b/src/database/realtime/WebSocketConnection.ts new file mode 100644 index 00000000000..009c643af65 --- /dev/null +++ b/src/database/realtime/WebSocketConnection.ts @@ -0,0 +1,388 @@ +import { RepoInfo } from '../core/RepoInfo'; +declare const MozWebSocket; + +import firebase from "../../app"; +import { assert } from '../../utils/assert'; +import { logWrapper, splitStringBySize } from '../core/util/util'; +import { StatsManager } from '../core/stats/StatsManager'; +import { CONSTANTS } from './Constants'; +import { CONSTANTS as ENV_CONSTANTS } from "../../utils/constants"; +import { PersistentStorage } from '../core/storage/storage'; +import { jsonEval, stringify } from '../../utils/json'; +import { isNodeSdk } from "../../utils/environment"; +import { Transport } from './Transport'; + +const WEBSOCKET_MAX_FRAME_SIZE = 16384; +const WEBSOCKET_KEEPALIVE_INTERVAL = 45000; + +let WebSocketImpl = null; +if (isNodeSdk()) { + WebSocketImpl = require('faye-websocket')['Client']; +} else if (typeof MozWebSocket !== 'undefined') { + WebSocketImpl = MozWebSocket; +} else if (typeof WebSocket !== 'undefined') { + WebSocketImpl = WebSocket; +} + +/** + * Create a new websocket connection with the given callbacks. + * @constructor + * @implements {Transport} + * @param {string} connId identifier for this transport + * @param {RepoInfo} repoInfo The info for the websocket endpoint. + * @param {string=} opt_transportSessionId Optional transportSessionId if this is connecting to an existing transport + * session + * @param {string=} opt_lastSessionId Optional lastSessionId if there was a previous connection + */ +export class WebSocketConnection implements Transport { + keepaliveTimer; + frames; + totalFrames: number; + bytesSent: number; + bytesReceived: number; + connURL; + onDisconnect; + onMessage; + mySock; + private log_; + private stats_; + private everConnected_: boolean; + private isClosed_: boolean; + + constructor(public connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) { + this.log_ = logWrapper(this.connId); + this.keepaliveTimer = null; + this.frames = null; + this.totalFrames = 0; + this.bytesSent = 0; + this.bytesReceived = 0; + this.stats_ = StatsManager.getCollection(repoInfo); + this.connURL = WebSocketConnection.connectionURL_(repoInfo, transportSessionId, lastSessionId); + } + + /** + * @param {RepoInfo} repoInfo The info for the websocket endpoint. 
+ * @param {string=} transportSessionId Optional transportSessionId if this is connecting to an existing transport + * session + * @param {string=} lastSessionId Optional lastSessionId if there was a previous connection + * @return {string} connection url + * @private + */ + private static connectionURL_(repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string): string { + const urlParams = {}; + urlParams[CONSTANTS.VERSION_PARAM] = CONSTANTS.PROTOCOL_VERSION; + + if (!isNodeSdk() && + typeof location !== 'undefined' && + location.href && + location.href.indexOf(CONSTANTS.FORGE_DOMAIN) !== -1) { + urlParams[CONSTANTS.REFERER_PARAM] = CONSTANTS.FORGE_REF; + } + if (transportSessionId) { + urlParams[CONSTANTS.TRANSPORT_SESSION_PARAM] = transportSessionId; + } + if (lastSessionId) { + urlParams[CONSTANTS.LAST_SESSION_PARAM] = lastSessionId; + } + return repoInfo.connectionURL(CONSTANTS.WEBSOCKET, urlParams); + } + + /** + * + * @param onMessage Callback when messages arrive + * @param onDisconnect Callback with connection lost. + */ + open(onMessage: (msg: Object) => any, onDisconnect: () => any) { + this.onDisconnect = onDisconnect; + this.onMessage = onMessage; + + this.log_('Websocket connecting to ' + this.connURL); + + this.everConnected_ = false; + // Assume failure until proven otherwise. + PersistentStorage.set('previous_websocket_failure', true); + + try { + if (isNodeSdk()) { + const device = ENV_CONSTANTS.NODE_ADMIN ? 'AdminNode' : 'Node'; + // UA Format: Firebase//// + const options = { + 'headers': { + 'User-Agent': 'Firebase/' + CONSTANTS.PROTOCOL_VERSION + '/' + firebase.SDK_VERSION + '/' + process.platform + '/' + device + }}; + + // Plumb appropriate http_proxy environment variable into faye-websocket if it exists. + const env = process['env']; + const proxy = (this.connURL.indexOf("wss://") == 0) + ? (env['HTTPS_PROXY'] || env['https_proxy']) + : (env['HTTP_PROXY'] || env['http_proxy']); + + if (proxy) { + options['proxy'] = { origin: proxy }; + } + + this.mySock = new WebSocketImpl(this.connURL, [], options); + } + else { + this.mySock = new WebSocketImpl(this.connURL); + } + this.mySock = new WebSocketImpl(this.connURL); + } catch (e) { + this.log_('Error instantiating WebSocket.'); + const error = e.message || e.data; + if (error) { + this.log_(error); + } + this.onClosed_(); + return; + } + + this.mySock.onopen = () => { + this.log_('Websocket connected.'); + this.everConnected_ = true; + }; + + this.mySock.onclose = () => { + this.log_('Websocket connection was disconnected.'); + this.mySock = null; + this.onClosed_(); + }; + + this.mySock.onmessage = (m) => { + this.handleIncomingFrame(m); + }; + + this.mySock.onerror = (e) => { + this.log_('WebSocket error. 
Closing connection.'); + const error = e.message || e.data; + if (error) { + this.log_(error); + } + this.onClosed_(); + }; + } + + /** + * No-op for websockets, we don't need to do anything once the connection is confirmed as open + */ + start() {}; + + static forceDisallow_: Boolean; + + static forceDisallow() { + WebSocketConnection.forceDisallow_ = true; + } + + static isAvailable(): boolean { + let isOldAndroid = false; + if (typeof navigator !== 'undefined' && navigator.userAgent) { + const oldAndroidRegex = /Android ([0-9]{0,}\.[0-9]{0,})/; + const oldAndroidMatch = navigator.userAgent.match(oldAndroidRegex); + if (oldAndroidMatch && oldAndroidMatch.length > 1) { + if (parseFloat(oldAndroidMatch[1]) < 4.4) { + isOldAndroid = true; + } + } + } + + return !isOldAndroid && WebSocketImpl !== null && !WebSocketConnection.forceDisallow_; + } + + /** + * Number of response before we consider the connection "healthy." + * @type {number} + * + * NOTE: 'responsesRequiredToBeHealthy' shouldn't need to be quoted, but closure removed it for some reason otherwise! + */ + static responsesRequiredToBeHealthy = 2; + + /** + * Time to wait for the connection te become healthy before giving up. + * @type {number} + * + * NOTE: 'healthyTimeout' shouldn't need to be quoted, but closure removed it for some reason otherwise! + */ + static healthyTimeout = 30000; + + /** + * Returns true if we previously failed to connect with this transport. + * @return {boolean} + */ + static previouslyFailed(): boolean { + // If our persistent storage is actually only in-memory storage, + // we default to assuming that it previously failed to be safe. + return PersistentStorage.isInMemoryStorage || + PersistentStorage.get('previous_websocket_failure') === true; + }; + + markConnectionHealthy() { + PersistentStorage.remove('previous_websocket_failure'); + }; + + private appendFrame_(data) { + this.frames.push(data); + if (this.frames.length == this.totalFrames) { + const fullMess = this.frames.join(''); + this.frames = null; + const jsonMess = jsonEval(fullMess); + + //handle the message + this.onMessage(jsonMess); + } + } + + /** + * @param {number} frameCount The number of frames we are expecting from the server + * @private + */ + private handleNewFrameCount_(frameCount: number) { + this.totalFrames = frameCount; + this.frames = []; + } + + /** + * Attempts to parse a frame count out of some text. If it can't, assumes a value of 1 + * @param {!String} data + * @return {?String} Any remaining data to be process, or null if there is none + * @private + */ + private extractFrameCount_(data: string): string | null { + assert(this.frames === null, 'We already have a frame buffer'); + // TODO: The server is only supposed to send up to 9999 frames (i.e. length <= 4), but that isn't being enforced + // currently. So allowing larger frame counts (length <= 6). See https://app.asana.com/0/search/8688598998380/8237608042508 + if (data.length <= 6) { + const frameCount = Number(data); + if (!isNaN(frameCount)) { + this.handleNewFrameCount_(frameCount); + return null; + } + } + this.handleNewFrameCount_(1); + return data; + }; + + /** + * Process a websocket frame that has arrived from the server. + * @param mess The frame data + */ + handleIncomingFrame(mess) { + if (this.mySock === null) + return; // Chrome apparently delivers incoming packets even after we .close() the connection sometimes. 
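+    // Framing example: send() splits a 40,000-character payload into
+    // ceil(40000 / WEBSOCKET_MAX_FRAME_SIZE) = 3 segments and first transmits
+    // the bare string '3'; extractFrameCount_() parses that,
+    // handleNewFrameCount_(3) starts a buffer, and appendFrame_() reassembles
+    // the next three frames before handing the joined JSON to onMessage.
+    // Payloads that fit in a single frame skip the count and are processed as
+    // one frame.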
+ const data = mess['data']; + this.bytesReceived += data.length; + this.stats_.incrementCounter('bytes_received', data.length); + + this.resetKeepAlive(); + + if (this.frames !== null) { + // we're buffering + this.appendFrame_(data); + } else { + // try to parse out a frame count, otherwise, assume 1 and process it + const remainingData = this.extractFrameCount_(data); + if (remainingData !== null) { + this.appendFrame_(remainingData); + } + } + }; + + /** + * Send a message to the server + * @param {Object} data The JSON object to transmit + */ + send(data: Object) { + + this.resetKeepAlive(); + + const dataStr = stringify(data); + this.bytesSent += dataStr.length; + this.stats_.incrementCounter('bytes_sent', dataStr.length); + + //We can only fit a certain amount in each websocket frame, so we need to split this request + //up into multiple pieces if it doesn't fit in one request. + + const dataSegs = splitStringBySize(dataStr, WEBSOCKET_MAX_FRAME_SIZE); + + //Send the length header + if (dataSegs.length > 1) { + this.sendString_(String(dataSegs.length)); + } + + //Send the actual data in segments. + for (let i = 0; i < dataSegs.length; i++) { + this.sendString_(dataSegs[i]); + } + }; + + private shutdown_() { + this.isClosed_ = true; + if (this.keepaliveTimer) { + clearInterval(this.keepaliveTimer); + this.keepaliveTimer = null; + } + + if (this.mySock) { + this.mySock.close(); + this.mySock = null; + } + }; + + private onClosed_() { + if (!this.isClosed_) { + this.log_('WebSocket is closing itself'); + this.shutdown_(); + + // since this is an internal close, trigger the close listener + if (this.onDisconnect) { + this.onDisconnect(this.everConnected_); + this.onDisconnect = null; + } + } + }; + + /** + * External-facing close handler. + * Close the websocket and kill the connection. + */ + close() { + if (!this.isClosed_) { + this.log_('WebSocket is being closed'); + this.shutdown_(); + } + }; + + /** + * Kill the current keepalive timer and start a new one, to ensure that it always fires N seconds after + * the last activity. + */ + resetKeepAlive() { + clearInterval(this.keepaliveTimer); + this.keepaliveTimer = setInterval(() => { + //If there has been no websocket activity for a while, send a no-op + if (this.mySock) { + this.sendString_('0'); + } + this.resetKeepAlive(); + }, Math.floor(WEBSOCKET_KEEPALIVE_INTERVAL)); + }; + + /** + * Send a string over the websocket. + * + * @param {string} str String to send. + * @private + */ + private sendString_(str: string) { + // Firefox seems to sometimes throw exceptions (NS_ERROR_UNEXPECTED) from websocket .send() + // calls for some unknown reason. We treat these as an error and disconnect. + // See https://app.asana.com/0/58926111402292/68021340250410 + try { + this.mySock.send(str); + } catch (e) { + this.log_('Exception thrown from WebSocket.send():', e.message || e.data, 'Closing connection.'); + setTimeout(this.onClosed_.bind(this), 0); + } + }; +} + + diff --git a/src/database/realtime/polling/PacketReceiver.ts b/src/database/realtime/polling/PacketReceiver.ts new file mode 100644 index 00000000000..ab23a83a967 --- /dev/null +++ b/src/database/realtime/polling/PacketReceiver.ts @@ -0,0 +1,58 @@ +import { exceptionGuard } from '../../core/util/util'; + +/** + * This class ensures the packets from the server arrive in order + * This class takes data from the server and ensures it gets passed into the callbacks in order. 
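+ * For example, if the responses numbered 1 and 2 arrive before response 0, they
+ * are parked in pendingResponses and only delivered to onMessage_ once
+ * handleResponse(0, ...) shows up, so the callbacks always fire in order.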
+ * @param onMessage + * @constructor + */ +export class PacketReceiver { + pendingResponses = []; + currentResponseNum = 0; + closeAfterResponse = -1; + onClose = null; + + constructor(private onMessage_: any) { + } + + closeAfter(responseNum, callback) { + this.closeAfterResponse = responseNum; + this.onClose = callback; + if (this.closeAfterResponse < this.currentResponseNum) { + this.onClose(); + this.onClose = null; + } + }; + + /** + * Each message from the server comes with a response number, and an array of data. The responseNumber + * allows us to ensure that we process them in the right order, since we can't be guaranteed that all + * browsers will respond in the same order as the requests we sent + * @param {number} requestNum + * @param {Array} data + */ + handleResponse(requestNum, data) { + this.pendingResponses[requestNum] = data; + while (this.pendingResponses[this.currentResponseNum]) { + const toProcess = this.pendingResponses[this.currentResponseNum]; + delete this.pendingResponses[this.currentResponseNum]; + for (let i = 0; i < toProcess.length; ++i) { + if (toProcess[i]) { + exceptionGuard(() => { + this.onMessage_(toProcess[i]); + }); + } + } + if (this.currentResponseNum === this.closeAfterResponse) { + if (this.onClose) { + clearTimeout(this.onClose); + this.onClose(); + this.onClose = null; + } + break; + } + this.currentResponseNum++; + } + } +} + diff --git a/src/firebase-browser.ts b/src/firebase-browser.ts new file mode 100644 index 00000000000..ae744edee2b --- /dev/null +++ b/src/firebase-browser.ts @@ -0,0 +1,24 @@ +/** +* Copyright 2017 Google Inc. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +import firebase from "./app"; +import './auth'; +import './database'; +import './storage'; +import './messaging'; + +// Export the single instance of firebase +export default firebase; diff --git a/src/firebase-node.ts b/src/firebase-node.ts new file mode 100644 index 00000000000..d1d6e24088d --- /dev/null +++ b/src/firebase-node.ts @@ -0,0 +1,37 @@ +/** +* Copyright 2017 Google Inc. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +import firebase from "./app"; +import './auth'; +import './database'; +import './database/nodePatches'; + + +var Storage = require('dom-storage'); +var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest; + +firebase.INTERNAL.extendNamespace({ + 'INTERNAL': { + 'node': { + 'localStorage': new Storage(null, { strict: true }), + 'sessionStorage': new Storage(null, { strict: true }), + 'XMLHttpRequest': XMLHttpRequest + } + } +}); + +// Export the single instance of firebase +export default firebase; diff --git a/src/firebase-react-native.ts b/src/firebase-react-native.ts new file mode 100644 index 00000000000..0e3092ee86a --- /dev/null +++ b/src/firebase-react-native.ts @@ -0,0 +1,32 @@ +/** +* Copyright 2017 Google Inc. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +import firebase from "./app"; +import './auth'; +import './database'; +import './storage'; + +var AsyncStorage = require('react-native').AsyncStorage; +firebase.INTERNAL.extendNamespace({ + 'INTERNAL': { + 'reactNative': { + 'AsyncStorage': AsyncStorage + } + } +}); + +// Export the single instance of firebase +export default firebase; diff --git a/src/firebase.ts b/src/firebase.ts deleted file mode 100644 index ca7d396b6b5..00000000000 --- a/src/firebase.ts +++ /dev/null @@ -1,64 +0,0 @@ -/** -* Copyright 2017 Google Inc. -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ -// Declare build time variable -declare const TARGET_ENVIRONMENT; - -import firebase from "./app"; -import './auth'; -// Import instance of FirebaseApp from ./app - -if (TARGET_ENVIRONMENT === 'node') { - // TARGET_ENVIRONMENT is a build-time variable that is injected to create - // all of the variable environment outputs - require('./database-node'); - - var Storage = require('dom-storage'); - var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest; - - firebase.INTERNAL.extendNamespace({ - 'INTERNAL': { - 'node': { - 'localStorage': new Storage(null, { strict: true }), - 'sessionStorage': new Storage(null, { strict: true }), - 'XMLHttpRequest': XMLHttpRequest - } - } - }); -} - -if (TARGET_ENVIRONMENT !== 'node') { - require('./database'); - require('./storage'); -} - -if (TARGET_ENVIRONMENT === 'react-native') { - var AsyncStorage = require('react-native').AsyncStorage; - firebase.INTERNAL.extendNamespace({ - 'INTERNAL': { - 'reactNative': { - 'AsyncStorage': AsyncStorage - } - } - }); -} - - -if (TARGET_ENVIRONMENT !== 'node' && TARGET_ENVIRONMENT !== 'react-native') { - require('./messaging'); -} - -// Export the single instance of firebase -export default firebase; diff --git a/src/utils/Sha1.ts b/src/utils/Sha1.ts new file mode 100644 index 00000000000..969db1ed161 --- /dev/null +++ b/src/utils/Sha1.ts @@ -0,0 +1,272 @@ +import { Hash } from './hash'; + +/** + * @fileoverview SHA-1 cryptographic hash. + * Variable names follow the notation in FIPS PUB 180-3: + * http://csrc.nist.gov/publications/fips/fips180-3/fips180-3_final.pdf. + * + * Usage: + * var sha1 = new sha1(); + * sha1.update(bytes); + * var hash = sha1.digest(); + * + * Performance: + * Chrome 23: ~400 Mbit/s + * Firefox 16: ~250 Mbit/s + * + */ + +/** + * SHA-1 cryptographic hash constructor. + * + * The properties declared here are discussed in the above algorithm document. + * @constructor + * @extends {Hash} + * @final + * @struct + */ +export class Sha1 extends Hash { + /** + * Holds the previous values of accumulated variables a-e in the compress_ + * function. + * @type {!Array} + * @private + */ + private chain_: Array = []; + + /** + * A buffer holding the partially computed hash result. + * @type {!Array} + * @private + */ + private buf_: Array = []; + + /** + * An array of 80 bytes, each a part of the message to be hashed. Referred to + * as the message schedule in the docs. + * @type {!Array} + * @private + */ + private W_: Array = []; + + /** + * Contains data needed to pad messages less than 64 bytes. + * @type {!Array} + * @private + */ + private pad_: Array = []; + + /** + * @private {number} + */ + private inbuf_: number = 0; + + /** + * @private {number} + */ + private total_: number = 0; + + constructor() { + super(); + + this.blockSize = 512 / 8; + + this.pad_[0] = 128; + for (var i = 1; i < this.blockSize; ++i) { + this.pad_[i] = 0; + } + + this.reset(); + } + + reset() { + this.chain_[0] = 0x67452301; + this.chain_[1] = 0xefcdab89; + this.chain_[2] = 0x98badcfe; + this.chain_[3] = 0x10325476; + this.chain_[4] = 0xc3d2e1f0; + + this.inbuf_ = 0; + this.total_ = 0; + } + + + /** + * Internal compress helper function. + * @param {!Array|!Uint8Array|string} buf Block to compress. + * @param {number=} opt_offset Offset of the block in the buffer. + * @private + */ + compress_(buf, opt_offset?) 
{ + if (!opt_offset) { + opt_offset = 0; + } + + var W = this.W_; + + // get 16 big endian words + if (typeof buf === 'string') { + for (var i = 0; i < 16; i++) { + // TODO(user): [bug 8140122] Recent versions of Safari for Mac OS and iOS + // have a bug that turns the post-increment ++ operator into pre-increment + // during JIT compilation. We have code that depends heavily on SHA-1 for + // correctness and which is affected by this bug, so I've removed all uses + // of post-increment ++ in which the result value is used. We can revert + // this change once the Safari bug + // (https://bugs.webkit.org/show_bug.cgi?id=109036) has been fixed and + // most clients have been updated. + W[i] = (buf.charCodeAt(opt_offset) << 24) | + (buf.charCodeAt(opt_offset + 1) << 16) | + (buf.charCodeAt(opt_offset + 2) << 8) | + (buf.charCodeAt(opt_offset + 3)); + opt_offset += 4; + } + } else { + for (var i = 0; i < 16; i++) { + W[i] = (buf[opt_offset] << 24) | + (buf[opt_offset + 1] << 16) | + (buf[opt_offset + 2] << 8) | + (buf[opt_offset + 3]); + opt_offset += 4; + } + } + + // expand to 80 words + for (var i = 16; i < 80; i++) { + var t = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]; + W[i] = ((t << 1) | (t >>> 31)) & 0xffffffff; + } + + var a = this.chain_[0]; + var b = this.chain_[1]; + var c = this.chain_[2]; + var d = this.chain_[3]; + var e = this.chain_[4]; + var f, k; + + // TODO(user): Try to unroll this loop to speed up the computation. + for (var i = 0; i < 80; i++) { + if (i < 40) { + if (i < 20) { + f = d ^ (b & (c ^ d)); + k = 0x5a827999; + } else { + f = b ^ c ^ d; + k = 0x6ed9eba1; + } + } else { + if (i < 60) { + f = (b & c) | (d & (b | c)); + k = 0x8f1bbcdc; + } else { + f = b ^ c ^ d; + k = 0xca62c1d6; + } + } + + var t = (((a << 5) | (a >>> 27)) + f + e + k + W[i]) & 0xffffffff; + e = d; + d = c; + c = ((b << 30) | (b >>> 2)) & 0xffffffff; + b = a; + a = t; + } + + this.chain_[0] = (this.chain_[0] + a) & 0xffffffff; + this.chain_[1] = (this.chain_[1] + b) & 0xffffffff; + this.chain_[2] = (this.chain_[2] + c) & 0xffffffff; + this.chain_[3] = (this.chain_[3] + d) & 0xffffffff; + this.chain_[4] = (this.chain_[4] + e) & 0xffffffff; + } + + update(bytes, opt_length?) { + // TODO(johnlenz): tighten the function signature and remove this check + if (bytes == null) { + return; + } + + if (opt_length === undefined) { + opt_length = bytes.length; + } + + var lengthMinusBlock = opt_length - this.blockSize; + var n = 0; + // Using local instead of member variables gives ~5% speedup on Firefox 16. + var buf = this.buf_; + var inbuf = this.inbuf_; + + // The outer while loop should execute at most twice. + while (n < opt_length) { + // When we have no data in the block to top up, we can directly process the + // input buffer (assuming it contains sufficient data). This gives ~25% + // speedup on Chrome 23 and ~15% speedup on Firefox 16, but requires that + // the data is provided in large chunks (or in multiples of 64 bytes). + if (inbuf == 0) { + while (n <= lengthMinusBlock) { + this.compress_(bytes, n); + n += this.blockSize; + } + } + + if (typeof bytes === 'string') { + while (n < opt_length) { + buf[inbuf] = bytes.charCodeAt(n); + ++inbuf; + ++n; + if (inbuf == this.blockSize) { + this.compress_(buf); + inbuf = 0; + // Jump to the outer loop so we use the full-block optimization. 
+ break; + } + } + } else { + while (n < opt_length) { + buf[inbuf] = bytes[n]; + ++inbuf; + ++n; + if (inbuf == this.blockSize) { + this.compress_(buf); + inbuf = 0; + // Jump to the outer loop so we use the full-block optimization. + break; + } + } + } + } + + this.inbuf_ = inbuf; + this.total_ += opt_length; + } + + + /** @override */ + digest() { + var digest = []; + var totalBits = this.total_ * 8; + + // Add pad 0x80 0x00*. + if (this.inbuf_ < 56) { + this.update(this.pad_, 56 - this.inbuf_); + } else { + this.update(this.pad_, this.blockSize - (this.inbuf_ - 56)); + } + + // Add # bits. + for (var i = this.blockSize - 1; i >= 56; i--) { + this.buf_[i] = totalBits & 255; + totalBits /= 256; // Don't use bit-shifting here! + } + + this.compress_(this.buf_); + + var n = 0; + for (var i = 0; i < 5; i++) { + for (var j = 24; j >= 0; j -= 8) { + digest[n] = (this.chain_[i] >> j) & 255; + ++n; + } + } + return digest; + } +} \ No newline at end of file diff --git a/src/utils/assert.ts b/src/utils/assert.ts new file mode 100644 index 00000000000..367179993a9 --- /dev/null +++ b/src/utils/assert.ts @@ -0,0 +1,21 @@ +import { CONSTANTS } from "./constants"; + +/** + * Throws an error if the provided assertion is falsy + * @param {*} assertion The assertion to be tested for falsiness + * @param {!string} message The message to display if the check fails + */ +export const assert = function(assertion, message) { + if (!assertion) { + throw assertionError(message); + } +}; + +/** + * Returns an Error object suitable for throwing. + * @param {string} message + * @return {!Error} + */ +export const assertionError = function(message) { + return new Error('Firebase Database (' + CONSTANTS.SDK_VERSION + ') INTERNAL ASSERT FAILED: ' + message); +}; diff --git a/src/utils/constants.ts b/src/utils/constants.ts new file mode 100644 index 00000000000..00501d44760 --- /dev/null +++ b/src/utils/constants.ts @@ -0,0 +1,19 @@ +/** + * @fileoverview Firebase constants. Some of these (@defines) can be overridden at compile-time. + */ + +export const CONSTANTS = { + /** + * @define {boolean} Whether this is the client Node.js SDK. + */ + NODE_CLIENT: false, + /** + * @define {boolean} Whether this is the Admin Node.js SDK. + */ + NODE_ADMIN: false, + + /** + * Firebase SDK Version + */ + SDK_VERSION: '${JSCORE_VERSION}' +} \ No newline at end of file diff --git a/src/utils/crypt.ts b/src/utils/crypt.ts new file mode 100644 index 00000000000..7ec1148d074 --- /dev/null +++ b/src/utils/crypt.ts @@ -0,0 +1,298 @@ +import { globalScope } from './globalScope'; + +const stringToByteArray = function(str) { + var output = [], p = 0; + for (var i = 0;i < str.length;i++) { + var c = str.charCodeAt(i); + while (c > 255) { + output[p++] = c & 255; + c >>= 8; + } + output[p++] = c; + } + return output; +}; + +/** + * Turns an array of numbers into the string given by the concatenation of the + * characters to which the numbers correspond. + * @param {Array} bytes Array of numbers representing characters. + * @return {string} Stringification of the array. + */ +const byteArrayToString = function(bytes) { + var CHUNK_SIZE = 8192; + + // Special-case the simple case for speed's sake. + if (bytes.length < CHUNK_SIZE) { + return String.fromCharCode.apply(null, bytes); + } + + // The remaining logic splits conversion by chunks since + // Function#apply() has a maximum parameter count. 
+ // See discussion: http://goo.gl/LrWmZ9 + + var str = ''; + for (var i = 0; i < bytes.length; i += CHUNK_SIZE) { + var chunk = bytes.slice(i, i + CHUNK_SIZE); + str += String.fromCharCode.apply(null, chunk); + } + return str; +}; + +// Static lookup maps, lazily populated by init_() +export const base64 = { + /** + * Maps bytes to characters. + * @type {Object} + * @private + */ + byteToCharMap_: null, + + /** + * Maps characters to bytes. + * @type {Object} + * @private + */ + charToByteMap_: null, + + /** + * Maps bytes to websafe characters. + * @type {Object} + * @private + */ + byteToCharMapWebSafe_: null, + + + /** + * Maps websafe characters to bytes. + * @type {Object} + * @private + */ + charToByteMapWebSafe_: null, + + + /** + * Our default alphabet, shared between + * ENCODED_VALS and ENCODED_VALS_WEBSAFE + * @type {string} + */ + ENCODED_VALS_BASE: + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + + 'abcdefghijklmnopqrstuvwxyz' + + '0123456789', + + /** + * Our default alphabet. Value 64 (=) is special; it means "nothing." + * @type {string} + */ + get ENCODED_VALS() { + return this.ENCODED_VALS_BASE + '+/='; + }, + + /** + * Our websafe alphabet. + * @type {string} + */ + get ENCODED_VALS_WEBSAFE() { + return this.ENCODED_VALS_BASE + '-_.' + }, + + /** + * Whether this browser supports the atob and btoa functions. This extension + * started at Mozilla but is now implemented by many browsers. We use the + * ASSUME_* variables to avoid pulling in the full useragent detection library + * but still allowing the standard per-browser compilations. + * + * @type {boolean} + */ + HAS_NATIVE_SUPPORT: typeof globalScope.atob === 'function', + + /** + * Base64-encode an array of bytes. + * + * @param {Array|Uint8Array} input An array of bytes (numbers with + * value in [0, 255]) to encode. + * @param {boolean=} opt_webSafe Boolean indicating we should use the + * alternative alphabet. + * @return {string} The base64 encoded string. + */ + encodeByteArray(input, opt_webSafe?) { + if (!Array.isArray(input)) { + throw Error('encodeByteArray takes an array as a parameter'); + } + + this.init_(); + + var byteToCharMap = opt_webSafe ? + this.byteToCharMapWebSafe_ : + this.byteToCharMap_; + + var output = []; + + for (var i = 0; i < input.length; i += 3) { + var byte1 = input[i]; + var haveByte2 = i + 1 < input.length; + var byte2 = haveByte2 ? input[i + 1] : 0; + var haveByte3 = i + 2 < input.length; + var byte3 = haveByte3 ? input[i + 2] : 0; + + var outByte1 = byte1 >> 2; + var outByte2 = ((byte1 & 0x03) << 4) | (byte2 >> 4); + var outByte3 = ((byte2 & 0x0F) << 2) | (byte3 >> 6); + var outByte4 = byte3 & 0x3F; + + if (!haveByte3) { + outByte4 = 64; + + if (!haveByte2) { + outByte3 = 64; + } + } + + output.push(byteToCharMap[outByte1], + byteToCharMap[outByte2], + byteToCharMap[outByte3], + byteToCharMap[outByte4]); + } + + return output.join(''); + }, + + + /** + * Base64-encode a string. + * + * @param {string} input A string to encode. + * @param {boolean=} opt_webSafe If true, we should use the + * alternative alphabet. + * @return {string} The base64 encoded string. + */ + encodeString(input, opt_webSafe) { + // Shortcut for Mozilla browsers that implement + // a native base64 encoder in the form of "btoa/atob" + if (this.HAS_NATIVE_SUPPORT && !opt_webSafe) { + return btoa(input); + } + return this.encodeByteArray( + stringToByteArray(input), opt_webSafe); + }, + + + /** + * Base64-decode a string. + * + * @param {string} input to decode. 
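+   *     (for example, 'SGVsbG8=' decodes to 'Hello')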
+ * @param {boolean=} opt_webSafe True if we should use the + * alternative alphabet. + * @return {string} string representing the decoded value. + */ + decodeString(input, opt_webSafe) { + // Shortcut for Mozilla browsers that implement + // a native base64 encoder in the form of "btoa/atob" + if (this.HAS_NATIVE_SUPPORT && !opt_webSafe) { + return atob(input); + } + return byteArrayToString(this.decodeStringToByteArray(input, opt_webSafe)); + }, + + + /** + * Base64-decode a string. + * + * In base-64 decoding, groups of four characters are converted into three + * bytes. If the encoder did not apply padding, the input length may not + * be a multiple of 4. + * + * In this case, the last group will have fewer than 4 characters, and + * padding will be inferred. If the group has one or two characters, it decodes + * to one byte. If the group has three characters, it decodes to two bytes. + * + * @param {string} input Input to decode. + * @param {boolean=} opt_webSafe True if we should use the web-safe alphabet. + * @return {!Array} bytes representing the decoded value. + */ + decodeStringToByteArray(input, opt_webSafe) { + this.init_(); + + var charToByteMap = opt_webSafe ? + this.charToByteMapWebSafe_ : + this.charToByteMap_; + + var output = []; + + for (var i = 0; i < input.length; ) { + var byte1 = charToByteMap[input.charAt(i++)]; + + var haveByte2 = i < input.length; + var byte2 = haveByte2 ? charToByteMap[input.charAt(i)] : 0; + ++i; + + var haveByte3 = i < input.length; + var byte3 = haveByte3 ? charToByteMap[input.charAt(i)] : 64; + ++i; + + var haveByte4 = i < input.length; + var byte4 = haveByte4 ? charToByteMap[input.charAt(i)] : 64; + ++i; + + if (byte1 == null || byte2 == null || + byte3 == null || byte4 == null) { + throw Error(); + } + + var outByte1 = (byte1 << 2) | (byte2 >> 4); + output.push(outByte1); + + if (byte3 != 64) { + var outByte2 = ((byte2 << 4) & 0xF0) | (byte3 >> 2); + output.push(outByte2); + + if (byte4 != 64) { + var outByte3 = ((byte3 << 6) & 0xC0) | byte4; + output.push(outByte3); + } + } + } + + return output; + }, + + + /** + * Lazy static initialization function. Called before + * accessing any of the static map variables. + * @private + */ + init_() { + if (!this.byteToCharMap_) { + this.byteToCharMap_ = {}; + this.charToByteMap_ = {}; + this.byteToCharMapWebSafe_ = {}; + this.charToByteMapWebSafe_ = {}; + + // We want quick mappings back and forth, so we precompute two maps. + for (var i = 0; i < this.ENCODED_VALS.length; i++) { + this.byteToCharMap_[i] = + this.ENCODED_VALS.charAt(i); + this.charToByteMap_[this.byteToCharMap_[i]] = i; + this.byteToCharMapWebSafe_[i] = + this.ENCODED_VALS_WEBSAFE.charAt(i); + this.charToByteMapWebSafe_[ + this.byteToCharMapWebSafe_[i]] = i; + + // Be forgiving when decoding and correctly decode both encodings. + if (i >= this.ENCODED_VALS_BASE.length) { + this.charToByteMap_[ + this.ENCODED_VALS_WEBSAFE.charAt(i)] = i; + this.charToByteMapWebSafe_[ + this.ENCODED_VALS.charAt(i)] = i; + } + } + } + } +}; + + + + + \ No newline at end of file diff --git a/src/utils/deep_copy.ts b/src/utils/deep_copy.ts new file mode 100644 index 00000000000..85040385f26 --- /dev/null +++ b/src/utils/deep_copy.ts @@ -0,0 +1,61 @@ +/** + * Do a deep-copy of basic JavaScript Objects or Arrays. + */ +export function deepCopy(value: T): T { + return deepExtend(undefined, value); +} + +/** + * Copy properties from source to target (recursively allows extension + * of Objects and Arrays). Scalar values in the target are over-written. 
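+ * For example, deepExtend({a: {b: 1}}, {a: {c: 2}}) returns {a: {b: 1, c: 2}},
+ * while deepExtend({a: 1}, {a: 2}) simply returns {a: 2}.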
+ * If target is undefined, an object of the appropriate type will be created + * (and returned). + * + * We recursively copy all child properties of plain Objects in the source- so + * that namespace- like dictionaries are merged. + * + * Note that the target can be a function, in which case the properties in + * the source Object are copied onto it as static properties of the Function. + */ +export function deepExtend(target: any, source: any): any { + if (!(source instanceof Object)) { + return source; + } + + switch (source.constructor) { + case Date: + // Treat Dates like scalars; if the target date object had any child + // properties - they will be lost! + let dateValue = (source as any) as Date; + return new Date(dateValue.getTime()); + + case Object: + if (target === undefined) { + target = {}; + } + break; + + case Array: + // Always copy the array source and overwrite the target. + target = []; + break; + + default: + // Not a plain Object - treat it as a scalar. + return source; + } + + for (let prop in source) { + if (!source.hasOwnProperty(prop)) { + continue; + } + target[prop] = deepExtend(target[prop], source[prop]); + } + + return target; +} + +// TODO: Really needed (for JSCompiler type checking)? +export function patchProperty(obj: any, prop: string, value: any) { + obj[prop] = value; +} \ No newline at end of file diff --git a/src/utils/environment.ts b/src/utils/environment.ts new file mode 100644 index 00000000000..c173ebaf00b --- /dev/null +++ b/src/utils/environment.ts @@ -0,0 +1,48 @@ +import { CONSTANTS } from "./constants"; + +/** + * Returns navigator.userAgent string or '' if it's not defined. + * @return {string} user agent string + */ +export const getUA = function() { + if (typeof navigator !== 'undefined' && + typeof navigator['userAgent'] === 'string') { + return navigator['userAgent']; + } else { + return ''; + } +}; + +/** + * Detect Cordova / PhoneGap / Ionic frameworks on a mobile device. + * + * Deliberately does not rely on checking `file://` URLs (as this fails PhoneGap in the Ripple emulator) nor + * Cordova `onDeviceReady`, which would normally wait for a callback. + * + * @return {boolean} isMobileCordova + */ +export const isMobileCordova = function() { + return typeof window !== 'undefined' && + !!(window['cordova'] || window['phonegap'] || window['PhoneGap']) && + /ios|iphone|ipod|ipad|android|blackberry|iemobile/i.test(getUA()); +}; + + +/** + * Detect React Native. + * + * @return {boolean} True if ReactNative environment is detected. + */ +export const isReactNative = function() { + return typeof navigator === 'object' && navigator['product'] === 'ReactNative'; +}; + + +/** + * Detect Node.js. + * + * @return {boolean} True if Node.js environment is detected. 
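+ *
+ * Illustrative note (added, not in the original): CONSTANTS.NODE_CLIENT is
+ * set to true by nodePatches.ts, so this returns true in the Node build.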
+ */ +export const isNodeSdk = function() { + return CONSTANTS.NODE_CLIENT === true || CONSTANTS.NODE_ADMIN === true; +}; diff --git a/src/utils/globalScope.ts b/src/utils/globalScope.ts new file mode 100644 index 00000000000..4ea25c7f5bf --- /dev/null +++ b/src/utils/globalScope.ts @@ -0,0 +1,15 @@ +let scope; + +if (typeof global !== 'undefined') { + scope = global; +} else if (typeof self !== 'undefined') { + scope = self; +} else { + try { + scope = Function('return this')(); + } catch (e) { + throw new Error('polyfill failed because global object is unavailable in this environment'); + } +} + +export const globalScope = scope; \ No newline at end of file diff --git a/src/utils/hash.ts b/src/utils/hash.ts new file mode 100644 index 00000000000..82261cfdda5 --- /dev/null +++ b/src/utils/hash.ts @@ -0,0 +1,36 @@ +// Copyright 2011 The Closure Library Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS-IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * @fileoverview Abstract cryptographic hash interface. + * + * See Sha1 and Md5 for sample implementations. + * + */ + +/** + * Create a cryptographic hash instance. + * + * @constructor + * @struct + */ +export class Hash { + /** + * The block size for the hasher. + * @type {number} + */ + blockSize: number = -1; + + constructor() {} +} \ No newline at end of file diff --git a/src/utils/json.ts b/src/utils/json.ts new file mode 100644 index 00000000000..da1917bd148 --- /dev/null +++ b/src/utils/json.ts @@ -0,0 +1,19 @@ +/** + * Evaluates a JSON string into a javascript object. + * + * @param {string} str A string containing JSON. + * @return {*} The javascript object representing the specified JSON. + */ +export const jsonEval = function(str) { + return JSON.parse(str); +}; + + +/** + * Returns JSON representing a javascript object. + * @param {*} data Javascript object to be stringified. + * @return {string} The JSON contents of the object. + */ +export const stringify = function(data) { + return JSON.stringify(data); +}; diff --git a/src/utils/jwt.ts b/src/utils/jwt.ts new file mode 100644 index 00000000000..edb8cb2b827 --- /dev/null +++ b/src/utils/jwt.ts @@ -0,0 +1,124 @@ +import { base64Decode } from "../database/core/util/util"; +import { jsonEval } from "./json"; + +/** + * Decodes a Firebase auth. token into constituent parts. + * + * Notes: + * - May return with invalid / incomplete claims if there's no native base64 decoding support. + * - Doesn't check if the token is actually valid. 
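+ *
+ * Illustrative shape (the token and claim values here are hypothetical):
+ *   var t = decode(someJwtString);
+ *   // t.claims might look like { iat: 1500000000, exp: 1500086400 }
+ *   // t.data holds the contents of the optional 'd' claim, if present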
+ * + * @param {?string} token + * @return {{header: *, claims: *, data: *, signature: string}} + */ +export const decode = function(token) { + var header = {}, + claims = {}, + data = {}, + signature = ''; + + try { + var parts = token.split('.'); + header = jsonEval(base64Decode(parts[0]) || ''); + claims = jsonEval(base64Decode(parts[1]) || ''); + signature = parts[2]; + data = claims['d'] || {}; + delete claims['d']; + } catch (e) {} + + return { + header: header, + claims: claims, + data: data, + signature: signature + }; +}; + +/** + * Decodes a Firebase auth. token and checks the validity of its time-based claims. Will return true if the + * token is within the time window authorized by the 'nbf' (not-before) and 'iat' (issued-at) claims. + * + * Notes: + * - May return a false negative if there's no native base64 decoding support. + * - Doesn't check if the token is actually valid. + * + * @param {?string} token + * @return {boolean} + */ +export const isValidTimestamp = function(token) { + var claims = decode(token).claims, + now = Math.floor(new Date().getTime() / 1000), + validSince, validUntil; + + if (typeof claims === 'object') { + if (claims.hasOwnProperty('nbf')) { + validSince = claims['nbf']; + } else if (claims.hasOwnProperty('iat')) { + validSince = claims['iat']; + } + + if (claims.hasOwnProperty('exp')) { + validUntil = claims['exp']; + } else { + // token will expire after 24h by default + validUntil = validSince + 86400; + } + } + + return now && validSince && validUntil && + (now >= validSince) && (now <= validUntil); +}; + +/** + * Decodes a Firebase auth. token and returns its issued at time if valid, null otherwise. + * + * Notes: + * - May return null if there's no native base64 decoding support. + * - Doesn't check if the token is actually valid. + * + * @param {?string} token + * @return {?number} + */ +export const issuedAtTime = function(token) { + var claims = decode(token).claims; + if (typeof claims === 'object' && claims.hasOwnProperty('iat')) { + return claims['iat']; + } + return null; +}; + +/** + * Decodes a Firebase auth. token and checks the validity of its format. Expects a valid issued-at time and non-empty + * signature. + * + * Notes: + * - May return a false negative if there's no native base64 decoding support. + * - Doesn't check if the token is actually valid. + * + * @param {?string} token + * @return {boolean} + */ +export const isValidFormat = function(token) { + var decoded = decode(token), + claims = decoded.claims; + + return !!decoded.signature && + !!claims && + (typeof claims === 'object') && + claims.hasOwnProperty('iat'); +}; + +/** + * Attempts to peer into an auth token and determine if it's an admin auth token by looking at the claims portion. + * + * Notes: + * - May return a false negative if there's no native base64 decoding support. + * - Doesn't check if the token is actually valid. 
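+ *
+ * Illustrative (hypothetical token): isAdmin(token) is true only when the
+ * decoded claims object contains admin === true.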
+ * + * @param {?string} token + * @return {boolean} + */ +export const isAdmin = function(token) { + var claims = decode(token).claims; + return (typeof claims === 'object' && claims['admin'] === true); +}; diff --git a/src/utils/nodePatches.ts b/src/utils/nodePatches.ts new file mode 100644 index 00000000000..4ea503703f7 --- /dev/null +++ b/src/utils/nodePatches.ts @@ -0,0 +1,123 @@ +import { CONSTANTS } from "./constants"; + +// Overriding the constant (we should be the only ones doing this) +CONSTANTS.NODE_CLIENT = true; + +/** + * @suppress {es5Strict} + */ +(function() { + var version = process['version']; + if (version === 'v0.10.22' || version === 'v0.10.23' || version === 'v0.10.24') { + /** + * The following duplicates much of `/lib/_stream_writable.js` at + * b922b5e90d2c14dd332b95827c2533e083df7e55, applying the fix for + * https://github.com/joyent/node/issues/6506. Note that this fix also + * needs to be applied to `Duplex.prototype.write()` (in + * `/lib/_stream_duplex.js`) as well. + */ + var Writable = require('_stream_writable'); + + Writable['prototype']['write'] = function(chunk, encoding, cb) { + var state = this['_writableState']; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer['isBuffer'](chunk)) + encoding = 'buffer'; + else if (!encoding) + encoding = state['defaultEncoding']; + + if (typeof cb !== 'function') + cb = function() {}; + + if (state['ended']) + writeAfterEnd(this, state, cb); + else if (validChunk(this, state, chunk, cb)) + ret = writeOrBuffer(this, state, chunk, encoding, cb); + + return ret; + }; + + function writeAfterEnd(stream, state, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream['emit']('error', er); + process['nextTick'](function() { + cb(er); + }); + } + + function validChunk(stream, state, chunk, cb) { + var valid = true; + if (!Buffer['isBuffer'](chunk) && + 'string' !== typeof chunk && + chunk !== null && + chunk !== undefined && + !state['objectMode']) { + var er = new TypeError('Invalid non-string/buffer chunk'); + stream['emit']('error', er); + process['nextTick'](function() { + cb(er); + }); + valid = false; + } + return valid; + } + + function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + if (Buffer['isBuffer'](chunk)) + encoding = 'buffer'; + var len = state['objectMode'] ? 1 : chunk['length']; + + state['length'] += len; + + var ret = state['length'] < state['highWaterMark']; + // we must ensure that previous needDrain will not be reset to false. 
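+ // (Note added for clarity, not in the original: once the buffered length
+ // reaches highWaterMark, ret is false and callers are expected to wait
+ // for the 'drain' event before writing again.)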
+ if (!ret) + state['needDrain'] = true; + + if (state['writing']) + state['buffer']['push'](new WriteReq(chunk, encoding, cb)); + else + doWrite(stream, state, len, chunk, encoding, cb); + + return ret; + } + + function decodeChunk(state, chunk, encoding) { + if (!state['objectMode'] && + state['decodeStrings'] !== false && + typeof chunk === 'string') { + chunk = new Buffer(chunk, encoding); + } + return chunk; + } + + /** + * @constructor + */ + function WriteReq(chunk, encoding, cb) { + this['chunk'] = chunk; + this['encoding'] = encoding; + this['callback'] = cb; + } + + function doWrite(stream, state, len, chunk, encoding, cb) { + state['writelen'] = len; + state['writecb'] = cb; + state['writing'] = true; + state['sync'] = true; + stream['_write'](chunk, encoding, state['onwrite']); + state['sync'] = false; + } + + var Duplex = require('_stream_duplex'); + Duplex['prototype']['write'] = Writable['prototype']['write']; + } +})(); diff --git a/src/utils/obj.ts b/src/utils/obj.ts new file mode 100644 index 00000000000..3019cf945f5 --- /dev/null +++ b/src/utils/obj.ts @@ -0,0 +1,132 @@ +// See http://www.devthought.com/2012/01/18/an-object-is-not-a-hash/ + +export const contains = function(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +}; + +export const safeGet = function(obj, key) { + if (Object.prototype.hasOwnProperty.call(obj, key)) + return obj[key]; + // else return undefined. +}; + +/** + * Enumerates the keys/values in an object, excluding keys defined on the prototype. + * + * @param {?Object.} obj Object to enumerate. + * @param {!function(K, V)} fn Function to call for each key and value. + * @template K,V + */ +export const forEach = function(obj, fn) { + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + fn(key, obj[key]); + } + } +}; + +/** + * Copies all the (own) properties from one object to another. + * @param {!Object} objTo + * @param {!Object} objFrom + * @return {!Object} objTo + */ +export const extend = function(objTo, objFrom) { + forEach(objFrom, function(key, value) { + objTo[key] = value; + }); + return objTo; +} + + +/** + * Returns a clone of the specified object. + * @param {!Object} obj + * @return {!Object} cloned obj. + */ +export const clone = function(obj) { + return extend({}, obj); +}; + + +/** + * Returns true if obj has typeof "object" and is not null. Unlike goog.isObject(), does not return true + * for functions. + * + * @param obj {*} A potential object. + * @returns {boolean} True if it's an object. + */ +export const isNonNullObject = function(obj) { + return typeof obj === 'object' && obj !== null; +}; + +export const isEmpty = function(obj) { + for (var key in obj) { + return false; + } + return true; +} + +export const getCount = function(obj) { + var rv = 0; + for (var key in obj) { + rv++; + } + return rv; +} + +export const map = function(obj, f, opt_obj?) { + var res = {}; + for (var key in obj) { + res[key] = f.call(opt_obj, obj[key], key, obj); + } + return res; +}; + +export const findKey = function(obj, fn, opt_this?) { + for (var key in obj) { + if (fn.call(opt_this, obj[key], key, obj)) { + return key; + } + } + return undefined; +}; + +export const findValue = function(obj, fn, opt_this?) 
{ + var key = findKey(obj, fn, opt_this); + return key && obj[key]; +}; + +export const getAnyKey = function(obj) { + for (var key in obj) { + return key; + } +}; + +export const getValues = function(obj) { + var res = []; + var i = 0; + for (var key in obj) { + res[i++] = obj[key]; + } + return res; +}; + +/** + * Tests whether every key/value pair in an object pass the test implemented + * by the provided function + * + * @param {?Object.} obj Object to test. + * @param {!function(K, V)} fn Function to call for each key and value. + * @template K,V + */ +export const every = function(obj: Object, fn: (k: string, v?: V) => boolean): boolean { + for (let key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + if (!fn(key, obj[key])) { + return false; + } + } + } + return true; +}; diff --git a/src/utils/promise.ts b/src/utils/promise.ts new file mode 100644 index 00000000000..db3477f4a1e --- /dev/null +++ b/src/utils/promise.ts @@ -0,0 +1,73 @@ +import { globalScope } from '../utils/globalScope'; + +export const PromiseImpl = globalScope.Promise || require('promise-polyfill'); + +/** + * A deferred promise implementation. + */ +export class Deferred { + resolve; + reject; + promise; + + /** @constructor */ + constructor() { + var self = this; + this.resolve = null; + this.reject = null; + this.promise = new PromiseImpl(function(resolve, reject) { + self.resolve = resolve; + self.reject = reject; + }); + } + + /** + * Our API internals are not promiseified and cannot because our callback APIs have subtle expectations around + * invoking promises inline, which Promises are forbidden to do. This method accepts an optional node-style callback + * and returns a node-style callback which will resolve or reject the Deferred's promise. + * @param {((?function(?(Error)): (?|undefined))| (?function(?(Error),?=): (?|undefined)))=} opt_nodeCallback + * @return {!function(?(Error), ?=)} + */ + wrapCallback(opt_nodeCallback?) { + var self = this; + /** + * @param {?Error} error + * @param {?=} opt_value + */ + function meta(error, opt_value) { + if (error) { + self.reject(error); + } else { + self.resolve(opt_value); + } + if (typeof opt_nodeCallback === 'function') { + attachDummyErrorHandler(self.promise); + + // Some of our callbacks don't expect a value and our own tests + // assert that the parameter length is 1 + if (opt_nodeCallback.length === 1) { + opt_nodeCallback(error); + } else { + opt_nodeCallback(error, opt_value); + } + } + } + return meta; + } +}; + + +/** + * Chrome (and maybe other browsers) report an Error in the console if you reject a promise + * and nobody handles the error. This is normally a good thing, but this will confuse devs who + * never intended to use promises in the first place. So in some cases (in particular, if the + * developer attached a callback), we should attach a dummy resolver to the promise to suppress + * this error. + * + * Note: We can't do this all the time, since it breaks the Promise spec (though in the obscure + * 3.3.3 section related to upgrading non-compliant promises). 
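+ *
+ * Illustrative usage (added example, not in the original source):
+ *   var d = new Deferred();
+ *   attachDummyErrorHandler(d.promise);
+ *   // a later d.reject(err) no longer triggers an unhandled-rejection warning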
+ * @param {!firebase.Promise} promise + */ +export const attachDummyErrorHandler = function(promise) { + promise.catch(() => {}); +}; \ No newline at end of file diff --git a/src/utils/utf8.ts b/src/utils/utf8.ts new file mode 100644 index 00000000000..369bc14416b --- /dev/null +++ b/src/utils/utf8.ts @@ -0,0 +1,75 @@ +import { assert } from "./assert"; + +// Code originally came from goog.crypt.stringToUtf8ByteArray, but for some reason they +// automatically replaced '\r\n' with '\n', and they didn't handle surrogate pairs, +// so it's been modified. + +// Note that not all Unicode characters appear as single characters in JavaScript strings. +// fromCharCode returns the UTF-16 encoding of a character - so some Unicode characters +// use 2 characters in Javascript. All 4-byte UTF-8 characters begin with a first +// character in the range 0xD800 - 0xDBFF (the first character of a so-called surrogate +// pair). +// See http://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3 + + +/** + * @param {string} str + * @return {Array} + */ +export const stringToByteArray = function(str) { + var out = [], p = 0; + for (var i = 0; i < str.length; i++) { + var c = str.charCodeAt(i); + + // Is this the lead surrogate in a surrogate pair? + if (c >= 0xd800 && c <= 0xdbff) { + var high = c - 0xd800; // the high 10 bits. + i++; + assert(i < str.length, 'Surrogate pair missing trail surrogate.'); + var low = str.charCodeAt(i) - 0xdc00; // the low 10 bits. + c = 0x10000 + (high << 10) + low; + } + + if (c < 128) { + out[p++] = c; + } else if (c < 2048) { + out[p++] = (c >> 6) | 192; + out[p++] = (c & 63) | 128; + } else if (c < 65536) { + out[p++] = (c >> 12) | 224; + out[p++] = ((c >> 6) & 63) | 128; + out[p++] = (c & 63) | 128; + } else { + out[p++] = (c >> 18) | 240; + out[p++] = ((c >> 12) & 63) | 128; + out[p++] = ((c >> 6) & 63) | 128; + out[p++] = (c & 63) | 128; + } + } + return out; +}; + + +/** + * Calculate length without actually converting; useful for doing cheaper validation. + * @param {string} str + * @return {number} + */ +export const stringLength = function(str) { + var p = 0; + for (var i = 0; i < str.length; i++) { + var c = str.charCodeAt(i); + if (c < 128) { + p++; + } else if (c < 2048) { + p += 2; + } else if (c >= 0xd800 && c <= 0xdbff) { + // Lead surrogate of a surrogate pair. The pair together will take 4 bytes to represent. + p += 4; + i++; // skip trail surrogate. + } else { + p += 3; + } + } + return p; +}; diff --git a/src/utils/util.ts b/src/utils/util.ts new file mode 100644 index 00000000000..edb896e49a5 --- /dev/null +++ b/src/utils/util.ts @@ -0,0 +1,43 @@ +import { forEach } from "./obj"; + +/** + * Returns a querystring-formatted string (e.g. &arg=val&arg2=val2) from a params + * object (e.g. {arg: 'val', arg2: 'val2'}) + * Note: You must prepend it with ? when adding it to a URL. + * + * @param {!Object} querystringParams + * @return {string} + */ +export const querystring = function(querystringParams) { + var params = []; + forEach(querystringParams, function(key, value) { + if (Array.isArray(value)) { + value.forEach(function(arrayVal) { + params.push(encodeURIComponent(key) + '=' + encodeURIComponent(arrayVal)); + }); + } else { + params.push(encodeURIComponent(key) + '=' + encodeURIComponent(value)); + } + }); + return (params.length) ? '&' + params.join('&') : ''; +}; + + +/** + * Decodes a querystring (e.g. ?arg=val&arg2=val2) into a params object (e.g. 
{arg: 'val', arg2: 'val2'}) + * + * @param {string} querystring + * @return {!Object} + */ +export const querystringDecode = function(querystring) { + var obj = {}; + var tokens = querystring.replace(/^\?/, '').split('&'); + + tokens.forEach(function(token) { + if (token) { + var key = token.split('='); + obj[key[0]] = key[1]; + } + }); + return obj; +}; \ No newline at end of file diff --git a/src/utils/validation.ts b/src/utils/validation.ts new file mode 100644 index 00000000000..dd85f3196b0 --- /dev/null +++ b/src/utils/validation.ts @@ -0,0 +1,87 @@ +/** + * Check to make sure the appropriate number of arguments are provided for a public function. + * Throws an error if it fails. + * + * @param {!string} fnName The function name + * @param {!number} minCount The minimum number of arguments to allow for the function call + * @param {!number} maxCount The maximum number of argument to allow for the function call + * @param {!number} argCount The actual number of arguments provided. + */ +export const validateArgCount = function(fnName, minCount, maxCount, argCount) { + var argError; + if (argCount < minCount) { + argError = 'at least ' + minCount; + } else if (argCount > maxCount) { + argError = (maxCount === 0) ? 'none' : ('no more than ' + maxCount); + } + if (argError) { + var error = fnName + ' failed: Was called with ' + argCount + + ((argCount === 1) ? ' argument.' : ' arguments.') + + ' Expects ' + argError + '.'; + throw new Error(error); + } +}; + +/** + * Generates a string to prefix an error message about failed argument validation + * + * @param {!string} fnName The function name + * @param {!number} argumentNumber The index of the argument + * @param {boolean} optional Whether or not the argument is optional + * @return {!string} The prefix to add to the error thrown for validation. + */ +export function errorPrefix(fnName, argumentNumber, optional) { + var argName = ''; + switch (argumentNumber) { + case 1: + argName = optional ? 'first' : 'First'; + break; + case 2: + argName = optional ? 'second' : 'Second'; + break; + case 3: + argName = optional ? 'third' : 'Third'; + break; + case 4: + argName = optional ? 'fourth' : 'Fourth'; + break; + default: + throw new Error('errorPrefix called with argumentNumber > 4. Need to update it?'); + } + + var error = fnName + ' failed: '; + + error += argName + ' argument '; + return error; +}; + +/** + * @param {!string} fnName + * @param {!number} argumentNumber + * @param {!string} namespace + * @param {boolean} optional + */ +export const validateNamespace = function(fnName, argumentNumber, namespace, optional) { + if (optional && !(namespace)) + return; + if (typeof namespace !== 'string') { + //TODO: I should do more validation here. We only allow certain chars in namespaces. 
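+ // (Clarifying note, not in the original: for now anything that is not a
+ // string falls through to the generic error below.)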
+ throw new Error(errorPrefix(fnName, argumentNumber, optional) + + 'must be a valid firebase namespace.'); + } +}; + +export const validateCallback = function(fnName, argumentNumber, callback, optional) { + if (optional && !(callback)) + return; + if (typeof callback !== 'function') + throw new Error(errorPrefix(fnName, argumentNumber, optional) + 'must be a valid function.'); +}; + +export const validateContextObject = function(fnName, argumentNumber, context, optional) { + if (optional && !(context)) + return; + if (typeof context !== 'object' || context === null) + throw new Error(errorPrefix(fnName, argumentNumber, optional) + + 'must be a valid context object.'); +}; diff --git a/tests/config/project.json b/tests/config/project.json new file mode 100644 index 00000000000..10dc41ebbbc --- /dev/null +++ b/tests/config/project.json @@ -0,0 +1 @@ +{"apiKey":"AIzaSyBNHCyZ-bpv-WA-HpXTmigJm2aq3z1kaH8","authDomain":"jscore-sandbox-141b5.firebaseapp.com","databaseURL":"https://jscore-sandbox-141b5.firebaseio.com","projectId":"jscore-sandbox-141b5","storageBucket":"jscore-sandbox-141b5.appspot.com","messagingSenderId":"280127633210"}