From 6e34e1d6a2cd6fcd14f6608ec694253a74175919 Mon Sep 17 00:00:00 2001 From: Your Name Date: Fri, 19 Sep 2025 13:59:34 +0800 Subject: [PATCH] =?UTF-8?q?=E5=88=9D=E5=A7=8B=E6=8F=90=E4=BA=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .airtap.yml | 19 + .github/codecov.yml | 6 + .github/dependabot.yml | 23 + .github/workflows/release.yml | 17 + .github/workflows/test.yml | 65 + .gitignore | 3 + CHANGELOG.md | 133 ++ LICENSE | 21 + README.md | 1924 +++++++++++++++++++++++ UPGRADING.md | 669 ++++++++ abstract-chained-batch.js | 397 +++++ abstract-iterator.js | 404 +++++ abstract-level.js | 1060 +++++++++++++ abstract-snapshot.js | 84 + index.d.ts | 52 + index.js | 9 + lib/abstract-sublevel-iterator.js | 141 ++ lib/abstract-sublevel.js | 286 ++++ lib/common.js | 33 + lib/default-chained-batch.js | 29 + lib/default-kv-iterator.js | 74 + lib/deferred-iterator.js | 110 ++ lib/deferred-queue.js | 84 + lib/errors.js | 21 + lib/event-monitor.js | 30 + lib/hooks.js | 78 + lib/prefixes.js | 21 + lib/prewrite-batch.js | 96 ++ lib/range-options.js | 26 + package.json | 65 + test/async-iterator-test.js | 132 ++ test/batch-test.js | 214 +++ test/chained-batch-test.js | 315 ++++ test/clear-range-test.js | 262 +++ test/clear-test.js | 110 ++ test/common.js | 90 ++ test/deferred-open-test.js | 209 +++ test/del-test.js | 65 + test/encoding-buffer-test.js | 240 +++ test/encoding-custom-test.js | 86 + test/encoding-decode-error-test.js | 67 + test/encoding-json-test.js | 69 + test/encoding-test.js | 119 ++ test/events/write.js | 132 ++ test/factory-test.js | 36 + test/get-many-test.js | 157 ++ test/get-test.js | 83 + test/has-many-test.js | 144 ++ test/has-test.js | 81 + test/hooks/newsub.js | 57 + test/hooks/postopen.js | 229 +++ test/hooks/prewrite.js | 816 ++++++++++ test/hooks/shared.js | 38 + test/index.js | 82 + test/iterator-explicit-snapshot-test.js | 370 +++++ test/iterator-no-snapshot-test.js | 49 + test/iterator-range-test.js 
| 277 ++++ test/iterator-seek-test.js | 335 ++++ test/iterator-snapshot-test.js | 56 + test/iterator-test.js | 621 ++++++++ test/manifest-test.js | 21 + test/open-create-if-missing-test.js | 39 + test/open-error-if-exists-test.js | 25 + test/open-test.js | 263 ++++ test/put-get-del-test.js | 86 + test/put-test.js | 70 + test/self.js | 905 +++++++++++ test/self/abstract-iterator-test.js | 182 +++ test/self/async-iterator-test.js | 242 +++ test/self/attach-resource-test.js | 74 + test/self/defer-test.js | 140 ++ test/self/deferred-iterator-test.js | 314 ++++ test/self/deferred-operations-test.js | 86 + test/self/deferred-queue-test.js | 93 ++ test/self/encoding-test.js | 391 +++++ test/self/errors-test.js | 11 + test/self/iterator-test.js | 1050 +++++++++++++ test/self/sublevel-test.js | 1039 ++++++++++++ test/sublevel-test.js | 209 +++ test/traits/closed.js | 42 + test/traits/index.js | 4 + test/traits/open.js | 62 + test/util.js | 269 ++++ tsconfig.json | 7 + types/abstract-chained-batch.d.ts | 126 ++ types/abstract-iterator.d.ts | 241 +++ types/abstract-level.d.ts | 634 ++++++++ types/abstract-snapshot.d.ts | 30 + types/abstract-sublevel.d.ts | 72 + types/interfaces.d.ts | 42 + 90 files changed, 18290 insertions(+) create mode 100644 .airtap.yml create mode 100644 .github/codecov.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/test.yml create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 LICENSE create mode 100644 README.md create mode 100644 UPGRADING.md create mode 100644 abstract-chained-batch.js create mode 100644 abstract-iterator.js create mode 100644 abstract-level.js create mode 100644 abstract-snapshot.js create mode 100644 index.d.ts create mode 100644 index.js create mode 100644 lib/abstract-sublevel-iterator.js create mode 100644 lib/abstract-sublevel.js create mode 100644 lib/common.js create mode 100644 lib/default-chained-batch.js create 
mode 100644 lib/default-kv-iterator.js create mode 100644 lib/deferred-iterator.js create mode 100644 lib/deferred-queue.js create mode 100644 lib/errors.js create mode 100644 lib/event-monitor.js create mode 100644 lib/hooks.js create mode 100644 lib/prefixes.js create mode 100644 lib/prewrite-batch.js create mode 100644 lib/range-options.js create mode 100644 package.json create mode 100644 test/async-iterator-test.js create mode 100644 test/batch-test.js create mode 100644 test/chained-batch-test.js create mode 100644 test/clear-range-test.js create mode 100644 test/clear-test.js create mode 100644 test/common.js create mode 100644 test/deferred-open-test.js create mode 100644 test/del-test.js create mode 100644 test/encoding-buffer-test.js create mode 100644 test/encoding-custom-test.js create mode 100644 test/encoding-decode-error-test.js create mode 100644 test/encoding-json-test.js create mode 100644 test/encoding-test.js create mode 100644 test/events/write.js create mode 100644 test/factory-test.js create mode 100644 test/get-many-test.js create mode 100644 test/get-test.js create mode 100644 test/has-many-test.js create mode 100644 test/has-test.js create mode 100644 test/hooks/newsub.js create mode 100644 test/hooks/postopen.js create mode 100644 test/hooks/prewrite.js create mode 100644 test/hooks/shared.js create mode 100644 test/index.js create mode 100644 test/iterator-explicit-snapshot-test.js create mode 100644 test/iterator-no-snapshot-test.js create mode 100644 test/iterator-range-test.js create mode 100644 test/iterator-seek-test.js create mode 100644 test/iterator-snapshot-test.js create mode 100644 test/iterator-test.js create mode 100644 test/manifest-test.js create mode 100644 test/open-create-if-missing-test.js create mode 100644 test/open-error-if-exists-test.js create mode 100644 test/open-test.js create mode 100644 test/put-get-del-test.js create mode 100644 test/put-test.js create mode 100644 test/self.js create mode 100644 
test/self/abstract-iterator-test.js create mode 100644 test/self/async-iterator-test.js create mode 100644 test/self/attach-resource-test.js create mode 100644 test/self/defer-test.js create mode 100644 test/self/deferred-iterator-test.js create mode 100644 test/self/deferred-operations-test.js create mode 100644 test/self/deferred-queue-test.js create mode 100644 test/self/encoding-test.js create mode 100644 test/self/errors-test.js create mode 100644 test/self/iterator-test.js create mode 100644 test/self/sublevel-test.js create mode 100644 test/sublevel-test.js create mode 100644 test/traits/closed.js create mode 100644 test/traits/index.js create mode 100644 test/traits/open.js create mode 100644 test/util.js create mode 100644 tsconfig.json create mode 100644 types/abstract-chained-batch.d.ts create mode 100644 types/abstract-iterator.d.ts create mode 100644 types/abstract-level.d.ts create mode 100644 types/abstract-snapshot.d.ts create mode 100644 types/abstract-sublevel.d.ts create mode 100644 types/interfaces.d.ts diff --git a/.airtap.yml b/.airtap.yml new file mode 100644 index 0000000..05d1af6 --- /dev/null +++ b/.airtap.yml @@ -0,0 +1,19 @@ +providers: + - airtap-playwright + +browsers: + - name: chromium + - name: firefox + - name: webkit + +presets: + electron: + providers: + - airtap-electron + browsers: + - name: electron + +# Until airtap switches to rollup +browserify: + - transform: babelify + presets: ["@babel/preset-env"] diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 0000000..1ecf896 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,6 @@ +coverage: + status: + project: + default: + threshold: 5% + patch: off diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..e0fb910 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,23 @@ +version: 2 +updates: + - package-ecosystem: npm + directory: / + schedule: + interval: monthly + ignore: + - dependency-name: standard + - 
dependency-name: ts-standard + - dependency-name: "@types/node" + - dependency-name: voxpelli/tsconfig + - dependency-name: typescript + - dependency-name: hallmark + - dependency-name: "@babel/preset-env" + - dependency-name: babelify + + # Stay on the 3rd or 4th oldest stable release, per + # https://www.electronjs.org/docs/latest/tutorial/electron-timelines#version-support-policy + - dependency-name: electron + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..f78711e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,17 @@ +name: Release +on: + push: + tags: ['*'] +permissions: + contents: write +jobs: + release: + name: Release + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Create GitHub release + uses: docker://antonyurchenko/git-release:v4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..04f870c --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,65 @@ +name: Test +on: [push, pull_request] +jobs: + node: + runs-on: ubuntu-latest + strategy: + matrix: + node: [18, 20, 22] + name: Node ${{ matrix.node }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Use node ${{ matrix.node }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node }} + - name: Install + run: npm install --ignore-scripts + - name: Test + run: npm test + - name: Coverage + run: npm run coverage + - name: Codecov + uses: codecov/codecov-action@v3 + with: + file: coverage/lcov.info + browsers: + name: Browsers + if: ${{ github.actor != 'dependabot[bot]' }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up node + uses: actions/setup-node@v4 + with: + node-version: lts/* + - name: Install + run: npm install 
--ignore-scripts + - name: Install Playwright dependencies + run: npx --no-install playwright install-deps + - name: Install Playwright + run: npx --no-install playwright install + - name: Test + run: npm run test-browsers + electron: + name: Electron + if: ${{ github.actor != 'dependabot[bot]' }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up node + uses: actions/setup-node@v4 + with: + node-version: lts/* + - name: Install + run: npm install --ignore-scripts + - name: Install Electron + run: npm run postinstall + working-directory: node_modules/electron + - name: Test + uses: GabrielBB/xvfb-action@v1 + with: + run: npm run test-electron diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1fd04da --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +node_modules +coverage +.nyc_output diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..9b22423 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,133 @@ +# Changelog + +## [3.0.1] - 2025-01-26 + +### Added + +- Test seeking outside of range options ([#113](https://github.com/Level/abstract-level/issues/113)) ([`90ee9b5`](https://github.com/Level/abstract-level/commit/90ee9b5)) (Vincent Weevers) + +## [3.0.0] - 2025-01-05 + +_Would you mind voting in this [community poll](https://github.com/orgs/Level/discussions/143)? Thank you! 
If you are upgrading, please see [`UPGRADING.md`](UPGRADING.md)._ + +### Changed + +- **Breaking:** use new language features ([#94](https://github.com/Level/abstract-level/issues/94)) ([`1fdb362`](https://github.com/Level/abstract-level/commit/1fdb362)) (Vincent Weevers) +- **Breaking:** make `iterator.seek()` a mandatory feature ([#105](https://github.com/Level/abstract-level/issues/105)) ([`daf2a88`](https://github.com/Level/abstract-level/commit/daf2a88)) (Vincent Weevers) +- **Breaking:** change `_checkKey` and `_checkValue` to assertions ([#108](https://github.com/Level/abstract-level/issues/108)) ([`ca3c368`](https://github.com/Level/abstract-level/commit/ca3c368)) (Vincent Weevers) + +### Added + +- Implement explicit snapshots ([#93](https://github.com/Level/abstract-level/issues/93)) ([`a8485a2`](https://github.com/Level/abstract-level/commit/a8485a2), [`f81d348`](https://github.com/Level/abstract-level/commit/f81d348), [`b5b583c`](https://github.com/Level/abstract-level/commit/b5b583c)) (Vincent Weevers) +- Implement `has()` and `hasMany()` ([#96](https://github.com/Level/abstract-level/issues/96)) ([`6684039`](https://github.com/Level/abstract-level/commit/6684039)) (Vincent Weevers) +- Implement `Symbol.asyncDispose` ([#95](https://github.com/Level/abstract-level/issues/95)) ([`eedeed9`](https://github.com/Level/abstract-level/commit/eedeed9)) (Vincent Weevers) +- Add docs and types for `attachResource()` & `detachResource()` ([#110](https://github.com/Level/abstract-level/issues/110)) ([`5f621d4`](https://github.com/Level/abstract-level/commit/5f621d4)) (Vincent Weevers) + +### Removed + +- **Breaking:** remove deprecated `put`, `del` & `batch` events ([#104](https://github.com/Level/abstract-level/issues/104)) ([`86bd271`](https://github.com/Level/abstract-level/commit/86bd271), [`7c32d39`](https://github.com/Level/abstract-level/commit/7c32d39)) (Vincent Weevers) +- **Breaking:** drop support of Node.js 16 
([#103](https://github.com/Level/abstract-level/issues/103)) ([`a05a8ea`](https://github.com/Level/abstract-level/commit/a05a8ea)) (Vincent Weevers) + +### Fixed + +- Close sublevels upon closing parent db ([#102](https://github.com/Level/abstract-level/issues/102)) ([`9eeb291`](https://github.com/Level/abstract-level/commit/9eeb291)) (Vincent Weevers) +- Avoid cloning option objects in more places ([#109](https://github.com/Level/abstract-level/issues/109)) ([`efd4175`](https://github.com/Level/abstract-level/commit/efd4175)) (Vincent Weevers) +- Refactor: use async/await in `closeResources()` ([#107](https://github.com/Level/abstract-level/issues/107)) ([`fdb7864`](https://github.com/Level/abstract-level/commit/fdb7864)) (Vincent Weevers) +- Refactor: restore use of spread operator ([#106](https://github.com/Level/abstract-level/issues/106)) ([`a5c2e52`](https://github.com/Level/abstract-level/commit/a5c2e52)) (Vincent Weevers) +- Fix skipped sublevel tests ([`f195d99`](https://github.com/Level/abstract-level/commit/f195d99)) (Vincent Weevers) + +## [2.0.2] - 2024-12-09 + +### Fixed + +- Fix TypeScript types of `get`, `getMany`, `nextv` and `all` ([#91](https://github.com/Level/abstract-level/issues/91)) ([`bbcfb04`](https://github.com/Level/abstract-level/commit/bbcfb04)) (Junxiao Shi) + +## [2.0.1] - 2024-10-21 + +### Fixed + +- Generalize prewrite test for memory-level ([#90](https://github.com/Level/abstract-level/issues/90)) ([`9ea8770`](https://github.com/Level/abstract-level/commit/9ea8770)) (Vincent Weevers) + +## [2.0.0] - 2024-02-03 + +_If you are upgrading, please see [`UPGRADING.md`](UPGRADING.md)._ + +### Changed + +- **Breaking:** remove callbacks in favor of promises ([#50](https://github.com/Level/abstract-level/issues/50)) ([`f97dbae`](https://github.com/Level/abstract-level/commit/f97dbae)) (Vincent Weevers) +- **Breaking:** use `undefined` instead of error for non-existing entries ([#49](https://github.com/Level/abstract-level/issues/49)) 
([`1e08b30`](https://github.com/Level/abstract-level/commit/1e08b30)) (Vincent Weevers) +- **Breaking:** add hooks and deprecate `batch`, `put` & `del` events ([#45](https://github.com/Level/abstract-level/issues/45), [#53](https://github.com/Level/abstract-level/issues/53), [#81](https://github.com/Level/abstract-level/issues/81)) ([`bcb4192`](https://github.com/Level/abstract-level/commit/bcb4192), [`bee1085`](https://github.com/Level/abstract-level/commit/bee1085), [`dbcf7d7`](https://github.com/Level/abstract-level/commit/dbcf7d7)) (Vincent Weevers) +- **Breaking:** require snapshots to be created synchronously ([#54](https://github.com/Level/abstract-level/issues/54)) ([`d89e68e`](https://github.com/Level/abstract-level/commit/d89e68e)) (Vincent Weevers). + +### Added + +- Add experimental support of `AbortSignal` ([#55](https://github.com/Level/abstract-level/issues/55), [#59](https://github.com/Level/abstract-level/issues/59)) ([`b075a25`](https://github.com/Level/abstract-level/commit/b075a25), [`e3fba20`](https://github.com/Level/abstract-level/commit/e3fba20)) (Vincent Weevers) +- Expose path of sublevel ([#78](https://github.com/Level/abstract-level/issues/78)) ([`20974f6`](https://github.com/Level/abstract-level/commit/20974f6)) (Vincent Weevers). 
+ +### Removed + +- **Breaking:** drop Node.js < 16 ([`9e8f561`](https://github.com/Level/abstract-level/commit/9e8f561)) (Vincent Weevers) +- **Breaking:** remove deferred chained batch ([#51](https://github.com/Level/abstract-level/issues/51), [#58](https://github.com/Level/abstract-level/issues/58)) ([`fc7be7b`](https://github.com/Level/abstract-level/commit/fc7be7b), [`e119cad`](https://github.com/Level/abstract-level/commit/e119cad)) (Vincent Weevers) +- **Breaking:** remove `ready` alias of `open` event ([#48](https://github.com/Level/abstract-level/issues/48)) ([`5f7b923`](https://github.com/Level/abstract-level/commit/5f7b923)) (Vincent Weevers) +- Remove compatibility checks for `levelup` & friends ([#52](https://github.com/Level/abstract-level/issues/52)) ([`def791f`](https://github.com/Level/abstract-level/commit/def791f)) (Vincent Weevers). + +### Fixed + +- Keep track of iterator end ([#56](https://github.com/Level/abstract-level/issues/56)) ([`9b78443`](https://github.com/Level/abstract-level/commit/9b78443)) (Vincent Weevers). 
+ +## [1.0.4] - 2024-01-20 + +### Fixed + +- Fix TypeScript definitions of `all()` and `nextv()` ([#67](https://github.com/Level/abstract-level/issues/67)) ([`8e85993`](https://github.com/Level/abstract-level/commit/8e85993), [`9f17757`](https://github.com/Level/abstract-level/commit/9f17757)) (Bryan) + +## [1.0.3] - 2022-03-20 + +### Added + +- Document error codes of `classic-level` and `many-level` ([#20](https://github.com/Level/abstract-level/issues/20)) ([`4b3464c`](https://github.com/Level/abstract-level/commit/4b3464c)) (Vincent Weevers) + +### Fixed + +- Add hidden `abortOnClose` option to iterators ([`2935180`](https://github.com/Level/abstract-level/commit/2935180)) (Vincent Weevers) +- Make internal iterator decoding options enumerable ([`eb08363`](https://github.com/Level/abstract-level/commit/eb08363)) (Vincent Weevers) +- Restore Sauce Labs browser tests ([`90b8816`](https://github.com/Level/abstract-level/commit/90b8816)) (Vincent Weevers) + +## [1.0.2] - 2022-03-06 + +### Fixed + +- Fix TypeScript declaration of chained batch `write()` options ([`392b7f7`](https://github.com/Level/abstract-level/commit/392b7f7)) (Vincent Weevers) +- Document the return type of `db.batch()` and add example ([`9739bba`](https://github.com/Level/abstract-level/commit/9739bba)) (Vincent Weevers) + +## [1.0.1] - 2022-02-06 + +### Fixed + +- Add `highWaterMarkBytes` option to tests where it matters ([`6b25a91`](https://github.com/Level/abstract-level/commit/6b25a91)) (Vincent Weevers) +- Clarify the meaning of `db.status` ([`2e90b05`](https://github.com/Level/abstract-level/commit/2e90b05)) (Vincent Weevers) +- Use `new` in README examples ([`379503e`](https://github.com/Level/abstract-level/commit/379503e)) (Vincent Weevers). + +## [1.0.0] - 2022-01-30 + +_:seedling: Initial release. 
If you are upgrading from `abstract-leveldown` please see [`UPGRADING.md`](UPGRADING.md)_ + +[3.0.1]: https://github.com/Level/abstract-level/releases/tag/v3.0.1 + +[3.0.0]: https://github.com/Level/abstract-level/releases/tag/v3.0.0 + +[2.0.2]: https://github.com/Level/abstract-level/releases/tag/v2.0.2 + +[2.0.1]: https://github.com/Level/abstract-level/releases/tag/v2.0.1 + +[2.0.0]: https://github.com/Level/abstract-level/releases/tag/v2.0.0 + +[1.0.4]: https://github.com/Level/abstract-level/releases/tag/v1.0.4 + +[1.0.3]: https://github.com/Level/abstract-level/releases/tag/v1.0.3 + +[1.0.2]: https://github.com/Level/abstract-level/releases/tag/v1.0.2 + +[1.0.1]: https://github.com/Level/abstract-level/releases/tag/v1.0.1 + +[1.0.0]: https://github.com/Level/abstract-level/releases/tag/v1.0.0 diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..2b3ffdb --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright © 2013 Rod Vagg and the contributors to abstract-level. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..02e631c --- /dev/null +++ b/README.md @@ -0,0 +1,1924 @@ +# abstract-level + +**Abstract class for a lexicographically sorted key-value database.** Provides encodings, sublevels, events and hooks. If you are upgrading, please see [`UPGRADING.md`](UPGRADING.md). + +> :pushpin: Wondering what happened to `levelup`? Visit [Frequently Asked Questions](https://github.com/Level/community#faq). + +[![level badge][level-badge]](https://github.com/Level/awesome) +[![npm](https://img.shields.io/npm/v/abstract-level.svg)](https://www.npmjs.com/package/abstract-level) +[![Node version](https://img.shields.io/node/v/abstract-level.svg)](https://www.npmjs.com/package/abstract-level) +[![Test](https://img.shields.io/github/actions/workflow/status/Level/abstract-level/test.yml?branch=main\&label=test)](https://github.com/Level/abstract-level/actions/workflows/test.yml) +[![Coverage](https://img.shields.io/codecov/c/github/Level/abstract-level?label=\&logo=codecov\&logoColor=fff)](https://codecov.io/gh/Level/abstract-level) +[![Standard](https://img.shields.io/badge/standard-informational?logo=javascript\&logoColor=fff)](https://standardjs.com) +[![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org) +[![Donate](https://img.shields.io/badge/donate-orange?logo=open-collective\&logoColor=fff)](https://opencollective.com/level) + +## Usage + +This module exports an abstract class. End users should instead use modules like [`level`](https://github.com/Level/level) that export a concrete implementation. 
The purpose of the abstract class is to provide a common interface that looks like this: + +```js +// Create a database +const db = new Level('./db', { valueEncoding: 'json' }) + +// Add an entry with key 'a' and value 1 +await db.put('a', 1) + +// Add multiple entries +await db.batch([{ type: 'put', key: 'b', value: 2 }]) + +// Get value of key 'a': 1 +const value = await db.get('a') + +// Iterate entries with keys that are greater than 'a' +for await (const [key, value] of db.iterator({ gt: 'a' })) { + console.log(value) // 2 +} +``` + +Usage from TypeScript requires generic type parameters. + +
TypeScript example + +```ts +// Specify types of keys and values (any, in the case of json). +// The generic type parameters default to Level. +const db = new Level('./db', { valueEncoding: 'json' }) + +// All relevant methods then use those types +await db.put('a', { x: 123 }) + +// Specify different types when overriding encoding per operation +await db.get('a', { valueEncoding: 'utf8' }) + +// Though in some cases TypeScript can infer them +await db.get('a', { valueEncoding: db.valueEncoding('utf8') }) + +// It works the same for sublevels +const abc = db.sublevel('abc') +const xyz = db.sublevel('xyz', { valueEncoding: 'json' }) +``` + +
+ +TypeScript users can benefit from the `using` keyword because `abstract-level` implements [`Symbol.asyncDispose`](https://github.com/tc39/proposal-explicit-resource-management) on its resources. For example: + +
Using example + +```ts +await db.put('example', 'before') +await using snapshot = db.snapshot() +await db.put('example', 'after') +await db.get('example', { snapshot }) // Returns 'before' +``` + +The equivalent in JavaScript would be: + +```js +await db.put('example', 'before') +const snapshot = db.snapshot() + +try { + await db.put('example', 'after') + await db.get('example', { snapshot }) // Returns 'before' +} finally { + await snapshot.close() +} +``` + +
+ +## Install + +With [npm](https://npmjs.org) do: + +```shell +npm install abstract-level +``` + +## Supported Platforms + +We aim to support Active LTS and Current Node.js releases, as well as evergreen browsers that are based on Chromium, Firefox or Webkit. Features that the runtime must support include [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask#browser_compatibility), [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled#browser_compatibility), [`globalThis`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/globalThis#browser_compatibility) and [async generators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function*#browser_compatibility). Supported runtimes may differ per implementation. + +## Public API For Consumers + +This module has a public API for consumers of a database and a [private API](#private-api-for-implementors) for concrete implementations. The public API, as documented in this section, offers a simple yet rich interface that is common between all implementations. Implementations may have additional options or methods. TypeScript [type declarations](https://www.typescriptlang.org/docs/handbook/2/type-declarations.html) are [included](./index.d.ts) (and exported for reuse) only for the public API. + +An `abstract-level` database is at its core a [key-value database](https://en.wikipedia.org/wiki/Key%E2%80%93value_database). A key-value pair is referred to as an _entry_ here and typically returned as an array, comparable to [`Object.entries()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries). + +### `db = new Level(...[, options])` + +Creating a database is done by calling a class constructor. 
Implementations export a class that extends the [`AbstractLevel`](./abstract-level.js) class and has its own constructor with an implementation-specific signature. All constructors should have an `options` argument as the last. Typically, constructors take a `location` as their first argument, pointing to where the data will be stored. That may be a file path, URL, something else or none at all, since not all implementations are disk-based or persistent. Others take another database rather than a location as their first argument. + +The optional `options` object may contain: + +- `keyEncoding` (string or object, default `'utf8'`): encoding to use for keys +- `valueEncoding` (string or object, default `'utf8'`): encoding to use for values. + +See [Encodings](#encodings) for a full description of these options. Other `options` (except `passive`) are forwarded to `db.open()` which is automatically called in a next tick after the constructor returns. Any read & write operations are queued internally until the database has finished opening. If opening fails, those queued operations will yield errors. + +### `db.status` + +Getter that returns a string reflecting the current state of the database: + +- `'opening'` - waiting for the database to be opened +- `'open'` - successfully opened the database +- `'closing'` - waiting for the database to be closed +- `'closed'` - database is closed. + +### `db.open([options])` + +Open the database. Returns a promise. Options passed to `open()` take precedence over options passed to the database constructor. Not all implementations support the `createIfMissing` and `errorIfExists` options (notably [`memory-level`](https://github.com/Level/memory-level) and [`browser-level`](https://github.com/Level/browser-level)) and will indicate so via `db.supports.createIfMissing` and `db.supports.errorIfExists`. 
+ +The optional `options` object may contain: + +- `createIfMissing` (boolean, default: `true`): If `true`, create an empty database if one doesn't already exist. If `false` and the database doesn't exist, opening will fail. +- `errorIfExists` (boolean, default: `false`): If `true` and the database already exists, opening will fail. +- `passive` (boolean, default: `false`): Wait for, but do not initiate, opening of the database. + +It's generally not necessary to call `open()` because it's automatically called by the database constructor. It may however be useful to capture an error from failure to open, that would otherwise not surface until another method like `db.get()` is called. It's also possible to reopen the database after it has been closed with [`close()`](#dbclose). Once `open()` has then been called, any read & write operations will again be queued internally until opening has finished. + +The `open()` and `close()` methods are idempotent. If the database is already open, the promise returned by `open()` will resolve without delay. If opening is already in progress, the promise will resolve when that has finished. If closing is in progress, the database will be reopened once closing has finished. Likewise, if `close()` is called after `open()`, the database will be closed once opening has finished. + +### `db.close()` + +Close the database. Returns a promise. + +A database may have associated resources like file handles and locks. When the database is no longer needed (for the remainder of a program) it's recommended to call `db.close()` to free up resources. + +After `db.close()` has been called, no further read & write operations are allowed unless and until `db.open()` is called again. For example, `db.get(key)` will yield an error with code [`LEVEL_DATABASE_NOT_OPEN`](#errors). Any unclosed iterators, snapshots and chained batches will be closed by `db.close()` and can then no longer be used even when `db.open()` is called again. 
+ +### `db.get(key[, options])` + +Get a value from the database by `key`. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. +- `valueEncoding`: custom value encoding for this operation, used to decode the value. +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from. + +Returns a promise for the value. If the `key` was not found then the value will be `undefined`. + +### `db.getMany(keys[, options])` + +Get multiple values from the database by an array of `keys`. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `keys`. +- `valueEncoding`: custom value encoding for this operation, used to decode values. +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from. + +Returns a promise for an array of values with the same order as `keys`. If a key was not found, the relevant value will be `undefined`. + +### `db.has(key[, options])` + +Check if the database has an entry with the given `key`. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from. + +Returns a promise for a boolean. For example: + +```js +if (await db.has('fruit')) { + console.log('We have fruit') +} +``` + +If the value of the entry is needed, instead do: + +```js +const value = await db.get('fruit') + +if (value !== undefined) { + console.log('We have fruit: %o', value) +} +``` + +### `db.hasMany(keys[, options])` + +Check if the database has entries with the given keys. The `keys` argument must be an array. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `keys`. +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from. 
+ +Returns a promise for an array of booleans with the same order as `keys`. For example: + +```js +await db.put('a', '123') +await db.hasMany(['a', 'b']) // [true, false] +``` + +### `db.put(key, value[, options])` + +Add a new entry or overwrite an existing entry. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. +- `valueEncoding`: custom value encoding for this operation, used to encode the `value`. + +Returns a promise. + +### `db.del(key[, options])` + +Delete an entry by `key`. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. + +Returns a promise. + +### `db.batch(operations[, options])` + +Perform multiple _put_ and/or _del_ operations in bulk. Returns a promise. The `operations` argument must be an array containing a list of operations to be executed sequentially, although as a whole they are performed as an atomic operation. + +Each operation must be an object with at least a `type` property set to either `'put'` or `'del'`. If the `type` is `'put'`, the operation must have `key` and `value` properties. It may optionally have `keyEncoding` and / or `valueEncoding` properties to encode keys or values with a custom encoding for just that operation. If the `type` is `'del'`, the operation must have a `key` property and may optionally have a `keyEncoding` property. + +An operation of either type may also have a `sublevel` property, to prefix the key of the operation with the prefix of that sublevel. This allows atomically committing data to multiple sublevels. The given `sublevel` must have the same _root_ (i.e. top-most) database as `db`. Keys and values will be encoded by the sublevel, to the same effect as a `sublevel.batch(..)` call. 
In the following example, the first `value` will be encoded with `'json'` rather than the default encoding of `db`: + +```js +const people = db.sublevel('people', { valueEncoding: 'json' }) +const nameIndex = db.sublevel('names') + +await db.batch([{ + type: 'put', + sublevel: people, + key: '123', + value: { + name: 'Alice' + } +}, { + type: 'put', + sublevel: nameIndex, + key: 'Alice', + value: '123' +}]) +``` + +The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this batch, used to encode keys. +- `valueEncoding`: custom value encoding for this batch, used to encode values. + +Encoding properties on individual operations take precedence. In the following example, the first value will be encoded with the `'utf8'` encoding and the second with `'json'`. + +```js +await db.batch([ + { type: 'put', key: 'a', value: 'foo' }, + { type: 'put', key: 'b', value: 123, valueEncoding: 'json' } +], { valueEncoding: 'utf8' }) +``` + +### `chainedBatch = db.batch()` + +Create a [chained batch](#chainedbatch), when `batch()` is called with zero arguments. A chained batch can be used to build and eventually commit an atomic batch of operations: + +```js +const chainedBatch = db.batch() + .del('bob') + .put('alice', 361) + .put('kim', 220) + +// Commit +await chainedBatch.write() +``` + +Depending on how it's used, it is possible to obtain greater overall performance with this form of `batch()`, mainly because its methods like `put()` can immediately copy the data of that singular operation to the underlying storage, rather than having to block the event loop while copying the data of multiple operations. However, on several `abstract-level` implementations, chained batch is just sugar and has no performance benefits. + +Due to its synchronous nature, it is not possible to create a chained batch before the database has finished opening. Be sure to call `await db.open()` before `chainedBatch = db.batch()`. 
This does not apply to other database methods. + +### `iterator = db.iterator([options])` + +Create an [iterator](#iterator). The optional `options` object may contain the following _range options_ to control the range of entries to be iterated: + +- `gt` (greater than) or `gte` (greater than or equal): define the lower bound of the range to be iterated. Only entries where the key is greater than (or equal to) this option will be included in the range. When `reverse` is true the order will be reversed, but the entries iterated will be the same. +- `lt` (less than) or `lte` (less than or equal): define the higher bound of the range to be iterated. Only entries where the key is less than (or equal to) this option will be included in the range. When `reverse` is true the order will be reversed, but the entries iterated will be the same. +- `reverse` (boolean, default: `false`): iterate entries in reverse order. Beware that a reverse seek can be slower than a forward seek. +- `limit` (number, default: `Infinity`): limit the number of entries yielded. This number represents a _maximum_ number of entries and will not be reached if the end of the range is reached first. A value of `Infinity` or `-1` means there is no limit. When `reverse` is true the entries with the highest keys will be returned instead of the lowest keys. + +The `gte` and `lte` range options take precedence over `gt` and `lt` respectively. If no range options are provided, the iterator will visit all entries of the database, starting at the lowest key and ending at the highest key (unless `reverse` is true). In addition to range options, the `options` object may contain: + +- `keys` (boolean, default: `true`): whether to return the key of each entry. If set to `false`, the iterator will yield keys that are `undefined`. Prefer to use `db.keys()` instead. +- `values` (boolean, default: `true`): whether to return the value of each entry. 
If set to `false`, the iterator will yield values that are `undefined`. Prefer to use `db.values()` instead. +- `keyEncoding`: custom key encoding for this iterator, used to encode range options, to encode `seek()` targets and to decode keys. +- `valueEncoding`: custom value encoding for this iterator, used to decode values. +- `signal`: an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) to [abort read operations on the iterator](#aborting-iterators). +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from. + +Lastly, an implementation is free to add its own options. + +> :pushpin: To instead consume data using streams, see [`level-read-stream`](https://github.com/Level/read-stream) and [`level-web-stream`](https://github.com/Level/web-stream). + +### `keyIterator = db.keys([options])` + +Create a [key iterator](#keyiterator), having the same interface as `db.iterator()` except that it yields keys instead of entries. If only keys are needed, using `db.keys()` may increase performance because values won't have to be fetched, copied or decoded. Options are the same as for `db.iterator()` except that `db.keys()` does not take `keys`, `values` and `valueEncoding` options. + +```js +// Iterate lazily +for await (const key of db.keys({ gt: 'a' })) { + console.log(key) +} + +// Get all at once. Setting a limit is recommended. +const keys = await db.keys({ gt: 'a', limit: 10 }).all() +``` + +### `valueIterator = db.values([options])` + +Create a [value iterator](#valueiterator), having the same interface as `db.iterator()` except that it yields values instead of entries. If only values are needed, using `db.values()` may increase performance because keys won't have to be fetched, copied or decoded. Options are the same as for `db.iterator()` except that `db.values()` does not take `keys` and `values` options. Note that it _does_ take a `keyEncoding` option, relevant for the encoding of range options. 
+ +```js +// Iterate lazily +for await (const value of db.values({ gt: 'a' })) { + console.log(value) +} + +// Get all at once. Setting a limit is recommended. +const values = await db.values({ gt: 'a', limit: 10 }).all() +``` + +### `db.clear([options])` + +Delete all entries or a range. Not guaranteed to be atomic. Returns a promise. Accepts the following options (with the same rules as on iterators): + +- `gt` (greater than) or `gte` (greater than or equal): define the lower bound of the range to be deleted. Only entries where the key is greater than (or equal to) this option will be included in the range. When `reverse` is true the order will be reversed, but the entries deleted will be the same. +- `lt` (less than) or `lte` (less than or equal): define the higher bound of the range to be deleted. Only entries where the key is less than (or equal to) this option will be included in the range. When `reverse` is true the order will be reversed, but the entries deleted will be the same. +- `reverse` (boolean, default: `false`): delete entries in reverse order. Only effective in combination with `limit`, to delete the last N entries. +- `limit` (number, default: `Infinity`): limit the number of entries to be deleted. This number represents a _maximum_ number of entries and will not be reached if the end of the range is reached first. A value of `Infinity` or `-1` means there is no limit. When `reverse` is true the entries with the highest keys will be deleted instead of the lowest keys. +- `keyEncoding`: custom key encoding for this operation, used to encode range options. +- `snapshot`: explicit [snapshot](#snapshot--dbsnapshotoptions) to read from, such that entries not present in the snapshot will not be deleted. If no `snapshot` is provided, the database may create its own internal snapshot but (unlike on other methods) this is currently not a hard requirement for implementations. 
+ +The `gte` and `lte` range options take precedence over `gt` and `lt` respectively. If no options are provided, all entries will be deleted. + +### `sublevel = db.sublevel(name[, options])` + +Create a [sublevel](#sublevel) that has the same interface as `db` (except for additional, implementation-specific methods) and prefixes the keys of operations before passing them on to `db`. The `name` argument is required and must be a string, or an array of strings (explained further below). + +```js +const example = db.sublevel('example') + +await example.put('hello', 'world') +await db.put('a', '1') + +// Prints ['hello', 'world'] +for await (const [key, value] of example.iterator()) { + console.log([key, value]) +} +``` + +Sublevels effectively separate a database into sections. Think SQL tables, but evented, ranged and realtime! Each sublevel is an `AbstractLevel` instance with its own keyspace, [encodings](https://github.com/Level/abstract-level#encodings), [hooks](https://github.com/Level/abstract-level#hooks) and [events](https://github.com/Level/abstract-level#events). For example, it's possible to have one sublevel with `'buffer'` keys and another with `'utf8'` keys. The same goes for values. Like so: + +```js +db.sublevel('one', { valueEncoding: 'json' }) +db.sublevel('two', { keyEncoding: 'buffer' }) +``` + +An own keyspace means that `sublevel.iterator()` only includes entries of that sublevel, `sublevel.clear()` will only delete entries of that sublevel, and so forth. Range options get prefixed too. + +Fully qualified keys (as seen from the parent database) take the form of `prefix + key` where `prefix` is `separator + name + separator`. If `name` is empty, the effective prefix is two separators. Sublevels can be nested: if `db` is itself a sublevel then the effective prefix is a combined prefix, e.g. `'!one!!two!'`. 
Note that a parent database will see its own keys as well as keys of any nested sublevels: + +```js +// Prints ['!example!hello', 'world'] and ['a', '1'] +for await (const [key, value] of db.iterator()) { + console.log([key, value]) +} +``` + +> :pushpin: The key structure is equal to that of [`subleveldown`](https://github.com/Level/subleveldown) which offered sublevels before they were built-in to `abstract-level`. This means that an `abstract-level` sublevel can read sublevels previously created with (and populated by) `subleveldown`. + +Internally, sublevels operate on keys that are either a string, Buffer or Uint8Array, depending on parent database and choice of encoding. Which is to say: binary keys are fully supported. The `name` must however always be a string and can only contain ASCII characters. + +The optional `options` object may contain: + +- `separator` (string, default: `'!'`): Character for separating sublevel names from user keys and each other. Must sort before characters used in `name`. An error will be thrown if that's not the case. +- `keyEncoding` (string or object, default `'utf8'`): encoding to use for keys +- `valueEncoding` (string or object, default `'utf8'`): encoding to use for values. + +The `keyEncoding` and `valueEncoding` options are forwarded to the `AbstractLevel` constructor and work the same, as if a new, separate database was created. They default to `'utf8'` regardless of the encodings configured on `db`. Other options are forwarded too but `abstract-level` has no relevant options at the time of writing. For example, setting the `createIfMissing` option will have no effect. Why is that? + +Like regular databases, sublevels open themselves, but they do not affect the state of the parent database. This means a sublevel can be individually closed and (re)opened. If the sublevel is created while the parent database is opening, it will wait for that to finish. 
Closing the parent database will automatically close the sublevel, along with other resources like iterators. + +Lastly, the `name` argument can be an array as a shortcut to create nested sublevels. Those are normally created like so: + +```js +const indexes = db.sublevel('idx') +const colorIndex = indexes.sublevel('colors') +``` + +Here, the parent database of `colorIndex` is `indexes`. Operations made on `colorIndex` are thus forwarded from that sublevel to `indexes` and from there to `db`. At each step, hooks and events are available to transform and react to data from a different perspective. Which comes at a (typically small) performance cost that increases with further nested sublevels. If the `indexes` sublevel is only used to organize keys and not directly interfaced with, operations on `colorIndex` can be made faster by skipping `indexes`: + +```js +const colorIndex = db.sublevel(['idx', 'colors']) +``` + +In this case, the parent database of `colorIndex` is `db`. Note that it's still possible to separately create the `indexes` sublevel, but it will be disconnected from `colorIndex`, meaning that `indexes` will not see (live) operations made on `colorIndex`. + +### `encoding = db.keyEncoding([encoding])` + +Returns the given `encoding` argument as a normalized encoding object that follows the [`level-transcoder`](https://github.com/Level/transcoder) encoding interface. See [Encodings](#encodings) for an introduction. 
The `encoding` argument may be: + +- A string to select a known encoding by its name +- An object that follows one of the following interfaces: [`level-transcoder`](https://github.com/Level/transcoder#encoding-interface), [`level-codec`](https://github.com/Level/codec#encoding-format), [`abstract-encoding`](https://github.com/mafintosh/abstract-encoding), [`multiformats`](https://github.com/multiformats/js-multiformats/blob/master/src/codecs/interface.ts) +- A previously normalized encoding, such that `keyEncoding(x)` equals `keyEncoding(keyEncoding(x))` +- Omitted, `null` or `undefined`, in which case the default `keyEncoding` of the database is returned. + +Other methods that take `keyEncoding` or `valueEncoding` options, accept the same as above. Results are cached. If the `encoding` argument is an object and it has a name then subsequent calls can refer to that encoding by name. + +Depending on the encodings supported by a database, this method may return a _transcoder encoding_ that translates the desired encoding from / to an encoding supported by the database. Its `encode()` and `decode()` methods will have respectively the same input and output types as a non-transcoded encoding, but its `name` property will differ. + +Assume that e.g. `db.keyEncoding().encode(key)` is safe to call at any time including if the database isn't open, because encodings must be stateless. If the given encoding is not found or supported, a [`LEVEL_ENCODING_NOT_FOUND` or `LEVEL_ENCODING_NOT_SUPPORTED` error](#errors) is thrown. + +### `encoding = db.valueEncoding([encoding])` + +Same as `db.keyEncoding([encoding])` except that it returns the default `valueEncoding` of the database (if the `encoding` argument is omitted, `null` or `undefined`). + +### `key = db.prefixKey(key, keyFormat[, local])` + +Add sublevel prefix to the given `key`, which must be already-encoded. If this database is not a sublevel, the given `key` is returned as-is. 
The `keyFormat` must be one of `'utf8'`, `'buffer'`, `'view'`. If `'utf8'` then `key` must be a string and the return value will be a string. If `'buffer'` then Buffer, if `'view'` then Uint8Array. + +```js +const sublevel = db.sublevel('example') + +console.log(db.prefixKey('a', 'utf8')) // 'a' +console.log(sublevel.prefixKey('a', 'utf8')) // '!example!a' +``` + +By default, the given `key` will be prefixed to form a fully-qualified key in the context of the _root_ (i.e. top-most) database, as the following example will demonstrate. If `local` is true, the given `key` will instead be prefixed to form a fully-qualified key in the context of the _parent_ database. + +```js +const sublevel = db.sublevel('example') +const nested = sublevel.sublevel('nested') + +console.log(nested.prefixKey('a', 'utf8')) // '!example!!nested!a' +console.log(nested.prefixKey('a', 'utf8', true)) // '!nested!a' +``` + +### `snapshot = db.snapshot([options])` + +Create an explicit [snapshot](#snapshot). Throws a [`LEVEL_NOT_SUPPORTED`](#level_not_supported) error if `db.supports.explicitSnapshots` is false ([Level/community#118](https://github.com/Level/community/issues/118)). For details, see [Reading From Snapshots](#reading-from-snapshots). + +There are currently no options but specific implementations may add their own. + +### `db.supports` + +A [manifest](https://github.com/Level/supports) describing the features supported by this database. Might be used like so: + +```js +if (!db.supports.permanence) { + throw new Error('Persistent storage is required') +} +``` + +### `db.defer(fn[, options])` + +Call the function `fn` at a later time when [`db.status`](#dbstatus) changes to `'open'` or `'closed'`. Known as a _deferred operation_. Used by `abstract-level` itself to implement "deferred open" which is a feature that makes it possible to call methods like `db.put()` before the database has finished opening. 
The `defer()` method is exposed for implementations and plugins to achieve the same on their custom methods: + +```js +db.foo = function (key) { + if (this.status === 'opening') { + this.defer(() => this.foo(key)) + } else { + // .. + } +} +``` + +The optional `options` object may contain: + +- `signal`: an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) to abort the deferred operation. When aborted (now or later) the `fn` function will not be called. + +When deferring a custom operation, do it early: after normalizing optional arguments but before encoding (to avoid double encoding and to emit original input if the operation has events) and before any _fast paths_ (to avoid calling back before the database has finished opening). For example, `db.batch([])` has an internal fast path where it skips work if the array of operations is empty. Resources that can be closed on their own (like iterators) should however first check such state before deferring, in order to reject operations after close (including when the database was reopened). + +### `db.deferAsync(fn[, options])` + +Similar to `db.defer(fn)` but for asynchronous work. Returns a promise, which waits for [`db.status`](#dbstatus) to change to `'open'` or `'closed'` and then calls `fn` which itself must return a promise. This allows for recursion: + +```js +db.foo = async function (key) { + if (this.status === 'opening') { + return this.deferAsync(() => this.foo(key)) + } else { + // .. + } +} +``` + +The optional `options` object may contain: + +- `signal`: an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) to abort the deferred operation. When aborted (now or later) the `fn` function will not be called, and the promise returned by `deferAsync()` will be rejected with a [`LEVEL_ABORTED`](#level_aborted) error. + +### `db.attachResource(resource)` + +Keep track of the given `resource` in order to call its `close()` method when the database is closed. 
Once successfully closed, the resource will no longer be tracked, to the same effect as manually calling [`db.detachResource()`](#dbdetachresourceresource). When given multiple resources, the database will close them in parallel. Resources are kept in a [set](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) so that the same object will not be attached (and closed) twice. + +Intended for objects that rely on an open database. Used internally for built-in resources like iterators and sublevels, and is publicly exposed for custom resources. + +### `db.detachResource(resource)` + +Stop tracking the given `resource`. + +### `iterator` + +An iterator allows one to lazily read a range of entries stored in the database. The entries will be sorted by keys in [lexicographic order](https://en.wikipedia.org/wiki/Lexicographic_order) (in other words: byte order) which in short means key `'a'` comes before `'b'` and key `'10'` comes before `'2'`. + +Iterators can be consumed with [`for await...of`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) and `iterator.all()`, or by manually calling `iterator.next()` or `nextv()` in succession. In the latter case, `iterator.close()` must always be called. In contrast, finishing, throwing, breaking or returning from a `for await...of` loop automatically calls `iterator.close()`, as does `iterator.all()`. + +An iterator reaches its natural end in the following situations: + +- The end of the database has been reached +- The end of the range has been reached +- The last `iterator.seek()` was out of range. + +An iterator keeps track of calls that are in progress. 
It doesn't allow concurrent `next()`, `nextv()` or `all()` calls (including a combination thereof) and will throw an error with code [`LEVEL_ITERATOR_BUSY`](#level_iterator_busy) if that happens: + +```js +// Not awaited +iterator.next() + +try { + // Which means next() is still in progress here + iterator.all() +} catch (err) { + console.log(err.code) // 'LEVEL_ITERATOR_BUSY' +} +``` + +#### `for await...of iterator` + +Yields entries, which are arrays containing a `key` and `value`. The type of `key` and `value` depends on the options passed to `db.iterator()`. + +```js +try { + for await (const [key, value] of db.iterator()) { + console.log(key) + } +} catch (err) { + console.error(err) +} +``` + +Note for implementors: this uses `iterator.next()` and `iterator.close()` under the hood so no further method implementations are needed to support `for await...of`. + +#### `iterator.next()` + +Advance to the next entry and yield that entry. Returns a promise for either an entry array (containing a `key` and `value`) or for `undefined` if the iterator reached its natural end. The type of `key` and `value` depends on the options passed to `db.iterator()`. + +**Note:** `iterator.close()` must always be called once there's no intention to call `next()` or `nextv()` again. Even if such calls yielded an error and even if the iterator reached its natural end. Not closing the iterator will result in memory leaks and may also affect performance of other operations if many iterators are unclosed and each is holding a snapshot of the database. + +#### `iterator.nextv(size[, options])` + +Advance repeatedly and get at most `size` amount of entries in a single call. Can be faster than repeated `next()` calls. The `size` argument must be an integer and has a soft minimum of 1. There are no `options` by default but implementations may add theirs. + +Returns a promise for an array of entries, where each entry is an array containing a key and value. 
The natural end of the iterator will be signaled by yielding an empty array. + +```js +const iterator = db.iterator() + +while (true) { + const entries = await iterator.nextv(100) + + if (entries.length === 0) { + break + } + + for (const [key, value] of entries) { + // .. + } +} + +await iterator.close() +``` + +#### `iterator.all([options])` + +Advance repeatedly and get all (remaining) entries as an array, automatically closing the iterator. Assumes that those entries fit in memory. If that's not the case, instead use `next()`, `nextv()` or `for await...of`. There are no `options` by default but implementations may add theirs. Returns a promise for an array of entries, where each entry is an array containing a key and value. + +```js +const entries = await db.iterator({ limit: 100 }).all() + +for (const [key, value] of entries) { + // .. +} +``` + +#### `iterator.seek(target[, options])` + +Seek to the key closest to `target`. This method is synchronous, but the actual work may happen lazily. Subsequent calls to `iterator.next()`, `nextv()` or `all()` (including implicit calls in a `for await...of` loop) will yield entries with keys equal to or larger than `target`, or equal to or smaller than `target` if the `reverse` option passed to `db.iterator()` was true. + +The optional `options` object may contain: + +- `keyEncoding`: custom key encoding, used to encode the `target`. By default the `keyEncoding` option of the iterator is used or (if that wasn't set) the `keyEncoding` of the database. + +If range options like `gt` were passed to `db.iterator()` and `target` does not fall within that range, the iterator will reach its natural end. + +#### `iterator.close()` + +Free up underlying resources. Returns a promise. Closing the iterator is an idempotent operation, such that calling `close()` more than once is allowed and makes no difference. + +If a `next()`, `nextv()` or `all()` call is in progress, closing will wait for that to finish. 
After `close()` has been called, further calls to `next()`, `nextv()` or `all()` will yield an error with code [`LEVEL_ITERATOR_NOT_OPEN`](#level_iterator_not_open). + +#### `iterator.db` + +A reference to the database that created this iterator. + +#### `iterator.count` + +Read-only getter that indicates how many entries have been yielded so far (by any method) excluding calls that errored or yielded `undefined`. + +#### `iterator.limit` + +Read-only getter that reflects the `limit` that was set in options. Greater than or equal to zero. Equals `Infinity` if no limit, which allows for easy math: + +```js +const hasMore = iterator.count < iterator.limit +const remaining = iterator.limit - iterator.count +``` + +#### Aborting Iterators + +Iterators take an experimental `signal` option that, once signaled, aborts an in-progress read operation (if any) and rejects subsequent reads. The relevant promise will be rejected with a [`LEVEL_ABORTED`](#level_aborted) error. Aborting does not close the iterator, because closing is asynchronous and may result in an error that needs a place to go. This means signals should be used together with a pattern that automatically closes the iterator: + +```js +const abortController = new AbortController() +const signal = abortController.signal + +// Will result in 'aborted' log +abortController.abort() + +try { + for await (const entry of db.iterator({ signal })) { + console.log(entry) + } +} catch (err) { + if (err.code === 'LEVEL_ABORTED') { + console.log('aborted') + } +} +``` + +Otherwise, close the iterator explicitly: + +```js +const iterator = db.iterator({ signal }) + +try { + const entries = await iterator.nextv(10) +} catch (err) { + if (err.code === 'LEVEL_ABORTED') { + console.log('aborted') + } +} finally { + await iterator.close() +} +``` + +Support of signals is indicated via [`db.supports.signals.iterators`](https://github.com/Level/supports#signals-object). 
+ +### `keyIterator` + +A key iterator has the same interface as `iterator` except that its methods yield keys instead of entries. Usage is otherwise the same. + +### `valueIterator` + +A value iterator has the same interface as `iterator` except that its methods yield values instead of entries. Usage is otherwise the same. + +### `chainedBatch` + +#### `chainedBatch.put(key, value[, options])` + +Add a `put` operation to this chained batch, not committed until `write()` is called. This will throw a [`LEVEL_INVALID_KEY`](#level_invalid_key) or [`LEVEL_INVALID_VALUE`](#level_invalid_value) error if `key` or `value` is invalid. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. +- `valueEncoding`: custom value encoding for this operation, used to encode the `value`. +- `sublevel` (sublevel instance): act as though the `put` operation is performed on the given sublevel, to similar effect as `sublevel.batch().put(key, value)`. This allows atomically committing data to multiple sublevels. The given `sublevel` must have the same _root_ (i.e. top-most) database as `chainedBatch.db`. The `key` will be prefixed with the prefix of the sublevel, and the `key` and `value` will be encoded by the sublevel (using the default encodings of the sublevel unless `keyEncoding` and / or `valueEncoding` are provided). + +#### `chainedBatch.del(key[, options])` + +Add a `del` operation to this chained batch, not committed until `write()` is called. This will throw a [`LEVEL_INVALID_KEY`](#level_invalid_key) error if `key` is invalid. The optional `options` object may contain: + +- `keyEncoding`: custom key encoding for this operation, used to encode the `key`. +- `sublevel` (sublevel instance): act as though the `del` operation is performed on the given sublevel, to similar effect as `sublevel.batch().del(key)`. This allows atomically committing data to multiple sublevels. 
The given `sublevel` must have the same _root_ (i.e. top-most) database as `chainedBatch.db`. The `key` will be prefixed with the prefix of the sublevel, and the `key` will be encoded by the sublevel (using the default key encoding of the sublevel unless `keyEncoding` is provided). + +#### `chainedBatch.clear()` + +Remove all operations from this chained batch, so that they will not be committed. + +#### `chainedBatch.write([options])` + +Commit the operations. Returns a promise. All operations will be written atomically, that is, they will either all succeed or fail with no partial commits. + +There are no `options` by default but implementations may add theirs. Note that `write()` does not take encoding options. Those can only be set on `put()` and `del()` because implementations may synchronously forward such calls to an underlying store and thus need keys and values to be encoded at that point. + +After `write()` or `close()` has been called, no further operations are allowed. + +#### `chainedBatch.close()` + +Free up underlying resources. This should be done even if the chained batch has zero operations. Automatically called by `write()` so normally not necessary to call, unless the intent is to discard a chained batch without committing it. Closing the batch is an idempotent operation, such that calling `close()` more than once is allowed and makes no difference. Returns a promise. + +#### `chainedBatch.length` + +The number of operations in this chained batch, including operations that were added by [`prewrite`](#hook--dbhooksprewrite) hook functions if any. + +#### `chainedBatch.db` + +A reference to the database that created this chained batch. + +### `sublevel` + +A sublevel is an instance of the `AbstractSublevel` class, which extends `AbstractLevel` and thus has the same API. Sublevels have a few additional properties and methods. + +#### `sublevel.prefix` + +Prefix of the sublevel. A read-only string property. 
+ +```js +const example = db.sublevel('example') +const nested = example.sublevel('nested') + +console.log(example.prefix) // '!example!' +console.log(nested.prefix) // '!example!!nested!' +``` + +#### `sublevel.parent` + +Parent database. A read-only property. + +```js +const example = db.sublevel('example') +const nested = example.sublevel('nested') + +console.log(example.parent === db) // true +console.log(nested.parent === example) // true +``` + +#### `sublevel.db` + +Root database. A read-only property. + +```js +const example = db.sublevel('example') +const nested = example.sublevel('nested') + +console.log(example.db === db) // true +console.log(nested.db === db) // true +``` + +#### `sublevel.path([local])` + +Get the path of this sublevel, which is its prefix without separators. If `local` is true, exclude path of parent database. If false (the default) then recurse to form a fully-qualified path that travels from the root database to this sublevel. + +```js +const example = db.sublevel('example') +const nested = example.sublevel('nested') +const foo = db.sublevel(['example', 'nested', 'foo']) + +// Get global or local path +console.log(nested.path()) // ['example', 'nested'] +console.log(nested.path(true)) // ['nested'] + +// Has no intermediary sublevels, so the local option has no effect +console.log(foo.path()) // ['example', 'nested', 'foo'] +console.log(foo.path(true)) // ['example', 'nested', 'foo'] +``` + +### `snapshot` + +#### `snapshot.ref()` + +Increment reference count, to register work that should delay closing until `snapshot.unref()` is called an equal amount of times. The promise that will be returned by `snapshot.close()` will not resolve until the reference count returns to 0. This prevents prematurely closing underlying resources while the snapshot is in use. + +It is normally not necessary to call `snapshot.ref()` and `snapshot.unref()` because builtin database methods automatically do. 
+ +#### `snapshot.unref()` + +Decrement reference count, to indicate that the work has finished. + +#### `snapshot.close()` + +Free up underlying resources. Be sure to call this when the snapshot is no longer needed, because snapshots may cause the database to temporarily pause internal storage optimizations. Returns a promise. Closing the snapshot is an idempotent operation, such that calling `snapshot.close()` more than once is allowed and makes no difference. + +After `snapshot.close()` has been called, no further operations are allowed. For example, `db.get(key, { snapshot })` will throw an error with code [`LEVEL_SNAPSHOT_NOT_OPEN`](#level_snapshot_not_open). + +### Encodings + +Any database method that takes a `key` argument, `value` argument or range options like `gte`, hereby jointly referred to as `data`, runs that `data` through an _encoding_. This means to encode input `data` and decode output `data`. + +[Several encodings](https://github.com/Level/transcoder#built-in-encodings) are builtin courtesy of [`level-transcoder`](https://github.com/Level/transcoder) and can be selected by a short name like `'utf8'` or `'json'`. The default encoding is `'utf8'` which ensures you'll always get back a string. Encodings can be specified for keys and values independently with `keyEncoding` and `valueEncoding` options, either in the database constructor or per method to apply an encoding selectively. For example: + +```js +const db = level('./db', { + keyEncoding: 'view', + valueEncoding: 'json' +}) + +// Use binary keys +const key = Uint8Array.from([1, 2]) + +// Encode the value with JSON +await db.put(key, { x: 2 }) + +// Decode the value with JSON. Yields { x: 2 } +const obj = await db.get(key) + +// Decode the value with utf8. 
Yields '{"x":2}' +const str = await db.get(key, { valueEncoding: 'utf8' }) +``` + +The `keyEncoding` and `valueEncoding` options accept a string to select a known encoding by its name, or an object to use a custom encoding like [`charwise`](https://github.com/dominictarr/charwise). See [`keyEncoding()`](#encoding--dbkeyencodingencoding) for details. If a custom encoding is passed to the database constructor, subsequent method calls can refer to that encoding by name. Supported encodings are exposed in the `db.supports` manifest: + +```js +const db = level('./db', { + keyEncoding: require('charwise'), + valueEncoding: 'json' +}) + +// Includes builtin and custom encodings +console.log(db.supports.encodings.utf8) // true +console.log(db.supports.encodings.charwise) // true +``` + +An encoding can both widen and limit the range of `data` types. The default `'utf8'` encoding can only store strings. Other types, though accepted, are irreversibly stringified before storage. That includes JavaScript primitives which are converted with [`String(x)`](https://tc39.es/ecma262/multipage/text-processing.html#sec-string-constructor-string-value), Buffer which is converted with [`x.toString('utf8')`](https://nodejs.org/api/buffer.html#buftostringencoding-start-end) and Uint8Array converted with [`TextDecoder#decode(x)`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder/decode). Use other encodings for a richer set of `data` types, as well as binary data without a conversion cost - or loss of non-unicode bytes. + +For binary data two builtin encodings are available: `'buffer'` and `'view'`. They use a Buffer or Uint8Array respectively. To some extent these encodings are interchangeable, as the `'buffer'` encoding also accepts Uint8Array as input `data` (and will convert that to a Buffer without copying the underlying ArrayBuffer), the `'view'` encoding also accepts Buffer as input `data` and so forth. 
Output `data` will be either a Buffer or Uint8Array respectively and can also be converted: + +```js +const db = level('./db', { valueEncoding: 'view' }) +const buffer = await db.get('example', { valueEncoding: 'buffer' }) +``` + +In browser environments it may be preferable to only use `'view'`. When bundling JavaScript with Webpack, Browserify or other, you can choose not to use the `'buffer'` encoding and (through configuration of the bundler) exclude the [`buffer`](https://github.com/feross/buffer) shim in order to reduce bundle size. + +Regardless of the choice of encoding, a `key` or `value` may not be `null` or `undefined` due to preexisting significance in iterators and streams. No such restriction exists on range options because `null` and `undefined` are significant types in encodings like [`charwise`](https://github.com/dominictarr/charwise) as well as some underlying stores like IndexedDB. Consumers of an `abstract-level` implementation must assume that range options like `{ gt: undefined }` are _not_ the same as `{}`. The [abstract test suite](#test-suite) does not test these types. Whether they are supported or how they sort may differ per implementation. An implementation can choose to: + +- Encode these types to make them meaningful +- Have no defined behavior (moving the concern to a higher level) +- Delegate to an underlying database (moving the concern to a lower level). + +Lastly, one way or another, every implementation _must_ support `data` of type String and _should_ support `data` of type Buffer or Uint8Array. + +### Events + +An `abstract-level` database is an [`EventEmitter`](https://nodejs.org/api/events.html) and emits the events listed below. + +#### `opening` + +Emitted when database is opening. Receives 0 arguments: + +```js +db.once('opening', function () { + console.log('Opening...') +}) +``` + +#### `open` + +Emitted when database has successfully opened. 
Receives 0 arguments: + +```js +db.once('open', function () { + console.log('Opened!') +}) +``` + +#### `closing` + +Emitted when database is closing. Receives 0 arguments. + +#### `closed` + +Emitted when database has successfully closed. Receives 0 arguments. + +#### `write` + +Emitted when data was successfully written to the database as the result of `db.batch()`, `db.put()` or `db.del()`. Receives a single `operations` argument, which is an array containing normalized operation objects. The array will contain at least one operation object and reflects modifications made (and operations added) by the [`prewrite`](#hook--dbhooksprewrite) hook. Normalized means that every operation object has `keyEncoding` and (if `type` is `'put'`) `valueEncoding` properties and these are always encoding objects, rather than their string names like `'utf8'` or whatever was given in the input. + +Operation objects also include userland options that were provided in the `options` argument of the originating call, for example the `options` in a `db.put(key, value, options)` call: + +```js +db.on('write', function (operations) { + for (const op of operations) { + if (op.type === 'put') { + console.log(op.key, op.value, op.foo) + } + } +}) + +// Put with a userland 'foo' option +await db.put('abc', 'xyz', { foo: true }) +``` + +The `key` and `value` of the operation object match the original input, before having encoded it. To provide access to encoded data, the operation object additionally has `encodedKey` and (if `type` is `'put'`) `encodedValue` properties. Event listeners can inspect [`keyEncoding.format`](https://github.com/Level/transcoder#encodingformat) and `valueEncoding.format` to determine the data type of `encodedKey` and `encodedValue`. 
+ +As an example, given a sublevel created with `users = db.sublevel('users', { valueEncoding: 'json' })`, a call like `users.put('isa', { score: 10 })` will emit a `write` event from the sublevel with an `operations` argument that looks like the following. Note that specifics (in data types and encodings) may differ per database at it depends on which encodings an implementation supports and uses internally. This example assumes that the database uses `'utf8'`. + +```js +[{ + type: 'put', + key: 'isa', + value: { score: 10 }, + keyEncoding: users.keyEncoding('utf8'), + valueEncoding: users.valueEncoding('json'), + encodedKey: 'isa', // No change (was already utf8) + encodedValue: '{"score":10}', // JSON-encoded +}] +``` + +Because sublevels encode and then forward operations to their parent database, a separate `write` event will be emitted from `db` with: + +```js +[{ + type: 'put', + key: '!users!isa', // Prefixed + value: '{"score":10}', // No change + keyEncoding: db.keyEncoding('utf8'), + valueEncoding: db.valueEncoding('utf8'), + encodedKey: '!users!isa', + encodedValue: '{"score":10}' +}] +``` + +Similarly, if a `sublevel` option was provided: + +```js +await db.batch() + .del('isa', { sublevel: users }) + .write() +``` + +We'll get: + +```js +[{ + type: 'del', + key: '!users!isa', // Prefixed + keyEncoding: db.keyEncoding('utf8'), + encodedKey: '!users!isa' +}] +``` + +Lastly, newly added `write` event listeners are only called for subsequently created batches (including chained batches): + +```js +const promise = db.batch([{ type: 'del', key: 'abc' }]) +db.on('write', listener) // Too late +await promise +``` + +For the event listener to be called it must be added earlier: + +```js +db.on('write', listener) +await db.batch([{ type: 'del', key: 'abc' }]) +``` + +The same is true for `db.put()` and `db.del()`. + +#### `clear` + +Emitted when a `db.clear()` call completed and entries were thus successfully deleted from the database. 
Receives a single `options` argument, which is the verbatim `options` argument that was passed to `db.clear(options)` (or an empty object if none) before having encoded range options. + +### Order Of Operations + +There is no defined order between parallel write operations. Consider: + +```js +await Promise.all([ + db.put('example', 1), + db.put('example', 2) +]) + +const result = await db.get('example') +``` + +The value of `result` could be either `1` or `2`, because the `db.put()` calls are asynchronous and awaited in parallel. Some implementations of `abstract-level` may unintentionally exhibit a "defined" order due to internal details. Implementations are free to change such details at any time, because per the asynchronous `abstract-level` interface that they follow, the order is theoretically random. + +Removing this concern (if necessary) must be done on an application-level. For example, the application could have a queue of operations, or per-key locks, or implement transactions on top of snapshots, or a versioning mechanism in its keyspace, or specialized data types like CRDT, or just say that conflicts are acceptable for that particular application, and so forth. The abundance of examples should explain why `abstract-level` itself doesn't enter this opinionated and application-specific problem space. Each solution has tradeoffs and `abstract-level`, being the core of a modular database, cannot decide which tradeoff to make. + +### Reading From Snapshots + +A snapshot is a lightweight "token" that represents a version of a database at a particular point in time. This allows for reading data without seeing subsequent writes made on the database. It comes in two forms: + +1. Implicit snapshots: created internally by the database and not visible to the outside world. +2. Explicit snapshots: created with `snapshot = db.snapshot()`. Because it acts as a token, `snapshot` has no read methods of its own. 
Instead the snapshot is to be passed to database methods like `db.get()` and `db.iterator()`. This also works on sublevels. + +Use explicit snapshots wisely, because their lifetime must be managed manually. Implicit snapshots are typically more convenient and possibly more performant because they can handled natively and have their lifetime limited by the surrounding operation. That said, explicit snapshots can be useful to make multiple read operations that require a shared, consistent view of the data. + +Most but not all `abstract-level` implementations support snapshots. They can be divided into three groups. + +#### 1. Implementation does not support snapshots + +As indicated by `db.supports.implicitSnapshots` and `db.supports.explicitSnapshots` being false. In this case, operations read from the latest version of the database. This most notably affects iterators: + +```js +await db.put('example', 'a') +const it = db.iterator() +await db.del('example') +const entries = await it.all() // Likely an empty array +``` + +The `db.supports.implicitSnapshots` property is aliased as `db.supports.snapshots` for backwards compatibility. + +#### 2. Implementation supports implicit snapshots + +As indicated by `db.supports.implicitSnapshots` being true. An iterator, upon creation, will synchronously create a snapshot and subsequently read from that snapshot rather than the latest version of the database. There are no actual numerical versions, but let's say there are in order to clarify the behavior: + +```js +await db.put('example', 'a') // Results in v1 +const it = db.iterator() // Creates snapshot of v1 +await db.del('example') // Results in v2 +const entries = await it.all() // Reads from snapshot and thus v1 +``` + +The `entries` array thus includes the deleted entry, because the snapshot of the iterator represents the database version from before the entry was deleted. + +Other read operations like `db.get()` also use a snapshot. 
Such calls synchronously create a snapshot and then asynchronously read from it. This means a write operation (to the same key) may not be visible unless awaited: + +```js +await db.put('example', 1) // Awaited +db.put('example', 2) // Not awaited +await db.get('example') // Yields 1 (typically) +``` + +In other words, once a write operation has _finished_ (including having communicated that to the main thread of JavaScript, i.e. by resolving the promise in the above example) subsequent reads are guaranteed to include that data. That's because those reads use a snapshot created in the main thread which is aware of the finished write at this point. Before that point, no guarantee can be given. + +#### 3. Implementation supports explicit snapshots + +As indicated by `db.supports.explicitSnapshots` being true. This is the most precise and flexible way to control the version of the data to read. The previous example can be modified to get a consistent result: + +```js +await db.put('example', 1) +const snapshot = db.snapshot() +db.put('example', 2) +await db.get('example', { snapshot })) // Yields 1 (always) +await snapshot.close() +``` + +The main use case for explicit snapshots is retrieving data from an index. 
+ +```js +// We'll use charwise to encode "compound" keys +const charwise = require('charwise-compact') +const players = db.sublevel('players', { valueEncoding: 'json' }) +const index = db.sublevel('scores', { keyEncoding: charwise }) + +// Write sample data (using an atomic batch so that the index remains in-sync) +await db.batch() + .put('alice', { score: 620 }, { sublevel: players }) + .put([620, 'alice'], '', { sublevel: index }) + .write() + +// Iterate players that have a score higher than 100 +const snapshot = db.snapshot() +const iterator = index.keys({ gt: [100, charwise.HI], snapshot }) + +for await (const key of iterator) { + // Index key is [620, 'alice'] so key[1] gives us 'alice' + const player = await players.get(key[1], { snapshot }) +} + +// Don't forget to close (and try/catch/finally) +await snapshot.close() +``` + +On implementations that support implicit but not explicit snapshots, some of the above can be simulated. In particular, to get multiple entries from a snapshot, one could create an iterator and then repeatedly `seek()` to the desired entries. + +### Hooks + +**Hooks are experimental and subject to change without notice.** + +Hooks allow userland _hook functions_ to customize behavior of the database. Each hook is a different extension point, accessible via `db.hooks`. Some are shared between database methods to encapsulate common behavior. A hook is either synchronous or asynchronous, and functions added to a hook must respect that trait. + +#### `hook = db.hooks.prewrite` + +A synchronous hook for modifying or adding operations to [`db.batch([])`](#dbbatchoperations-options), [`db.batch().put()`](#chainedbatchputkey-value-options), [`db.batch().del()`](#chainedbatchdelkey-options), [`db.put()`](#dbputkey-value-options) and [`db.del()`](#dbdelkey-options) calls. It does not include [`db.clear()`](#dbclearoptions) because the entries deleted by such a call are not communicated back to `db`. 
+ +Functions added to this hook will receive two arguments: `op` and `batch`. + +##### Example + +```js +const charwise = require('charwise-compact') +const books = db.sublevel('books', { valueEncoding: 'json' }) +const index = db.sublevel('authors', { keyEncoding: charwise }) + +books.hooks.prewrite.add(function (op, batch) { + if (op.type === 'put') { + batch.add({ + type: 'put', + key: [op.value.author, op.key], + value: '', + sublevel: index + }) + } +}) + +// Will atomically commit it to the author index as well +await books.put('12', { title: 'Siddhartha', author: 'Hesse' }) +``` + +##### Arguments + +###### `op` (object) + +The `op` argument reflects the input operation and has the following properties: `type`, `key`, `keyEncoding`, an optional `sublevel`, and if `type` is `'put'` then also `value` and `valueEncoding`. It can also include userland options, that were provided either in the input operation object (if it originated from [`db.batch([])`](#db_batchoperations-options)) or in the `options` argument of the originating call, for example the `options` in `db.del(key, options)`. + +The `key` and `value` have not yet been encoded at this point. The `keyEncoding` and `valueEncoding` properties are always encoding objects (rather than encoding names like `'json'`) which means hook functions can call (for example) `op.keyEncoding.encode(123)`. + +Hook functions can modify the `key`, `value`, `keyEncoding` and `valueEncoding` properties, but not `type` or `sublevel`. If a hook function modifies `keyEncoding` or `valueEncoding` it can use either encoding names or encoding objects, which will subsequently be normalized to encoding objects. Hook functions can also add custom properties to `op` which will be visible to other hook functions, the private API of the database and in the [`write`](#write) event. 
+ +###### `batch` (object) + +The `batch` argument of the hook function is an interface to add operations, to be committed in the same batch as the input operation(s). This also works if the originating call was a singular operation like `db.put()` because the presence of one or more hook functions will change `db.put()` and `db.del()` to internally use a batch. For originating calls like [`db.batch([])`](#dbbatchoperations-options) that provide multiple input operations, operations will be added after the last input operation, rather than interleaving. The hook function will not be called for operations that were added by either itself or other hook functions. + +###### `batch = batch.add(op)` + +Add a batch operation, using the same format as the operations that [`db.batch([])`](#dbbatchoperations-options) takes. However, it is assumed that `op` can be freely mutated by `abstract-level`. Unlike input operations it will not be cloned before doing so. The `add` method returns `batch` which allows for chaining, similar to the [chained batch](#chainedbatch) API. + +For hook functions to be generic, it is recommended to explicitly define `keyEncoding` and `valueEncoding` properties on `op` (instead of relying on database defaults) or to use an isolated sublevel with known defaults. + +#### `hook = db.hooks.postopen` + +An asynchronous hook that runs after the database has succesfully opened, but before deferred operations are executed and before events are emitted. It thus allows for additional initialization, including reading and writing data that deferred operations might need. The postopen hook always runs before the prewrite hook. + +Functions added to this hook must return a promise and will receive one argument: `options`. If one of the hook functions yields an error then the database will be closed. 
In the rare event that closing also fails, which means there's no safe state to return to, the database will enter an internal locked state where `db.status` is `'closed'` and subsequent calls to `db.open()` or `db.close()` will be met with a [`LEVEL_STATUS_LOCKED`](#errors) error. This locked state is also used during the postopen hook itself, meaning hook functions are not allowed to call `db.open()` or `db.close()`. + +##### Example + +```js +db.hooks.postopen.add(async function (options) { + // Can read and write like usual + return db.put('example', 123, { + valueEncoding: 'json' + }) +}) +``` + +##### Arguments + +###### `options` (object) + +The `options` that were provided in the originating [`db.open(options)`](#dbopenoptions) call, merged with constructor options and defaults. Equivalent to what the private API received in [`db._open(options)`](#db_openoptions). + +#### `hook = db.hooks.newsub` + +A synchronous hook that runs when a `AbstractSublevel` instance has been created by [`db.sublevel(options)`](#sublevel--dbsublevelname-options). Functions added to this hook will receive two arguments: `sublevel` and `options`. + +##### Example + +This hook can be useful to hook into a database and any sublevels created on that database. Userland modules that act like plugins might like the following pattern: + +```js +module.exports = function logger (db, options) { + // Recurse so that db.sublevel('foo', opts) will call logger(sublevel, opts) + db.hooks.newsub.add(logger) + + db.hooks.prewrite.add(function (op, batch) { + console.log('writing', { db, op }) + }) +} +``` + +##### Arguments + +###### `sublevel` (object) + +The `AbstractSublevel` instance that was created. + +###### `options` (object) + +The `options` that were provided in the originating `db.sublevel(options)` call, merged with defaults. Equivalent to what the private API received in [`db._sublevel(options)`](#sublevel--db_sublevelname-options). 
+ +#### `hook` + +##### `hook.add(fn)` + +Add the given `fn` function to this hook, if it wasn't already added. + +##### `hook.delete(fn)` + +Remove the given `fn` function from this hook. + +#### Hook Error Handling + +If a hook function throws an error, it will be wrapped in an error with code [`LEVEL_HOOK_ERROR`](#level_hook_error) and abort the originating call: + +```js +try { + await db.put('abc', 123) +} catch (err) { + if (err.code === 'LEVEL_HOOK_ERROR') { + console.log(err.cause) + } +} +``` + +As a result, other hook functions will not be called. + +#### Hooks On Sublevels + +On sublevels and their parent database(s), hooks are triggered in bottom-up order. For example, `db.sublevel('a').sublevel('b').batch(..)` will trigger the `prewrite` hook of sublevel `b`, then the `prewrite` hook of sublevel `a` and then of `db`. Only direct operations on a database will trigger hooks, not when a sublevel is provided as an option. This means `db.batch([{ sublevel, ... }])` will trigger the `prewrite` hook of `db` but not of `sublevel`. These behaviors are symmetrical to [events](#events): `db.batch([{ sublevel, ... }])` will only emit a `write` event from `db` while `db.sublevel(..).batch([{ ... }])` will emit a `write` event from the sublevel and then another from `db` (this time with fully-qualified keys). + +### Shared Access + +Unless documented otherwise, implementations of `abstract-level` do _not_ support accessing a database from multiple processes running in parallel. That includes Node.js clusters and Electron renderer processes. + +See [`Level/awesome`](https://github.com/Level/awesome#shared-access) for modules like [`many-level`](https://github.com/Level/many-level) and [`rave-level`](https://github.com/Level/rave-level) that allow a database to be shared across processes and/or machines. + +### Errors + +Errors thrown by an `abstract-level` database have a `code` property that is an uppercase string. 
Error codes will not change between major versions, but error messages will. Messages may also differ between implementations; they are free and encouraged to tune messages. + +A database may also throw [`TypeError`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypeError) errors (or other core error constructors in JavaScript) without a `code` and without any guarantee on the stability of error properties - because these errors indicate invalid arguments and other programming mistakes that should not be catched much less have associated logic. + +Error codes will be one of the following. + +#### `LEVEL_DATABASE_NOT_OPEN` + +When an operation was made on a database while it was closing or closed. The error may have a `cause` property that explains a failure to open: + +```js +try { + await db.open() +} catch (err) { + console.error(err.code) // 'LEVEL_DATABASE_NOT_OPEN' + + if (err.cause && err.cause.code === 'LEVEL_LOCKED') { + // Another process or instance has opened the database + } +} +``` + +#### `LEVEL_DATABASE_NOT_CLOSED` + +When a database failed to `close()`. The error may have a `cause` property that explains a failure to close. + +#### `LEVEL_ITERATOR_NOT_OPEN` + +When an operation was made on an iterator while it was closing or closed, which may also be the result of the database being closed. + +#### `LEVEL_ITERATOR_BUSY` + +When `iterator.next()` or `seek()` was called while a previous `next()` call was still in progress. + +#### `LEVEL_BATCH_NOT_OPEN` + +When an operation was made on a chained batch while it was closing or closed, which may also be the result of the database being closed or that `write()` was called on the chained batch. + +#### `LEVEL_SNAPSHOT_NOT_OPEN` + +When an operation was made on a snapshot while it was closing or closed, which may also be the result of the database being closed. + +#### `LEVEL_ABORTED` + +When an operation was aborted by the user. 
For [web compatibility](https://dom.spec.whatwg.org/#aborting-ongoing-activities) this error can also be identified by its `name` which is `'AbortError'`: + +```js +if (err.name === 'AbortError') { + // Operation was aborted +} +``` + +#### `LEVEL_ENCODING_NOT_FOUND` + +When a `keyEncoding` or `valueEncoding` option specified a named encoding that does not exist. + +#### `LEVEL_ENCODING_NOT_SUPPORTED` + +When a `keyEncoding` or `valueEncoding` option specified an encoding that isn't supported by the database. + +#### `LEVEL_DECODE_ERROR` + +When decoding of keys or values failed. The error _may_ have a [`cause`](https://github.com/tc39/proposal-error-cause) property containing an original error. For example, it might be a [`SyntaxError`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SyntaxError) from an internal [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) call: + +```js +await db.put('key', 'invalid json', { valueEncoding: 'utf8' }) + +try { + const value = await db.get('key', { valueEncoding: 'json' }) +} catch (err) { + console.log(err.code) // 'LEVEL_DECODE_ERROR' + console.log(err.cause) // 'SyntaxError: Unexpected token i in JSON at position 0' +} +``` + +#### `LEVEL_INVALID_KEY` + +When a key is `null`, `undefined` or (if an implementation deems it so) otherwise invalid. + +#### `LEVEL_INVALID_VALUE` + +When a value is `null`, `undefined` or (if an implementation deems it so) otherwise invalid. + +#### `LEVEL_CORRUPTION` + +Data could not be read (from an underlying store) due to a corruption. + +#### `LEVEL_IO_ERROR` + +Data could not be read (from an underlying store) due to an input/output error, for example from the filesystem. + +#### `LEVEL_INVALID_PREFIX` + +When a sublevel prefix contains characters outside of the supported byte range. 
+ +#### `LEVEL_NOT_SUPPORTED` + +When a module needs a certain feature, typically as indicated by `db.supports`, but that feature is not available on a database argument or other. For example, some kind of plugin may depend on snapshots: + +```js +const ModuleError = require('module-error') + +module.exports = function plugin (db) { + if (!db.supports.explicitSnapshots) { + throw new ModuleError('Database must support snapshots', { + code: 'LEVEL_NOT_SUPPORTED' + }) + } + + // .. +} +``` + +#### `LEVEL_LEGACY` + +When a method, option or other property was used that has been removed from the API. + +#### `LEVEL_LOCKED` + +When an attempt was made to open a database that is already open in another process or instance. Used by `classic-level` and other implementations of `abstract-level` that use exclusive locks. + +#### `LEVEL_HOOK_ERROR` + +An error occurred while running a hook function. The error will have a `cause` property set to the original error thrown from the hook function. + +#### `LEVEL_STATUS_LOCKED` + +When `db.open()` or `db.close()` was called while database was locked, as described in the [postopen hook](#hook--dbhookspostopen) documentation. + +#### `LEVEL_READONLY` + +When an attempt was made to write data to a read-only database. Used by `many-level`. + +#### `LEVEL_CONNECTION_LOST` + +When a database relies on a connection to a remote party and that connection has been lost. Used by `many-level`. + +#### `LEVEL_REMOTE_ERROR` + +When a remote party encountered an unexpected condition that it can't reflect with a more specific code. Used by `many-level`. + +## Private API For Implementors + +To implement an `abstract-level` database, extend the [`AbstractLevel`](./abstract-level.js) class and override the private underscored versions of its methods. For example, to implement the public `put()` method, override the private `_put()` method. The same goes for other classes (some of which are optional to override). 
All classes can be found on the main export of the npm package: + +```js +const { + AbstractLevel, + AbstractSublevel, + AbstractIterator, + AbstractKeyIterator, + AbstractValueIterator, + AbstractChainedBatch, + AbstractSnapshot +} = require('abstract-level') +``` + +Naming-wise, implementations should use a class name in the form of `*Level` (suffixed, for example `MemoryLevel`) and an npm package name in the form of `*-level` (for example `memory-level`). While utilities and plugins should use a package name in the form of `level-*` (prefixed). + +Each of the private methods listed below will receive exactly the number and types of arguments described, regardless of what is passed in through the public API. Public methods provide type checking: if a consumer calls `db.batch(123)` they'll get an error that the first argument must be an array. Optional arguments get sensible defaults: a `db.get(key)` call translates to a `db._get(key, options)` call. + +Where possible, the default private methods are sensible noops that do nothing. For example, `db._open()` will simply resolve its promise on a next tick. Other methods have functional defaults. Each method documents whether implementing it is mandatory. + +When throwing or yielding an error, prefer using a [known error code](#errors). If new codes are required for your implementation and you wish to use the `LEVEL_` prefix for consistency, feel free to open an issue to discuss. We'll likely want to document those codes here. 
+ +### Example + +Let's implement a basic in-memory database: + +```js +const { AbstractLevel } = require('abstract-level') + +class ExampleLevel extends AbstractLevel { + // This in-memory example doesn't have a location argument + constructor (options) { + // Declare supported encodings + const encodings = { utf8: true } + + // Call AbstractLevel constructor + super({ encodings }, options) + + // Create a map to store entries + this._entries = new Map() + } + + async _open (options) { + // Here you would open any necessary resources. + } + + async _put (key, value, options) { + this._entries.set(key, value) + } + + async _get (key, options) { + // Is undefined if not found + return this._entries.get(key) + } + + async _del (key, options) { + this._entries.delete(key) + } +} +``` + +Now we can use our implementation: + +```js +const db = new ExampleLevel() + +await db.put('foo', 'bar') +const value = await db.get('foo') + +console.log(value) // 'bar' +``` + +Although our basic implementation only supports `'utf8'` strings internally, we do get to use [encodings](#encodings) that encode _to_ that. For example, the `'json'` encoding which encodes to `'utf8'`: + +```js +const db = new ExampleLevel({ valueEncoding: 'json' }) +await db.put('foo', { a: 123 }) +const value = await db.get('foo') + +console.log(value) // { a: 123 } +``` + +See [`memory-level`](https://github.com/Level/memory-level) if you are looking for a complete in-memory implementation. The example above notably lacks iterator support and would not pass the [abstract test suite](#test-suite). + +### `db = new AbstractLevel(manifest[, options])` + +The database constructor. Sets the [`status`](#dbstatus) to `'opening'`. Takes a [manifest](https://github.com/Level/supports) object that the constructor will enrich with defaults. At minimum, the manifest must declare which `encodings` are supported in the private API. 
For example: + +```js +class ExampleLevel extends AbstractLevel { + constructor (location, options) { + const manifest = { + encodings: { buffer: true } + } + + // Call AbstractLevel constructor. + // Location is not handled by AbstractLevel. + super(manifest, options) + } +} +``` + +Both the public and private API of `abstract-level` are encoding-aware. This means that private methods receive `keyEncoding` and `valueEncoding` options too. Implementations don't need to perform encoding or decoding themselves. Rather, the `keyEncoding` and `valueEncoding` options are lower-level encodings that indicate the type of already-encoded input data or the expected type of yet-to-be-decoded output data. They're one of `'buffer'`, `'view'`, `'utf8'` and always strings in the private API. + +If the manifest declared support of `'buffer'`, then `keyEncoding` and `valueEncoding` will always be `'buffer'`. If the manifest declared support of `'utf8'` then `keyEncoding` and `valueEncoding` will be `'utf8'`. + +For example: a call like `await db.put(key, { x: 2 }, { valueEncoding: 'json' })` will encode the `{ x: 2 }` value and might forward it to the private API as `db._put(key, '{"x":2}', { valueEncoding: 'utf8' })`. Same for the key (omitted for brevity). + +The public API will coerce user input as necessary. If the manifest declared support of `'utf8'` then `await db.get(24)` will forward that number key as a string: `db._get('24', { keyEncoding: 'utf8', ... })`. However, this is _not_ true for output: a private API call like `db._get(key, { keyEncoding: 'utf8', valueEncoding: 'utf8' })` _must_ yield a string value. + +All private methods below that take a `key` argument, `value` argument or range option, will receive that data in encoded form. That includes `iterator._seek()` with its `target` argument. 
So if the manifest declared support of `'buffer'` then `db.iterator({ gt: 2 })` translates into `db._iterator({ gt: Buffer.from('2'), ...options })` and `iterator.seek(128)` translates into `iterator._seek(Buffer.from('128'), options)`. + +The `AbstractLevel` constructor will add other supported encodings to the public manifest. If the private API only supports `'buffer'`, the resulting `db.supports.encodings` will nevertheless be as follows because all other encodings can be transcoded to `'buffer'`: + +```js +{ buffer: true, view: true, utf8: true, json: true, ... } +``` + +Implementations can also declare support of multiple encodings. Keys and values will then be encoded and decoded via the most optimal path. For example, [`classic-level`](https://github.com/Level/classic-level) uses: + +```js +super({ encodings: { buffer: true, utf8: true } }, options) +``` + +This has the benefit that user input needs fewer conversion steps: if the input is a string then `classic-level` can pass that to its LevelDB binding as-is. Vice versa for output. + +### `db._open(options)` + +Open the database. The `options` object will always have the following properties: `createIfMissing`, `errorIfExists`. When this is called, `db.status` will be `'opening'`. Must return a promise. If opening failed, reject the promise, which will set `db.status` to `'closed'`. Otherwise resolve the promise, which will set `db.status` to `'open'`. The default `_open()` is an async noop. + +### `db._close()` + +Close the database. When this is called, `db.status` will be `'closing'`. Must return a promise. If closing failed, reject the promise, which will reset `db.status` to `'open'`. Otherwise resolve the promise, which will set `db.status` to `'closed'`. If the database was never opened or failed to open then `_close()` will not be called. + +The default `_close()` is an async noop. 
In native implementations (native addons written in C++ or other) it's recommended to delay closing if any operations are in flight. See [`classic-level`](https://github.com/Level/classic-level) (previously `leveldown`) for an example of this behavior. The JavaScript side in `abstract-level` will prevent _new_ operations before the database is reopened (as explained in constructor documentation above) while the C++ side should prevent closing the database before _existing_ operations have completed. + +### `db._get(key, options)` + +Get a value by `key`. The `options` object will always have the following properties: `keyEncoding` and `valueEncoding`. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise with the value. If the `key` was not found then use `undefined` as value. + +If the database indicates support of snapshots via `db.supports.implicitSnapshots` then `db._get()` must read from a snapshot of the database. That snapshot (or similar mechanism) must be created synchronously when `db._get()` is called, before asynchronously reading the value. This means it should not see the data of write operations that are scheduled immediately after `db._get()`. + +The default `_get()` returns a promise for an `undefined` value. It must be overridden. + +### `db._getMany(keys, options)` + +Get multiple values by an array of `keys`. The `options` object will always have the following properties: `keyEncoding` and `valueEncoding`. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise with an array of values. If a key does not exist, set the relevant value to `undefined`. + +Snapshot behavior of `db._getMany()` must be the same as described for `db._get()` above. + +The default `_getMany()` returns a promise for an array of values that is equal in length to `keys` and is filled with `undefined`. It must be overridden. 
+ +### `db._put(key, value, options)` + +Add a new entry or overwrite an existing entry. The `options` object will always have the following properties: `keyEncoding` and `valueEncoding`. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise, without an argument. + +The default `_put()` returns a resolved promise. It must be overridden. + +### `db._del(key, options)` + +Delete an entry. The `options` object will always have the following properties: `keyEncoding`. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise, without an argument. + +The default `_del()` returns a resolved promise. It must be overridden. + +### `db._batch(operations, options)` + +Perform multiple _put_ and/or _del_ operations in bulk. The `operations` argument is always an array containing a list of operations to be executed sequentially, although as a whole they should be performed as an atomic operation. The `_batch()` method will not be called if the `operations` array is empty. Each operation is guaranteed to have at least `type`, `key` and `keyEncoding` properties. If the type is `put`, the operation will also have `value` and `valueEncoding` properties. There are no default options but `options` will always be an object. + +Must return a promise. If the batch failed, reject the promise. Otherwise resolve the promise, without an argument. + +The public `batch()` method supports encoding options both in the `options` argument and per operation. The private `_batch()` method should only support encoding options per operation, which are guaranteed to be set and to be normalized (the `options` argument in the private API might also contain encoding options but only because it's cheaper to not remove them). + +The default `_batch()` returns a resolved promise. It must be overridden. 
+ +### `db._chainedBatch()` + +The default `_chainedBatch()` returns a functional `AbstractChainedBatch` instance that uses `db._batch(array, options)` under the hood. To implement chained batch in an optimized manner, extend `AbstractChainedBatch` and return an instance of this class in the `_chainedBatch()` method: + +```js +const { AbstractChainedBatch } = require('abstract-level') + +class ExampleChainedBatch extends AbstractChainedBatch { + constructor (db) { + super(db) + } +} + +class ExampleLevel extends AbstractLevel { + _chainedBatch () { + return new ExampleChainedBatch(this) + } +} +``` + +### `db._iterator(options)` + +The default `_iterator()` returns a noop `AbstractIterator` instance. It must be overridden, by extending `AbstractIterator` and returning an instance of this class in the `_iterator(options)` method: + +```js +const { AbstractIterator } = require('abstract-level') + +class ExampleIterator extends AbstractIterator { + constructor (db, options) { + super(db, options) + } + + // .. +} + +class ExampleLevel extends AbstractLevel { + _iterator (options) { + return new ExampleIterator(this, options) + } +} +``` + +The `options` object will always have the following properties: `reverse`, `keys`, `values`, `limit`, `keyEncoding` and `valueEncoding`. The `limit` will always be an integer, greater than or equal to `-1` and less than `Infinity`. If the user passed range options to `db.iterator()`, those will be encoded and set in `options`. + +### `db._keys(options)` + +The default `_keys()` returns a functional iterator that wraps `db._iterator()` in order to map entries to keys. For optimal performance it can be overridden by extending `AbstractKeyIterator`: + +```js +const { AbstractKeyIterator } = require('abstract-level') + +class ExampleKeyIterator extends AbstractKeyIterator { + constructor (db, options) { + super(db, options) + } + + // .. 
+} + +class ExampleLevel extends AbstractLevel { + _keys (options) { + return new ExampleKeyIterator(this, options) + } +} +``` + +The `options` object will always have the following properties: `reverse`, `limit` and `keyEncoding`. The `limit` will always be an integer, greater than or equal to `-1` and less than `Infinity`. If the user passed range options to `db.keys()`, those will be encoded and set in `options`. + +### `db._values(options)` + +The default `_values()` returns a functional iterator that wraps `db._iterator()` in order to map entries to values. For optimal performance it can be overridden by extending `AbstractValueIterator`: + +```js +const { AbstractValueIterator } = require('abstract-level') + +class ExampleValueIterator extends AbstractValueIterator { + constructor (db, options) { + super(db, options) + } + + // .. +} + +class ExampleLevel extends AbstractLevel { + _values (options) { + return new ExampleValueIterator(this, options) + } +} +``` + +The `options` object will always have the following properties: `reverse`, `limit`, `keyEncoding` and `valueEncoding`. The `limit` will always be an integer, greater than or equal to -1 and less than Infinity. If the user passed range options to `db.values()`, those will be encoded and set in `options`. + +### `db._clear(options)` + +Delete all entries or a range. Does not have to be atomic. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise, without an argument. It is recommended (and possibly mandatory in the future) to operate on a snapshot so that writes scheduled after a call to `clear()` will not be affected. + +Implementations that wrap another database can typically forward the `_clear()` call to that database, having transformed range options if necessary. + +The `options` object will always have the following properties: `reverse`, `limit` and `keyEncoding`. 
If the user passed range options to `db.clear()`, those will be encoded and set in `options`. + +### `sublevel = db._sublevel(name, options)` + +Create a [sublevel](#sublevel). The `options` object will always have the following properties: `separator`. The default `_sublevel()` returns a new instance of the [`AbstractSublevel`](./lib/abstract-sublevel.js) class. Overriding is optional. The `AbstractSublevel` can be extended in order to add additional methods to sublevels: + +```js +const { AbstractLevel, AbstractSublevel } = require('abstract-level') + +class ExampleLevel extends AbstractLevel { + _sublevel (name, options) { + return new ExampleSublevel(this, name, options) + } +} + +// For brevity this does not handle deferred open +class ExampleSublevel extends AbstractSublevel { + example (key, options) { + // Encode and prefix the key + const keyEncoding = this.keyEncoding(options.keyEncoding) + const keyFormat = keyEncoding.format + + key = this.prefixKey(keyEncoding.encode(key), keyFormat, true) + + // The parent database can be accessed like so. Make sure + // to forward encoding options and use the full key. + this.parent.del(key, { keyEncoding: keyFormat }, ...) + } +} +``` + +### `snapshot = db._snapshot(options)` + +Create a snapshot. The `options` argument is guaranteed to be an object. There are currently no options but implementations may add their own. + +The default `_snapshot()` throws a [`LEVEL_NOT_SUPPORTED`](#level_not_supported) error. To implement this method, extend `AbstractSnapshot`, return an instance of this class in an overridden `_snapshot()` method and set `manifest.explicitSnapshots` to `true`: + +```js +const { AbstractSnapshot } = require('abstract-level') + +class ExampleSnapshot extends AbstractSnapshot { + constructor (options) { + super(options) + } +} + +class ExampleLevel extends AbstractLevel { + constructor (/* ..., */ options) { + const manifest = { + explicitSnapshots: true, + // ... 
+ } + + super(manifest, options) + } + + _snapshot (options) { + return new ExampleSnapshot(options) + } +} +``` + +The snapshot of the underlying database (or other mechanisms to achieve the same effect) must be created synchronously, such that a call like `db.put()` made immediately after `db._snapshot()` will not affect the snapshot. As for previous write operations that are still in progress at the time that `db._snapshot()` is called: `db._snapshot()` does not have to (and should not) wait for such operations. Solving inconsistencies that may arise from this behavior is an application-level concern. To be clear, if the application awaits the write operations before calling `db.snapshot()` then the snapshot does need to reflect (include) those operations. + +### `iterator = new AbstractIterator(db, options)` + +The first argument to this constructor must be an instance of the relevant `AbstractLevel` implementation. The constructor will set `iterator.db` which is used (among other things) to access encodings and ensures that `db` will not be garbage collected in case there are no other references to it. The `options` argument must be the original `options` object that was passed to `db._iterator()` and it is therefore not (publicly) possible to create an iterator via constructors alone. + +The `signal` option, if any and once signaled, should abort an in-progress `_next()`, `_nextv()` or `_all()` call and reject the promise returned by that call with a [`LEVEL_ABORTED`](#level_aborted) error. Doing so is optional until a future semver-major release. Responsibilities are divided as follows: + +1. Before a database has finished opening, `abstract-level` handles the signal +2. While a call is in progress, the implementation handles the signal +3. Once the signal is aborted, `abstract-level` rejects further calls. + +A method like `_next()` therefore doesn't have to check the signal _before_ it start its asynchronous work, only _during_ that work. 
If supported, set `db.supports.signals.iterators` to `true` (via the manifest passed to the database constructor) which also enables relevant tests in the [test suite](#test-suite). + +#### `iterator._next()` + +Advance to the next entry and yield that entry. Must return a promise. If an error occurs, reject the promise. If the natural end of the iterator has been reached, resolve the promise with `undefined`. Otherwise resolve the promise with an array containing a `key` and `value`. If a `limit` was set and the iterator already yielded that many entries (via any of the methods) then `_next()` will not be called. + +The default `_next()` returns a promise for `undefined`. It must be overridden. + +#### `iterator._nextv(size, options)` + +Advance repeatedly and get at most `size` amount of entries in a single call. The `size` argument will always be an integer greater than 0. If a `limit` was set then `size` will be at most `limit - iterator.count`. If a `limit` was set and the iterator already yielded that many entries (via any of the methods) then `_nextv()` will not be called. There are no default options but `options` will always be an object. + +Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise with an array of entries. An empty array signifies the natural end of the iterator, so yield an array with at least one entry if the end has not been reached yet. + +The default `_nextv()` is a functional default that makes repeated calls to `_next()` and should be overridden for better performance. + +#### `iterator._all(options)` + +Advance repeatedly and get all (remaining) entries as an array. If a `limit` was set and the iterator already yielded that many entries (via any of the methods) then `_all()` will not be called. Do not call `close()` here because `all()` will do so (regardless of any error) and this may become an opt-out behavior in the future. There are no default options but `options` will always be an object. 
+ +Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise with an array of entries. + +The default `_all()` is a functional default that makes repeated calls to `_nextv()` and should be overridden for better performance. + +#### `iterator._seek(target, options)` + +Seek to the key closest to `target`. The `options` object will always have the following properties: `keyEncoding`. The default `_seek()` will throw an error with code [`LEVEL_NOT_SUPPORTED`](#errors) and must be overridden. + +#### `iterator._close()` + +Free up underlying resources. This method is guaranteed to only be called once. Must return a promise. + +The default `_close()` returns a resolved promise. Overriding is optional. + +### `keyIterator = AbstractKeyIterator(db, options)` + +A key iterator has the same interface and constructor arguments as `AbstractIterator` except that it must yield keys instead of entries. The same goes for value iterators: + +```js +class ExampleKeyIterator extends AbstractKeyIterator { + async _next () { + return 'example-key' + } +} + +class ExampleValueIterator extends AbstractValueIterator { + async _next () { + return 'example-value' + } +} +``` + +The `options` argument must be the original `options` object that was passed to `db._keys()` and it is therefore not (publicly) possible to create a key iterator via constructors alone. The same goes for value iterators via `db._values()`. + +**Note:** the `AbstractKeyIterator` and `AbstractValueIterator` classes do _not_ extend the `AbstractIterator` class. Similarly, if your implementation overrides `db._keys()` returning a custom subclass of `AbstractKeyIterator`, then that subclass must implement methods like `_next()` separately from your subclass of `AbstractIterator`. + +### `valueIterator = AbstractValueIterator(db, options)` + +A value iterator has the same interface and constructor arguments as `AbstractIterator` except that it must yield values instead of entries. 
For further details, see `keyIterator` above. + +### `chainedBatch = new AbstractChainedBatch(db, options)` + +The first argument to this constructor must be an instance of the relevant `AbstractLevel` implementation. The constructor will set `chainedBatch.db` which is used (among other things) to access encodings and ensures that `db` will not be garbage collected in case there are no other references to it. + +There are two ways to implement a chained batch. If `options.add` is true, only `_add()` will be called. If `options.add` is false or not provided, only `_put()` and `_del()` will be called. + +#### `chainedBatch._add(op)` + +Add a `put` or `del` operation. The `op` object will always have the following properties: `type`, `key`, `keyEncoding` and (if `type` is `'put'`) `value` and `valueEncoding`. + +#### `chainedBatch._put(key, value, options)` + +Add a `put` operation. The `options` object will always have the following properties: `keyEncoding` and `valueEncoding`. + +#### `chainedBatch._del(key, options)` + +Add a `del` operation. The `options` object will always have the following properties: `keyEncoding`. + +#### `chainedBatch._clear()` + +Remove all operations from this batch. + +#### `chainedBatch._write(options)` + +The default `_write()` method uses `db._batch()`. If `_write()` is overridden it must atomically commit the operations. There are no default options but `options` will always be an object. Must return a promise. If an error occurs, reject the promise. Otherwise resolve the promise, without an argument. The `_write()` method will not be called if the chained batch contains zero operations. + +#### `chainedBatch._close()` + +Free up underlying resources. This method is guaranteed to only be called once. Must return a promise. + +The default `_close()` returns a resolved promise. Overriding is optional. 
+ +### `snapshot = new AbstractSnapshot(db)` + +The first argument to this constructor must be an instance of the relevant `AbstractLevel` implementation. + +#### `snapshot._close()` + +Free up underlying resources. This method is guaranteed to only be called once and will not be called while read operations like `db._get()` are inflight. Must return a promise. + +The default `_close()` returns a resolved promise. Overriding is optional. + +## Test Suite + +To prove that your implementation is `abstract-level` compliant, include the abstract test suite in your `test.js` (or similar): + +```js +const test = require('tape') +const suite = require('abstract-level/test') +const ExampleLevel = require('.') + +suite({ + test, + factory (options) { + return new ExampleLevel(options) + } +}) +``` + +The `test` option _must_ be a function that is API-compatible with [`tape`](https://github.com/substack/tape). The `factory` option _must_ be a function that returns a unique and isolated instance of your implementation. The factory will be called many times by the test suite. + +If your implementation is disk-based we recommend using [`tempy`](https://github.com/sindresorhus/tempy) (or similar) to create unique temporary directories. Your setup could look something like: + +```js +const test = require('tape') +const tempy = require('tempy') +const suite = require('abstract-level/test') +const ExampleLevel = require('.') + +suite({ + test, + factory (options) { + return new ExampleLevel(tempy.directory(), options) + } +}) +``` + +### Excluding tests + +As not every implementation can be fully compliant due to limitations of its underlying storage, some tests may be skipped. This must be done via `db.supports` which is set via the constructor. 
For example, to skip tests of implicit snapshots: + +```js +const { AbstractLevel } = require('abstract-level') + +class ExampleLevel extends AbstractLevel { + constructor (location, options) { + super({ implicitSnapshots: false }, options) + } +} +``` + +This also serves as a signal to users of your implementation. + +### Reusing `testCommon` + +The input to the test suite is a `testCommon` object. Should you need to reuse `testCommon` for your own (additional) tests, use the included utility to create a `testCommon` with defaults: + +```js +const test = require('tape') +const suite = require('abstract-level/test') +const ExampleLevel = require('.') + +const testCommon = suite.common({ + test, + factory (options) { + return new ExampleLevel(options) + } +}) + +suite(testCommon) +``` + +The `testCommon` object will have the `test` and `factory` properties described above, as well as a convenience `supports` property that is lazily copied from a `factory().supports`. You might use it like so: + +```js +test('custom test', function (t) { + const db = testCommon.factory() + // .. +}) + +testCommon.supports.explicitSnapshots && test('another test', function (t) { + const db = testCommon.factory() + // .. +}) +``` + +## Spread The Word + +If you'd like to share your awesome implementation with the world, here's what you might want to do: + +- Add an awesome badge to your `README`: `![level badge](https://leveljs.org/img/badge.svg)` +- Publish your awesome module to [npm](https://npmjs.org) +- Send a Pull Request to [Level/awesome](https://github.com/Level/awesome) to advertise your work! + +## Contributing + +[`Level/abstract-level`](https://github.com/Level/abstract-level) is an **OPEN Open Source Project**. This means that: + +> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. 
+ +See the [Contribution Guide](https://github.com/Level/community/blob/master/CONTRIBUTING.md) for more details. + +## Donate + +Support us with a monthly donation on [Open Collective](https://opencollective.com/level) and help us continue our work. + +## License + +[MIT](LICENSE) + +[level-badge]: https://leveljs.org/img/badge.svg diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 0000000..8443b21 --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,669 @@ +# Upgrade Guide + +This document describes breaking changes and how to upgrade. For a complete list of changes including minor and patch releases, please refer to the [changelog](CHANGELOG.md). + +## 3.0.0 + +This release drops support of Node.js 16. It also started using new JavaScript language features ([`1fdb362`](https://github.com/Level/abstract-level/commit/1fdb362)) which are supported by all target environments of `abstract-level` but may require additional configuration of JavaScript bundlers, for example if `browserify` is used. Third, the `put`, `del` & `batch` events (which were deprecated in `abstract-level` 2.0.0) have been removed in favor of the `write` event. + +On to the good news. We have some exciting new features! To start we have "explicit snapshots" which allow you to read previous versions of a database. This will be supported in at least `classic-level` and `memory-level` (see [Level/community#118](https://github.com/Level/community/issues/118)). 
Here's an example: + +```js +await db.put('example', 'before') +const snapshot = db.snapshot() +await db.put('example', 'after') +await db.get('example', { snapshot }) // Returns 'before' +await snapshot.close() +``` + +In TypeScript (5.2) that last `close()` call can be skipped because we added support of [`Symbol.asyncDispose`](https://github.com/tc39/proposal-explicit-resource-management) on databases, iterators and snapshots: + +```ts +await db.put('example', 'before') +await using snapshot = db.snapshot() +await db.put('example', 'after') +await db.get('example', { snapshot }) // Returns 'before' +``` + +Lastly, we added `has()` and `hasMany()` methods to check if keys exist without the cost of fetching values: + +```js +if (await db.has('fruit')) { + console.log('We have fruit') +} +``` + +Support of this feature is tracked in [Level/community#142](https://github.com/Level/community/issues/142). + +## 2.0.0 + +**This release adds [hooks](./README.md#hooks) and drops callbacks, not-found errors and support of Node.js < 16. The guide for this release consists of two sections. One for the public API, relevant to all consumers of `abstract-level` and implementations thereof (`level`, `classic-level`, `memory-level` et cetera) and another for the private API that only implementors should have to read.** + +If you're upgrading from `levelup`, `abstract-leveldown` or other old modules, it's recommended to first upgrade to `abstract-level` 1.x because that version includes compatibility checks that have since been removed. + +### 1. Public API + +#### 1.1. Callbacks have been removed + +All methods that previously (also) accepted a callback now only support promises. If you were already using promises then nothing changed, except for subtle timing differences and improved performance. 
If you were not yet using promises, migrating should be relatively straightforward because nearly all callbacks had just two arguments (an error and a result) thus making promise function signatures predictable. The only method that had a callback with more than two arguments was `iterator.next()`. If you previously did: + +```js +iterator.next(function (err, key, value) { + // .. +}) +``` + +You must now do: + +```js +const [ key, value ] = await iterator.next() +``` + +Or switch to async iterators: + +```js +for await (const [key, value] of iterator) { + // .. +} +``` + +The deprecated `iterator.end()` alias of `iterator.close()` has been removed. + +#### 1.2. Not found + +The `db.get()` method now yields `undefined` instead of an error for non-existing entries. If you previously did: + +```js +try { + await db.get('example') +} catch (err) { + if (err.code === 'LEVEL_NOT_FOUND') { + console.log('Not found') + } +} +``` + +You must now do: + +```js +const value = await db.get('example') + +if (value === undefined) { + console.log('Not found') +} +``` + +The same applies to equivalent and older `if (err.notFound)` code in the style of `levelup`. + +#### 1.3. Not ready + +The `ready` alias of the `open` event has been removed. If you previously did: + +```js +db.once('ready', function () { + // .. +}) +``` + +You must now do: + +```js +db.once('open', function () { + // .. +}) +``` + +Although, old code that uses these events would likely be better off using `db.open()` because synchronous events don't mix well with `async/await`. You could instead do: + +```js +await db.open({ passive: true }) +await db.get('example') +``` + +Or simply: + +```js +await db.get('example') +``` + +#### 1.4. Slower nested sublevels + +The internals of nested sublevels have been refactored for the benefit of [hooks](./README.md#hooks). Nested sublevels, no matter their depth, were previously all connected to the same parent database rather than forming a tree. 
In the following example, the `colorIndex` sublevel would previously forward its operations directly to `db`: + +```js +const indexes = db.sublevel('idx') +const colorIndex = indexes.sublevel('colors') +``` + +It will now forward its operations to `indexes`, which in turn forwards them to `db`. At each step, hooks and events are available to transform and react to data from a different perspective. Which comes at a (typically small) performance cost that increases with further nested sublevels. + +To optionally negate that cost, a new feature has been added to `db.sublevel(name)`: it now also accepts a `name` that is an array. If the `indexes` sublevel is only used to organize keys and not directly interfaced with, operations on `colorIndex` can be made faster by skipping `indexes`: + +```js +const colorIndex = db.sublevel(['idx', 'colors']) +``` + +#### 1.5. Open before creating a chained batch + +It is no longer possible to create a chained batch while the database is opening. If you previously did: + +```js +const db = new ExampleLevel() + +const batch = db.batch().del('example') +await batch.write() +``` + +You must now do: + +```js +const db = new ExampleLevel() +await db.open() + +const batch = db.batch().del('example') +await batch.write() +``` + +Alternatively: + +```js +const db = new ExampleLevel() +await db.batch([{ type: 'del', key: 'example' }]) +``` + +As for why that last example works yet the same is not supported on a chained batch: the `put()`, `del()` and `clear()` methods of a chained batch are synchronous. This meant `abstract-level` (and `levelup` before it) had to jump through several hoops to make it work while the database is opening. Having such logic internally is fine, but the problem extended to the new [hooks](./README.md#hooks) feature and more specifically, the `prewrite` hook that runs on `put()` and `del()`. + +### 2. Private API + +#### 2.1. 
Promises all the way + +All private methods that previously took a callback now use a promise. For example, the function signature `_get(key, options, callback)` has changed to `async _get(key, options)`. Same as in the public API, the new function signatures are predictable and the only method that requires special attention is `iterator._next()`. For details, please see the updated [README](./README.md#private-api-for-implementors). + +#### 2.2. Ticks + +Internal use of `process.nextTick` has been replaced with [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask) (which was already used in browsers) and the [polyfill](https://github.com/feross/queue-microtask) for `queueMicrotask` (for older browsers) has been removed. The `db.nextTick` utility has been removed as well. These utilities are typically not even needed anymore, thanks to the use of promises. If you previously did: + +```js +class ExampleLevel extends AbstractLevel { + _get (key, options, callback) { + process.nextTick(callback, null, 'abc') + } + + customMethod () { + this.nextTick(() => { + // .. + }) + } +} +``` + +You must now do: + +```js +class ExampleLevel extends AbstractLevel { + async _get (key, options) { + return 'abc' + } + + customMethod () { + queueMicrotask(() => { + // .. + }) + } +} +``` + +#### 2.3. A new way to abort iterator work + +Iterators now take an experimental `signal` option that is an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal). You can use the `signal` to abort an in-progress `_next()`, `_nextv()` or `_all()` call. Doing so is optional until a future semver-major release. + +#### 2.4. Snapshots must be synchronous + +If an implementation indicates support of snapshots via `db.supports.snapshots` then the `db._get()` and `db._getMany()` methods are now required to synchronously create their snapshot, rather than asynchronously. For details, please see the [README](./README.md#db_getkey-options).
This is a documentation-only change because the abstract test suite cannot verify it. + +## 1.0.0 + +**Introducing `abstract-level`: a fork of [`abstract-leveldown`](https://github.com/Level/abstract-leveldown) that removes the need for [`levelup`](https://github.com/Level/levelup), [`encoding-down`](https://github.com/Level/encoding-down) and more. An `abstract-level` database is a complete solution that doesn't need to be wrapped. It has the same API as `level(up)` including encodings, promises and events. In addition, implementations can now choose to use Uint8Array instead of Buffer. Consumers of an implementation can use both. Sublevels are builtin.** + +We've put together several upgrade guides for different modules. See the [FAQ](https://github.com/Level/community#faq) to find the best upgrade guide for you. This upgrade guide describes how to replace `abstract-leveldown` with `abstract-level`. Implementations that do so, can no longer be wrapped with `levelup`. + +The npm package name is `abstract-level` and the main export is called `AbstractLevel` rather than `AbstractLevelDOWN`. It started using classes. Support of Node.js 10 has been dropped. + +For most folks, a database that upgraded from `abstract-leveldown` to `abstract-level` can be a drop-in replacement for a `level(up)` database (with the exception of stream methods). Let's start this guide there: all methods have been enhanced to reach API parity with `levelup` and `level`. + +### 1. API parity with `levelup` + +#### 1.1. New: promises + +Methods that take a callback now also support promises. They return a promise if no callback is provided, the same as `levelup`. Implementations that override public (non-underscored) methods _must_ do the same and any implementation _should_ do the same for additional methods if any. + +#### 1.2. New: events + +An `abstract-level` database emits the same events as `levelup` would. + +#### 1.3. 
New: idempotent open + +Opening and closing a database is idempotent and safe, similar to `levelup` but more precise. If `open()` and `close()` are called repeatedly, the last call dictates the final status. Callbacks are not called (or promises not resolved) until any pending state changes are done. Same for events. Unlike on `levelup` it is safe to call `open()` while status is `'closing'`: the database will wait for closing to complete and then reopen. None of these changes are likely to constitute a breaking change; they increase state consistency in edge cases. + +The `open()` method has a new option called `passive`. If set to `true` the call will wait for, but not initiate, opening of the database. To similar effect as `db.once('open', callback)` with added benefit that it also works if the database is already open. Implementations that wrap another database can use the `passive` option to open themselves without taking full control of the database that they wrap. + +#### 1.4. New: deferred open + +Deferred open is built-in. This means a database opens itself a tick after its constructor returns (unless `open()` was called manually). Any operations made until opening has completed are queued up in memory. When opening completes the operations are replayed. If opening failed (and this is a new behavior compared to `levelup`) the operations will yield errors. The `AbstractLevel` class has a new `defer()` method for an implementation to defer custom operations. + +The initial `status` of a database is `'opening'` rather than `'new'`, which no longer exists. Wrapping a database with [`deferred-leveldown`](https://github.com/Level/deferred-leveldown) is not supported and will exhibit undefined behavior. + +Implementations must also accept options for `open()` in their constructor, which was previously done by `levelup`. 
For example, usage of the [`classic-level`](https://github.com/Level/classic-level) implementation is as follows: + +```js +const db = new ClassicLevel('./db', { + createIfMissing: false, + compression: false +}) +``` + +This works by first forwarding options to the `AbstractLevel` constructor, which in turn forwards them to `open(options)`. If `open(options)` is called manually those options will be shallowly merged with options from the constructor: + +```js +// Results in { createIfMissing: false, compression: true } +await db.open({ compression: true }) +``` + +A database is not "patch-safe". If some form of plugin monkey-patches a database like in the following example, it must now also take the responsibility of deferring the operation (as well as handling promises and callbacks) using `db.defer()`. I.e. this example is incomplete: + +```js +function plugin (db) { + const original = db.get + + db.get = function (...args) { + original.call(this, ...args) + } +} +``` + +#### 1.5. No constructor callback + +The database constructor does not take a callback argument, unlike `levelup`. This goes for `abstract-level` as well as implementations - which is to say, implementors don't have to (and should not) support this old pattern. + +Instead call `db.open()` if you wish to wait for opening (which is not necessary to use the database) or to capture an error. If that's your reason for using the callback and you previously initialized a database like so (simplified): + +```js +levelup(function (err, db) { + // .. +}) +``` + +You must now do: + +```js +db.open(function (err) { + // .. +}) +``` + +Or using promises: + +```js +await db.open() +``` + +#### 1.6. New: state checks + +On any operation, an `abstract-level` database checks if it's open. If not, it will either throw an error (if the relevant API is synchronous) or asynchronously yield an error. 
For example: + +```js +await db.close() + +try { + db.iterator() +} catch (err) { + console.log(err.code) // LEVEL_DATABASE_NOT_OPEN +} +``` + +_Errors now have a `code` property. More on that below\._ + +This may be a breaking change downstream because it changes error messages for implementations that had their own safety checks (which will now be ineffective because `abstract-level` checks are performed first) or implicitly relied on `levelup` checks. By safety we mean mainly that yielding a JavaScript error is preferred over segmentation faults, though non-native implementations also benefit from detecting incorrect usage. + +Implementations that have additional methods should add or align their own safety checks for consistency. Like so: + +
+Click to expand + +```js +const ModuleError = require('module-error') + +class ExampleLevel extends AbstractLevel { + // For brevity this example does not implement promises or encodings + approximateSize (start, end, callback) { + if (this.status === 'opening') { + this.defer(() => this.approximateSize(start, end, callback)) + } else if (this.status !== 'open') { + this.nextTick(callback, new ModuleError('Database is not open', { + code: 'LEVEL_DATABASE_NOT_OPEN' + })) + } else { + // .. + } + } +} +``` + +
+ +#### 1.7. New: chained batch length + +The `AbstractChainedBatch` prototype has a new `length` property that, like a chained batch in `levelup`, returns the number of queued operations in the batch. Implementations should not have to make changes for this unless they monkey-patched public methods of `AbstractChainedBatch`. + +### 2. API parity with `level` + +It was previously necessary to use [`level`](https://github.com/Level/level) to get the "full experience". Or similar modules like [`level-mem`](https://github.com/Level/mem), [`level-rocksdb`](https://github.com/Level/level-rocksdb) and more. These modules combined an `abstract-leveldown` implementation with [`encoding-down`](https://github.com/Level/encoding-down) and [`levelup`](https://github.com/Level/levelup). Encodings are now built-in to `abstract-level`, using [`level-transcoder`](https://github.com/Level/transcoder) rather than [`level-codec`](https://github.com/Level/codec). The main change is that logic from the existing public API has been expanded down into the storage layer. + +The `level` module still has a place, for its support of both Node.js and browsers and for being the main entrypoint into the Level ecosystem. The next major version of `level`, that's v8.0.0, will likely simply export [`classic-level`](https://github.com/Level/classic-level) in Node.js and [`browser-level`](https://github.com/Level/browser-level) in browsers. To differentiate, the text below will refer to the old version as `level@7`. + +#### 2.1. For consumers + +All relevant methods including the database constructor now accept `keyEncoding` and `valueEncoding` options, the same as `level@7`. Read operations now yield strings rather than buffers by default, having the same default `'utf8'` encoding as `level@7` and friends. + +There are a few differences from `level@7` and `encoding-down`. 
Some breaking: + +- The lesser-used `'ascii'`, `'ucs2'` and `'utf16le'` encodings are not supported +- The `'id'` encoding, which was not supported by any active `abstract-leveldown` implementation and aliased as `'none'`, has been removed +- The undocumented `encoding` option (as an alias for `valueEncoding`) is not supported. + +And some non-breaking: + +- The `'binary'` encoding has been renamed to `'buffer'`, with `'binary'` as an alias +- The `'utf8'` encoding previously did not touch Buffers. Now it will call `buffer.toString('utf8')` for consistency. Consumers can use the `'buffer'` encoding to avoid this conversion. + +If you previously did one of the following (on a database that's defaulting to the `'utf8'` encoding): + +```js +await db.put('a', Buffer.from('x')) +await db.put('a', Buffer.from('x'), { valueEncoding: 'binary' }) +``` + +Both examples will still work (assuming the buffer contains only UTF8 data) but you should now do: + +```js +await db.put('a', Buffer.from('x'), { valueEncoding: 'buffer' }) +``` + +Or use the new `'view'` encoding which accepts Uint8Arrays (and therefore also Buffer): + +```js +await db.put('a', new Uint8Array(...), { valueEncoding: 'view' }) +``` + +#### 2.2. For implementors + +_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._ + +Both the public and private API of `abstract-level` are encoding-aware. This means that private methods receive `keyEncoding` and `valueEncoding` options too, instead of the `keyAsBuffer`, `valueAsBuffer` and `asBuffer` options that `abstract-leveldown` had. Implementations don't need to perform encoding or decoding themselves. In fact they can do less: the `_serializeKey()` and `_serializeValue()` methods are also gone and implementations are less likely to have to convert between strings and buffers. 
+ +For example: a call like `db.put(key, { x: 2 }, { valueEncoding: 'json' })` will encode the `{ x: 2 }` value and might forward it to the private API as `db._put(key, '{"x":2}', { valueEncoding: 'utf8' }, callback)`. Same for the key, omitted for brevity. We say "might" because it depends on the implementation, which can now declare which encodings it supports. + +To first give a concrete example for `get()`, if your implementation previously did: + +```js +class ExampleLeveldown extends AbstractLevelDOWN { + _get (key, options, callback) { + if (options.asBuffer) { + this.nextTick(callback, null, Buffer.from('abc')) + } else { + this.nextTick(callback, null, 'abc') + } + } +} +``` + +You must now do (if still relevant): + +```js +class ExampleLevel extends AbstractLevel { + _get (key, options, callback) { + if (options.valueEncoding === 'buffer') { + this.nextTick(callback, null, Buffer.from('abc')) + } else { + this.nextTick(callback, null, 'abc') + } + } +} +``` + +The encoding options and data received by the private API depend on which encodings it supports. It must declare those via the manifest passed to the `AbstractLevel` constructor. See the [`README`](README.md) for details. For example, an implementation might only support storing data as Uint8Arrays, known here as the `'view'` encoding: + +```js +class ExampleLevel extends AbstractLevel { + constructor (location, options) { + super({ encodings: { view: true } }, options) + } +} +``` + +The earlier `put()` example would then result in `db._put(key, value, { valueEncoding: 'view' }, callback)` where `value` is a Uint8Array containing JSON in binary form. 
And the earlier `_get()` example can be simplified to: + +```js +class ExampleLevel extends AbstractLevel { + _get (key, options, callback) { + // No need to check valueEncoding as it's always 'view' + this.nextTick(callback, null, new Uint8Array(...)) + } +} +``` + +Implementations can also declare support of multiple encodings; keys and values will then be encoded via the most optimal path. For example: + +```js +super({ + encodings: { + view: true, + utf8: true + } +}) +``` + +#### 2.3. Other notable changes + +- The `AbstractIterator` constructor now requires an `options` argument, for encoding options +- The `AbstractIterator#_seek()` method got a new `options` argument, for a `keyEncoding` option +- The `db.supports.bufferKeys` property has been removed. Use `db.supports.encodings.buffer` instead. + +### 3. Streams have moved + +Node.js readable streams must now be created with a new standalone module called [`level-read-stream`](https://github.com/Level/read-stream), rather than database methods like `db.createReadStream()`. Please see its [upgrade guide](https://github.com/Level/read-stream/blob/main/UPGRADING.md#100) for details. + +To offer an alternative to `db.createKeyStream()` and `db.createValueStream()`, two new types of iterators have been added: `db.keys()` and `db.values()`. Their default implementations are functional but implementors may want to override them for optimal performance. The same goes for two new methods on iterators: `nextv()` and `all()`. To achieve this and honor the `limit` option, abstract iterators now count how many items they yielded, which may remove the need for implementations to do so on their own. Please see the README for details. + +### 4. Zero-length keys and range options are now valid + +These keys sort before anything else. Historically they weren't supported for causing segmentation faults in `leveldown`. That doesn't apply to today's codebase. 
Implementations must now support: + +```js +await db.put('', 'example') + +console.log(await db.get('')) // 'example' + +for await (const [key, value] of db.iterator({ lte: '' })) { + console.log(value) // 'example' +} +``` + +Same goes for zero-length Buffer and Uint8Array keys. Zero-length keys would previously result in an error and never reach the private API. + +### 5. Resources are auto-closed + +To further improve safety and consistency, additional changes were made that make an `abstract-level` database safer to use than `abstract-leveldown` wrapped with `levelup`. + +#### 5.1. Closing iterators is idempotent + +The `iterator.end()` method has been renamed to `iterator.close()`, with `end()` being an alias until a next major version in the future. The term "close" makes it easier to differentiate between the iterator having reached its natural end (data-wise) versus closing it to cleanup resources. If you previously did: + +```js +const iterator = db.iterator() +iterator.end(callback) +``` + +You should now do one of: + +```js +iterator.close(callback) +await iterator.close() +``` + +Likewise, in the private API for implementors, `_end()` has been renamed to `_close()` but without an alias. This method is no longer allowed to yield an error. + +On `db.close()`, non-closed iterators are now automatically closed. This may be a breaking change but only if an implementation has (at its own risk) overridden the public `end()` method, because `close()` or `end()` is now an idempotent operation rather than yielding an `end() already called on iterator` error. If a `next()` call is in progress, closing the iterator (or database) will wait for that. 
+ +The error message `cannot call next() after end()` has been replaced with code `LEVEL_ITERATOR_NOT_OPEN`, the error `cannot call seek() after end()` has been removed in favor of a silent return, and `cannot call next() before previous next() has completed` and `cannot call seek() before next() has completed` have been replaced with code `LEVEL_ITERATOR_BUSY`. + +The `next()` method no longer returns `this` (when a callback is provided). + +#### 5.2. Chained batch can be closed + +Chained batch has a new method `close()` which is an idempotent operation and automatically called after `write()` (for backwards compatibility) or on `db.close()`. This to ensure batches can't be used after closing and reopening a db. If a `write()` is in progress, closing will wait for that. If `write()` is never called then `close()` must be. For example: + +```js +const batch = db.batch() + .put('abc', 'zyz') + .del('foo') + +if (someCondition) { + await batch.write() +} else { + // Decided not to commit + await batch.close() +} + +// In either case this will throw +batch.put('more', 'data') +``` + +These changes could be breaking for an implementation that has (at its own risk) overridden the public `write()` method. In addition, the error message `write() already called on this batch` has been replaced with code `LEVEL_BATCH_NOT_OPEN`. + +An implementation can optionally override `AbstractChainedBatch#_close()` if it has resources to free and wishes to free them earlier than GC would. + +### 6. Errors now use codes + +The [`level-errors`](https://github.com/Level/errors) module as used by `levelup` and friends, is not used or exposed by `abstract-level`. Instead errors thrown or yielded from a database have a `code` property. See the [`README`](./README.md#errors) for details. Going forward, the semver contract will be on `code` and error messages will change without a semver-major bump. 
+ +To minimize breakage, the most used error as yielded by `get()` when an entry is not found, has the same properties that `level-errors` added (`notFound` and `status`) in addition to code `LEVEL_NOT_FOUND`. Those properties will be removed in a future version. Implementations can still yield an error that matches `/NotFound/i.test(err)` or they can start using the code. Either way `abstract-level` will normalize the error. + +If you previously did: + +```js +db.get('abc', function (err, value) { + if (err && err.notFound) { + // Handle missing entry + } +}) +``` + +That will still work but it's preferred to do: + +```js +db.get('abc', function (err, value) { + if (err && err.code === 'LEVEL_NOT_FOUND') { + // Handle missing entry + } +}) +``` + +Or using promises: + +```js +try { + const value = await db.get('abc') +} catch (err) { + if (err.code === 'LEVEL_NOT_FOUND') { + // Handle missing entry + } +} +``` + +### 7. Semi-private properties have been removed + +The following properties and methods can no longer be accessed, as they've been removed or replaced with internal [symbols](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol): + +- `AbstractIterator#_nexting` +- `AbstractIterator#_ended` +- `AbstractChainedBatch#_written` +- `AbstractChainedBatch#_checkWritten()` +- `AbstractChainedBatch#_operations` +- `AbstractLevel#_setupIteratorOptions()` + +### 8. Changes to test suite + +_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._ + +The abstract test suite of `abstract-level` has some breaking changes compared to `abstract-leveldown`: + +- Options to skip tests have been removed in favor of `db.supports` +- Support of `db.clear()` and `db.getMany()` is now mandatory. The default (slow) implementation of `_clear()` has been removed. +- Added tests that `gte` and `lte` range options take precedence over `gt` and `lt` respectively. 
This is incompatible with [`ltgt`](https://github.com/dominictarr/ltgt) but aligns with `subleveldown`, [`level-option-wrap`](https://github.com/substack/level-option-wrap) and half of `leveldown`. There was no good choice. +- The `setUp` and `tearDown` functions have been removed from the test suite and `suite.common()`. +- Added ability to access manifests via `testCommon.supports`, by lazily copying it from `testCommon.factory().supports`. This requires that the manifest does not change during the lifetime of a `db`. +- Your `factory()` function must now accept an `options` argument. + +Many tests were imported from `levelup`, `encoding-down`, `deferred-leveldown`, `memdown`, `level-js` and `leveldown`. They test the changes described above and improve coverage of existing behavior. + +Lastly, it's recommended to revisit any custom tests of an implementation. In particular if those tests relied upon the previously loose state checking of `abstract-leveldown`. For example, making a `db.put()` call before `db.open()`. Such a test now has a different meaning. The previous meaning can typically be restored by inserting `db.once('open', ...)` or `await db.open()` logic. + +### 9. Sublevels are builtin + +_This section is only relevant if you use [`subleveldown`](https://github.com/Level/subleveldown) (which can not wrap an `abstract-level` database)._ + +Sublevels are now builtin. If you previously did: + +```js +const sub = require('subleveldown') +const example1 = sub(db, 'example1') +const example2 = sub(db, 'example2', { valueEncoding: 'json' }) +``` + +You must now do: + +```js +const example1 = db.sublevel('example1') +const example2 = db.sublevel('example2', { valueEncoding: 'json' }) +``` + +The key structure is equal to that of `subleveldown`. This means that an `abstract-level` sublevel can read sublevels previously created with (and populated by) `subleveldown`. 
There are some new features: + +- `db.batch(..)` takes a `sublevel` option on operations, to atomically commit data to multiple sublevels +- Sublevels support Uint8Array in addition to Buffer +- `AbstractLevel#_sublevel()` can be overridden to add additional methods to sublevels. + +To reduce function overloads, the prefix argument (`example1` above) is now required and it's called `name` here. If you previously did one of the following, resulting in an empty name: + +```js +subleveldown(db) +subleveldown(db, { separator: '@' }) +``` + +You must now use an explicit empty name: + +```js +db.sublevel('') +db.sublevel('', { separator: '@' }) +``` + +The string shorthand for `{ separator }` has also been removed. If you previously did: + +```js +subleveldown(db, 'example', '@') +``` + +You must now do: + +```js +db.sublevel('example', { separator: '@' }) +``` + +Third, the `open` option has been removed. If you need an asynchronous open hook, feel free to open an issue to discuss restoring this API. Should it support promises? Should `abstract-level` support it on any database and not just sublevels? + +Lastly, the error message `Parent database is not open` (courtesy of `subleveldown` which had to check open state to prevent segmentation faults from underlying databases) changed to error code [`LEVEL_DATABASE_NOT_OPEN`](https://github.com/Level/abstract-level#errors) (courtesy of `abstract-level` which does those checks on any database). 
+ +--- + +_For earlier releases, before `abstract-level` was forked from `abstract-leveldown` (v7.2.0), please see [the upgrade guide of `abstract-leveldown`](https://github.com/Level/abstract-leveldown/blob/master/UPGRADING.md)._ diff --git a/abstract-chained-batch.js b/abstract-chained-batch.js new file mode 100644 index 0000000..3811a4e --- /dev/null +++ b/abstract-chained-batch.js @@ -0,0 +1,397 @@ +'use strict' + +const combineErrors = require('maybe-combine-errors') +const ModuleError = require('module-error') +const { getOptions, emptyOptions, noop } = require('./lib/common') +const { prefixDescendantKey, isDescendant } = require('./lib/prefixes') +const { PrewriteBatch } = require('./lib/prewrite-batch') + +const kPublicOperations = Symbol('publicOperations') +const kPrivateOperations = Symbol('privateOperations') + +class AbstractChainedBatch { + #status = 'open' + #length = 0 + #closePromise = null + #publicOperations + #prewriteRun + #prewriteBatch + #prewriteData + #addMode + + constructor (db, options) { + if (typeof db !== 'object' || db === null) { + const hint = db === null ? 'null' : typeof db + throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`) + } + + const enableWriteEvent = db.listenerCount('write') > 0 + const enablePrewriteHook = !db.hooks.prewrite.noop + + // Operations for write event. We can skip populating this array (and cloning of + // operations, which is the expensive part) if there are 0 write event listeners. + this.#publicOperations = enableWriteEvent ? [] : null + + this.#addMode = getOptions(options, emptyOptions).add === true + + if (enablePrewriteHook) { + // Use separate arrays to collect operations added by hook functions, because + // we wait to apply those until write(). Store these arrays in PrewriteData which + // exists to separate internal data from the public PrewriteBatch interface. + const data = new PrewriteData([], enableWriteEvent ? 
[] : null) + + this.#prewriteData = data + this.#prewriteBatch = new PrewriteBatch(db, data[kPrivateOperations], data[kPublicOperations]) + this.#prewriteRun = db.hooks.prewrite.run // TODO: document why + } else { + this.#prewriteData = null + this.#prewriteBatch = null + this.#prewriteRun = null + } + + this.db = db + this.db.attachResource(this) + } + + get length () { + if (this.#prewriteData !== null) { + return this.#length + this.#prewriteData.length + } else { + return this.#length + } + } + + put (key, value, options) { + this.#assertStatus() + options = getOptions(options, emptyOptions) + + const delegated = options.sublevel != null + const db = delegated ? options.sublevel : this.db + + db._assertValidKey(key) + db._assertValidValue(value) + + const op = { + ...options, + type: 'put', + key, + value, + keyEncoding: db.keyEncoding(options.keyEncoding), + valueEncoding: db.valueEncoding(options.valueEncoding) + } + + if (this.#prewriteRun !== null) { + try { + // Note: we could have chosen to recurse here so that prewriteBatch.put() would + // call this.put(). But then operations added by hook functions would be inserted + // before rather than after user operations. Instead we process those operations + // lazily in write(). This does hurt the only performance benefit benefit of a + // chained batch though, which is that it avoids blocking the event loop with + // more than one operation at a time. On the other hand, if operations added by + // hook functions are adjacent (i.e. sorted) committing them should be faster. 
+ this.#prewriteRun(op, this.#prewriteBatch) + + // Normalize encodings again in case they were modified + op.keyEncoding = db.keyEncoding(op.keyEncoding) + op.valueEncoding = db.valueEncoding(op.valueEncoding) + } catch (err) { + throw new ModuleError('The prewrite hook failed on batch.put()', { + code: 'LEVEL_HOOK_ERROR', + cause: err + }) + } + } + + // Encode data for private API + const keyEncoding = op.keyEncoding + const preencodedKey = keyEncoding.encode(op.key) + const keyFormat = keyEncoding.format + + // If the sublevel is not a descendant then forward that option to the parent db + // so that we don't erroneously add our own prefix to the key of the operation. + const siblings = delegated && !isDescendant(op.sublevel, this.db) && op.sublevel !== this.db + const encodedKey = delegated && !siblings + ? prefixDescendantKey(preencodedKey, keyFormat, db, this.db) + : preencodedKey + + const valueEncoding = op.valueEncoding + const encodedValue = valueEncoding.encode(op.value) + const valueFormat = valueEncoding.format + + // Only prefix once + if (delegated && !siblings) { + op.sublevel = null + } + + // If the sublevel is not a descendant then we shouldn't emit events + if (this.#publicOperations !== null && !siblings) { + // Clone op before we mutate it for the private API + const publicOperation = { ...op } + publicOperation.encodedKey = encodedKey + publicOperation.encodedValue = encodedValue + + if (delegated) { + // Ensure emitted data makes sense in the context of this db + publicOperation.key = encodedKey + publicOperation.value = encodedValue + publicOperation.keyEncoding = this.db.keyEncoding(keyFormat) + publicOperation.valueEncoding = this.db.valueEncoding(valueFormat) + } + + this.#publicOperations.push(publicOperation) + } + + // If we're forwarding the sublevel option then don't prefix the key yet + op.key = siblings ? 
encodedKey : this.db.prefixKey(encodedKey, keyFormat, true) + op.value = encodedValue + op.keyEncoding = keyFormat + op.valueEncoding = valueFormat + + if (this.#addMode) { + this._add(op) + } else { + // This "operation as options" trick avoids further cloning + this._put(op.key, encodedValue, op) + } + + // Increment only on success + this.#length++ + return this + } + + _put (key, value, options) {} + + del (key, options) { + this.#assertStatus() + options = getOptions(options, emptyOptions) + + const delegated = options.sublevel != null + const db = delegated ? options.sublevel : this.db + + db._assertValidKey(key) + + const op = { + ...options, + type: 'del', + key, + keyEncoding: db.keyEncoding(options.keyEncoding) + } + + if (this.#prewriteRun !== null) { + try { + this.#prewriteRun(op, this.#prewriteBatch) + + // Normalize encoding again in case it was modified + op.keyEncoding = db.keyEncoding(op.keyEncoding) + } catch (err) { + throw new ModuleError('The prewrite hook failed on batch.del()', { + code: 'LEVEL_HOOK_ERROR', + cause: err + }) + } + } + + // Encode data for private API + const keyEncoding = op.keyEncoding + const preencodedKey = keyEncoding.encode(op.key) + const keyFormat = keyEncoding.format + const encodedKey = delegated ? 
prefixDescendantKey(preencodedKey, keyFormat, db, this.db) : preencodedKey + + // Prevent double prefixing + if (delegated) op.sublevel = null + + if (this.#publicOperations !== null) { + // Clone op before we mutate it for the private API + const publicOperation = { ...op } + publicOperation.encodedKey = encodedKey + + if (delegated) { + // Ensure emitted data makes sense in the context of this db + publicOperation.key = encodedKey + publicOperation.keyEncoding = this.db.keyEncoding(keyFormat) + } + + this.#publicOperations.push(publicOperation) + } + + op.key = this.db.prefixKey(encodedKey, keyFormat, true) + op.keyEncoding = keyFormat + + if (this.#addMode) { + this._add(op) + } else { + // This "operation as options" trick avoids further cloning + this._del(op.key, op) + } + + // Increment only on success + this.#length++ + return this + } + + _del (key, options) {} + + _add (op) {} + + clear () { + this.#assertStatus() + this._clear() + + if (this.#publicOperations !== null) this.#publicOperations = [] + if (this.#prewriteData !== null) this.#prewriteData.clear() + + this.#length = 0 + return this + } + + _clear () {} + + async write (options) { + this.#assertStatus() + options = getOptions(options) + + if (this.#length === 0) { + return this.close() + } else { + this.#status = 'writing' + + // Prepare promise in case close() is called in the mean time + const close = this.#prepareClose() + + try { + // Process operations added by prewrite hook functions + if (this.#prewriteData !== null) { + const publicOperations = this.#prewriteData[kPublicOperations] + const privateOperations = this.#prewriteData[kPrivateOperations] + const length = this.#prewriteData.length + + for (let i = 0; i < length; i++) { + const op = privateOperations[i] + + // We can _add(), _put() or _del() even though status is now 'writing' because + // status isn't exposed to the private API, so there's no difference in state + // from that perspective, unless an implementation overrides the 
public write() + // method at its own risk. + if (this.#addMode) { + this._add(op) + } else if (op.type === 'put') { + this._put(op.key, op.value, op) + } else { + this._del(op.key, op) + } + } + + if (publicOperations !== null && length !== 0) { + this.#publicOperations = this.#publicOperations.concat(publicOperations) + } + } + + await this._write(options) + } catch (err) { + close() + + try { + await this.#closePromise + } catch (closeErr) { + // eslint-disable-next-line no-ex-assign + err = combineErrors([err, closeErr]) + } + + throw err + } + + close() + + // Emit after initiating the closing, because event may trigger a + // db close which in turn triggers (idempotently) closing this batch. + if (this.#publicOperations !== null) { + this.db.emit('write', this.#publicOperations) + } + + return this.#closePromise + } + } + + async _write (options) {} + + async close () { + if (this.#closePromise !== null) { + // First caller of close() or write() is responsible for error + return this.#closePromise.catch(noop) + } else { + // Wrap promise to avoid race issues on recursive calls + this.#prepareClose()() + return this.#closePromise + } + } + + async _close () {} + + #assertStatus () { + if (this.#status !== 'open') { + throw new ModuleError('Batch is not open: cannot change operations after write() or close()', { + code: 'LEVEL_BATCH_NOT_OPEN' + }) + } + + // Can technically be removed, because it's no longer possible to call db.batch() when + // status is not 'open', and db.close() closes the batch. Keep for now, in case of + // unforeseen userland behaviors. 
+ if (this.db.status !== 'open') { + /* istanbul ignore next */ + throw new ModuleError('Database is not open', { + code: 'LEVEL_DATABASE_NOT_OPEN' + }) + } + } + + #prepareClose () { + let close + + this.#closePromise = new Promise((resolve, reject) => { + close = () => { + this.#privateClose().then(resolve, reject) + } + }) + + return close + } + + async #privateClose () { + // TODO: should we not set status earlier? + this.#status = 'closing' + await this._close() + this.db.detachResource(this) + } +} + +if (typeof Symbol.asyncDispose === 'symbol') { + AbstractChainedBatch.prototype[Symbol.asyncDispose] = async function () { + return this.close() + } +} + +class PrewriteData { + constructor (privateOperations, publicOperations) { + this[kPrivateOperations] = privateOperations + this[kPublicOperations] = publicOperations + } + + get length () { + return this[kPrivateOperations].length + } + + clear () { + // Clear operation arrays if present. + for (const k of [kPublicOperations, kPrivateOperations]) { + const ops = this[k] + + if (ops !== null) { + // Keep array alive because PrewriteBatch has a reference to it + ops.splice(0, ops.length) + } + } + } +} + +exports.AbstractChainedBatch = AbstractChainedBatch diff --git a/abstract-iterator.js b/abstract-iterator.js new file mode 100644 index 0000000..6b30a61 --- /dev/null +++ b/abstract-iterator.js @@ -0,0 +1,404 @@ +'use strict' + +const ModuleError = require('module-error') +const combineErrors = require('maybe-combine-errors') +const { getOptions, emptyOptions, noop } = require('./lib/common') +const { AbortError } = require('./lib/errors') + +const kDecodeOne = Symbol('decodeOne') +const kDecodeMany = Symbol('decodeMany') +const kKeyEncoding = Symbol('keyEncoding') +const kValueEncoding = Symbol('valueEncoding') + +// This class is an internal utility for common functionality between AbstractIterator, +// AbstractKeyIterator and AbstractValueIterator. It's not exported. 
+class CommonIterator { + #working = false + #pendingClose = null + #closingPromise = null + #count = 0 + #signal + #limit + #ended + #snapshot + + constructor (db, options) { + if (typeof db !== 'object' || db === null) { + const hint = db === null ? 'null' : typeof db + throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`) + } + + if (typeof options !== 'object' || options === null) { + throw new TypeError('The second argument must be an options object') + } + + this[kKeyEncoding] = options[kKeyEncoding] + this[kValueEncoding] = options[kValueEncoding] + + this.#limit = Number.isInteger(options.limit) && options.limit >= 0 ? options.limit : Infinity + this.#signal = options.signal != null ? options.signal : null + this.#snapshot = options.snapshot != null ? options.snapshot : null + + // Ending means reaching the natural end of the data and (unlike closing) that can + // be reset by seek(), unless the limit was reached. + this.#ended = false + + this.db = db + this.db.attachResource(this) + } + + get count () { + return this.#count + } + + get limit () { + return this.#limit + } + + async next () { + this.#startWork() + + try { + if (this.#ended || this.#count >= this.#limit) { + this.#ended = true + return undefined + } + + let item = await this._next() + + if (item === undefined) { + this.#ended = true + return undefined + } + + try { + item = this[kDecodeOne](item) + } catch (err) { + throw new IteratorDecodeError(err) + } + + this.#count++ + return item + } finally { + this.#endWork() + } + } + + async _next () {} + + async nextv (size, options) { + if (!Number.isInteger(size)) { + throw new TypeError("The first argument 'size' must be an integer") + } + + options = getOptions(options, emptyOptions) + + if (size < 1) size = 1 + if (this.#limit < Infinity) size = Math.min(size, this.#limit - this.#count) + + this.#startWork() + + try { + if (this.#ended || size <= 0) { + this.#ended = true + return [] + } + + const 
items = await this._nextv(size, options) + + if (items.length === 0) { + this.#ended = true + return items + } + + try { + this[kDecodeMany](items) + } catch (err) { + throw new IteratorDecodeError(err) + } + + this.#count += items.length + return items + } finally { + this.#endWork() + } + } + + async _nextv (size, options) { + const acc = [] + + while (acc.length < size) { + const item = await this._next(options) + + if (item !== undefined) { + acc.push(item) + } else { + // Must track this here because we're directly calling _next() + this.#ended = true + break + } + } + + return acc + } + + async all (options) { + options = getOptions(options, emptyOptions) + this.#startWork() + + try { + if (this.#ended || this.#count >= this.#limit) { + return [] + } + + const items = await this._all(options) + + try { + this[kDecodeMany](items) + } catch (err) { + throw new IteratorDecodeError(err) + } + + this.#count += items.length + return items + } catch (err) { + this.#endWork() + await this.#destroy(err) + } finally { + this.#ended = true + + if (this.#working) { + this.#endWork() + await this.close() + } + } + } + + async _all (options) { + // Must count here because we're directly calling _nextv() + let count = this.#count + + const acc = [] + + while (true) { + // Not configurable, because implementations should optimize _all(). + const size = this.#limit < Infinity ? 
Math.min(1e3, this.#limit - count) : 1e3 + + if (size <= 0) { + return acc + } + + const items = await this._nextv(size, options) + + if (items.length === 0) { + return acc + } + + acc.push.apply(acc, items) + count += items.length + } + } + + seek (target, options) { + options = getOptions(options, emptyOptions) + + if (this.#closingPromise !== null) { + // Don't throw here, to be kind to implementations that wrap + // another db and don't necessarily control when the db is closed + } else if (this.#working) { + throw new ModuleError('Iterator is busy: cannot call seek() until next() has completed', { + code: 'LEVEL_ITERATOR_BUSY' + }) + } else { + const keyEncoding = this.db.keyEncoding(options.keyEncoding || this[kKeyEncoding]) + const keyFormat = keyEncoding.format + + if (options.keyEncoding !== keyFormat) { + options = { ...options, keyEncoding: keyFormat } + } + + const mapped = this.db.prefixKey(keyEncoding.encode(target), keyFormat, false) + this._seek(mapped, options) + + // If _seek() was successful, more data may be available. 
+ this.#ended = false + } + } + + _seek (target, options) { + throw new ModuleError('Iterator does not implement seek()', { + code: 'LEVEL_NOT_SUPPORTED' + }) + } + + async close () { + if (this.#closingPromise !== null) { + // First caller of close() is responsible for error + return this.#closingPromise.catch(noop) + } + + // Wrap to avoid race issues on recursive calls + this.#closingPromise = new Promise((resolve, reject) => { + this.#pendingClose = () => { + this.#pendingClose = null + this.#privateClose().then(resolve, reject) + } + }) + + // If working we'll delay closing, but still handle the close error (if any) here + if (!this.#working) { + this.#pendingClose() + } + + return this.#closingPromise + } + + async _close () {} + + async * [Symbol.asyncIterator] () { + try { + let item + + while ((item = (await this.next())) !== undefined) { + yield item + } + } catch (err) { + await this.#destroy(err) + } finally { + await this.close() + } + } + + #startWork () { + if (this.#closingPromise !== null) { + throw new ModuleError('Iterator is not open: cannot read after close()', { + code: 'LEVEL_ITERATOR_NOT_OPEN' + }) + } else if (this.#working) { + throw new ModuleError('Iterator is busy: cannot read until previous read has completed', { + code: 'LEVEL_ITERATOR_BUSY' + }) + } else if (this.#signal?.aborted) { + throw new AbortError() + } + + // Keep snapshot open during operation + this.#snapshot?.ref() + this.#working = true + } + + #endWork () { + this.#working = false + this.#pendingClose?.() + this.#snapshot?.unref() + } + + async #privateClose () { + await this._close() + this.db.detachResource(this) + } + + async #destroy (err) { + try { + await this.close() + } catch (closeErr) { + throw combineErrors([err, closeErr]) + } + + throw err + } +} + +if (typeof Symbol.asyncDispose === 'symbol') { + CommonIterator.prototype[Symbol.asyncDispose] = async function () { + return this.close() + } +} + +// For backwards compatibility this class is not (yet) called 
AbstractEntryIterator. +class AbstractIterator extends CommonIterator { + #keys + #values + + constructor (db, options) { + super(db, options) + this.#keys = options.keys !== false + this.#values = options.values !== false + } + + [kDecodeOne] (entry) { + const key = entry[0] + const value = entry[1] + + if (key !== undefined) { + entry[0] = this.#keys ? this[kKeyEncoding].decode(key) : undefined + } + + if (value !== undefined) { + entry[1] = this.#values ? this[kValueEncoding].decode(value) : undefined + } + + return entry + } + + [kDecodeMany] (entries) { + const keyEncoding = this[kKeyEncoding] + const valueEncoding = this[kValueEncoding] + + for (const entry of entries) { + const key = entry[0] + const value = entry[1] + + if (key !== undefined) entry[0] = this.#keys ? keyEncoding.decode(key) : undefined + if (value !== undefined) entry[1] = this.#values ? valueEncoding.decode(value) : undefined + } + } +} + +class AbstractKeyIterator extends CommonIterator { + [kDecodeOne] (key) { + return this[kKeyEncoding].decode(key) + } + + [kDecodeMany] (keys) { + const keyEncoding = this[kKeyEncoding] + + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + if (key !== undefined) keys[i] = keyEncoding.decode(key) + } + } +} + +class AbstractValueIterator extends CommonIterator { + [kDecodeOne] (value) { + return this[kValueEncoding].decode(value) + } + + [kDecodeMany] (values) { + const valueEncoding = this[kValueEncoding] + + for (let i = 0; i < values.length; i++) { + const value = values[i] + if (value !== undefined) values[i] = valueEncoding.decode(value) + } + } +} + +// Internal utility, not typed or exported +class IteratorDecodeError extends ModuleError { + constructor (cause) { + super('Iterator could not decode data', { + code: 'LEVEL_DECODE_ERROR', + cause + }) + } +} + +// Exposed so that AbstractLevel can set these options +AbstractIterator.keyEncoding = kKeyEncoding +AbstractIterator.valueEncoding = kValueEncoding + +exports.AbstractIterator = 
AbstractIterator +exports.AbstractKeyIterator = AbstractKeyIterator +exports.AbstractValueIterator = AbstractValueIterator diff --git a/abstract-level.js b/abstract-level.js new file mode 100644 index 0000000..d0621e0 --- /dev/null +++ b/abstract-level.js @@ -0,0 +1,1060 @@ +'use strict' + +const { supports } = require('level-supports') +const { Transcoder } = require('level-transcoder') +const { EventEmitter } = require('events') +const ModuleError = require('module-error') +const combineErrors = require('maybe-combine-errors') +const { AbstractIterator } = require('./abstract-iterator') +const { DefaultKeyIterator, DefaultValueIterator } = require('./lib/default-kv-iterator') +const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('./lib/deferred-iterator') +const { DefaultChainedBatch } = require('./lib/default-chained-batch') +const { DatabaseHooks } = require('./lib/hooks') +const { PrewriteBatch } = require('./lib/prewrite-batch') +const { EventMonitor } = require('./lib/event-monitor') +const { getOptions, noop, emptyOptions, resolvedPromise } = require('./lib/common') +const { prefixDescendantKey, isDescendant } = require('./lib/prefixes') +const { DeferredQueue } = require('./lib/deferred-queue') +const rangeOptions = require('./lib/range-options') + +class AbstractLevel extends EventEmitter { + #status = 'opening' + #deferOpen = true + #statusChange = null + #statusLocked = false + #resources + #queue + #options + #defaultOptions + #transcoder + #keyEncoding + #valueEncoding + #eventMonitor + + constructor (manifest, options) { + super() + + if (typeof manifest !== 'object' || manifest === null) { + throw new TypeError("The first argument 'manifest' must be an object") + } + + options = getOptions(options) + const { keyEncoding, valueEncoding, passive, ...forward } = options + + this.#resources = new Set() + this.#queue = new DeferredQueue() + this.#options = forward + + // Aliased for backwards compatibility + const 
implicitSnapshots = manifest.snapshots !== false && + manifest.implicitSnapshots !== false + + this.hooks = new DatabaseHooks() + this.supports = supports(manifest, { + deferredOpen: true, + seek: true, + implicitSnapshots, + permanence: manifest.permanence !== false, + + encodings: manifest.encodings || {}, + events: { + ...manifest.events, + opening: true, + open: true, + closing: true, + closed: true, + write: true, + clear: true + } + }) + + this.#eventMonitor = new EventMonitor(this) + this.#transcoder = new Transcoder(formats(this)) + this.#keyEncoding = this.#transcoder.encoding(keyEncoding || 'utf8') + this.#valueEncoding = this.#transcoder.encoding(valueEncoding || 'utf8') + + // Add custom and transcoder encodings to manifest + for (const encoding of this.#transcoder.encodings()) { + if (!this.supports.encodings[encoding.commonName]) { + this.supports.encodings[encoding.commonName] = true + } + } + + this.#defaultOptions = { + empty: emptyOptions, + entry: Object.freeze({ + keyEncoding: this.#keyEncoding.commonName, + valueEncoding: this.#valueEncoding.commonName + }), + entryFormat: Object.freeze({ + keyEncoding: this.#keyEncoding.format, + valueEncoding: this.#valueEncoding.format + }), + key: Object.freeze({ + keyEncoding: this.#keyEncoding.commonName + }), + keyFormat: Object.freeze({ + keyEncoding: this.#keyEncoding.format + }), + owner: Object.freeze({ + owner: this + }) + } + + // Before we start opening, let subclass finish its constructor + // and allow events and postopen hook functions to be added. + queueMicrotask(() => { + if (this.#deferOpen) { + this.open({ passive: false }).catch(noop) + } + }) + } + + get status () { + return this.#status + } + + get parent () { + return null + } + + keyEncoding (encoding) { + return this.#transcoder.encoding(encoding ?? this.#keyEncoding) + } + + valueEncoding (encoding) { + return this.#transcoder.encoding(encoding ?? 
this.#valueEncoding) + } + + async open (options) { + options = { ...this.#options, ...getOptions(options) } + + options.createIfMissing = options.createIfMissing !== false + options.errorIfExists = !!options.errorIfExists + + // TODO: document why we do this + const postopen = this.hooks.postopen.noop ? null : this.hooks.postopen.run + const passive = options.passive + + if (passive && this.#deferOpen) { + // Wait a tick until constructor calls open() non-passively + await undefined + } + + // Wait for pending changes and check that opening is allowed + this.#assertUnlocked() + while (this.#statusChange !== null) await this.#statusChange.catch(noop) + this.#assertUnlocked() + + if (passive) { + if (this.#status !== 'open') throw new NotOpenError() + } else if (this.#status === 'closed' || this.#deferOpen) { + this.#deferOpen = false + this.#statusChange = resolvedPromise // TODO: refactor + this.#statusChange = (async () => { + this.#status = 'opening' + + try { + this.emit('opening') + await this._open(options) + } catch (err) { + this.#status = 'closed' + + // Must happen before we close resources, in case their close() is waiting + // on a deferred operation which in turn is waiting on db.open(). + this.#queue.drain() + + try { + await this.#closeResources() + } catch (resourceErr) { + // eslint-disable-next-line no-ex-assign + err = combineErrors([err, resourceErr]) + } + + throw new NotOpenError(err) + } + + this.#status = 'open' + + if (postopen !== null) { + let hookErr + + try { + // Prevent deadlock + this.#statusLocked = true + await postopen(options) + } catch (err) { + hookErr = convertRejection(err) + } finally { + this.#statusLocked = false + } + + // Revert + if (hookErr) { + this.#status = 'closing' + this.#queue.drain() + + try { + await this.#closeResources() + await this._close() + } catch (closeErr) { + // There's no safe state to return to. Can't return to 'open' because + // postopen hook failed. 
Can't return to 'closed' (with the ability to + // reopen) because the underlying database is potentially still open. + this.#statusLocked = true + hookErr = combineErrors([hookErr, closeErr]) + } + + this.#status = 'closed' + + throw new ModuleError('The postopen hook failed on open()', { + code: 'LEVEL_HOOK_ERROR', + cause: hookErr + }) + } + } + + this.#queue.drain() + this.emit('open') + })() + + try { + await this.#statusChange + } finally { + this.#statusChange = null + } + } else if (this.#status !== 'open') { + /* istanbul ignore next: should not happen */ + throw new NotOpenError() + } + } + + async _open (options) {} + + async close () { + // Wait for pending changes and check that closing is allowed + this.#assertUnlocked() + while (this.#statusChange !== null) await this.#statusChange.catch(noop) + this.#assertUnlocked() + + if (this.#status === 'open' || this.#deferOpen) { + // If close() was called after constructor, we didn't open yet + const fromInitial = this.#deferOpen + + this.#deferOpen = false + this.#statusChange = resolvedPromise + this.#statusChange = (async () => { + this.#status = 'closing' + this.#queue.drain() + + try { + this.emit('closing') + await this.#closeResources() + if (!fromInitial) await this._close() + } catch (err) { + this.#status = 'open' + this.#queue.drain() + throw new NotClosedError(err) + } + + this.#status = 'closed' + this.#queue.drain() + this.emit('closed') + })() + + try { + await this.#statusChange + } finally { + this.#statusChange = null + } + } else if (this.#status !== 'closed') { + /* istanbul ignore next: should not happen */ + throw new NotClosedError() + } + } + + async #closeResources () { + if (this.#resources.size === 0) { + return + } + + // In parallel so that all resources know they are closed + const resources = Array.from(this.#resources) + const promises = resources.map(closeResource) + const results = await Promise.allSettled(promises) + const errors = [] + + for (let i = 0; i < results.length; 
i++) { + if (results[i].status === 'fulfilled') { + this.#resources.delete(resources[i]) + } else { + errors.push(convertRejection(results[i].reason)) + } + } + + if (errors.length > 0) { + throw combineErrors(errors) + } + } + + async _close () {} + + async get (key, options) { + options = getOptions(options, this.#defaultOptions.entry) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.get(key, options)) + } + + this.#assertOpen() + this._assertValidKey(key) + + const snapshot = options.snapshot + const keyEncoding = this.keyEncoding(options.keyEncoding) + const valueEncoding = this.valueEncoding(options.valueEncoding) + const keyFormat = keyEncoding.format + const valueFormat = valueEncoding.format + + // Forward encoding options. Avoid cloning if possible. + if (options === this.#defaultOptions.entry) { + options = this.#defaultOptions.entryFormat + } else if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) { + options = { ...options, keyEncoding: keyFormat, valueEncoding: valueFormat } + } + + const encodedKey = keyEncoding.encode(key) + const mappedKey = this.prefixKey(encodedKey, keyFormat, true) + + // Keep snapshot open during operation + snapshot?.ref() + + let value + + try { + value = await this._get(mappedKey, options) + } finally { + // Release snapshot + snapshot?.unref() + } + + try { + return value === undefined ? 
value : valueEncoding.decode(value) + } catch (err) { + throw new ModuleError('Could not decode value', { + code: 'LEVEL_DECODE_ERROR', + cause: err + }) + } + } + + async _get (key, options) { + return undefined + } + + async getMany (keys, options) { + options = getOptions(options, this.#defaultOptions.entry) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.getMany(keys, options)) + } + + this.#assertOpen() + + if (!Array.isArray(keys)) { + throw new TypeError("The first argument 'keys' must be an array") + } + + if (keys.length === 0) { + return [] + } + + const snapshot = options.snapshot + const keyEncoding = this.keyEncoding(options.keyEncoding) + const valueEncoding = this.valueEncoding(options.valueEncoding) + const keyFormat = keyEncoding.format + const valueFormat = valueEncoding.format + + // Forward encoding options. Avoid cloning if possible. + if (options === this.#defaultOptions.entry) { + options = this.#defaultOptions.entryFormat + } else if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) { + options = { ...options, keyEncoding: keyFormat, valueEncoding: valueFormat } + } + + const mappedKeys = new Array(keys.length) + + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + this._assertValidKey(key) + mappedKeys[i] = this.prefixKey(keyEncoding.encode(key), keyFormat, true) + } + + // Keep snapshot open during operation + snapshot?.ref() + + let values + + try { + values = await this._getMany(mappedKeys, options) + } finally { + // Release snapshot + snapshot?.unref() + } + + try { + for (let i = 0; i < values.length; i++) { + if (values[i] !== undefined) { + values[i] = valueEncoding.decode(values[i]) + } + } + } catch (err) { + throw new ModuleError(`Could not decode one or more of ${values.length} value(s)`, { + code: 'LEVEL_DECODE_ERROR', + cause: err + }) + } + + return values + } + + async _getMany (keys, options) { + return new Array(keys.length).fill(undefined) + } + + async has 
(key, options) { + options = getOptions(options, this.#defaultOptions.key) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.has(key, options)) + } + + this.#assertOpen() + this._assertValidKey(key) + + const snapshot = options.snapshot + const keyEncoding = this.keyEncoding(options.keyEncoding) + const keyFormat = keyEncoding.format + + // Forward encoding options. Avoid cloning if possible. + if (options === this.#defaultOptions.key) { + options = this.#defaultOptions.keyFormat + } else if (options.keyEncoding !== keyFormat) { + options = { ...options, keyEncoding: keyFormat } + } + + const encodedKey = keyEncoding.encode(key) + const mappedKey = this.prefixKey(encodedKey, keyFormat, true) + + // Keep snapshot open during operation + snapshot?.ref() + + try { + return this._has(mappedKey, options) + } finally { + // Release snapshot + snapshot?.unref() + } + } + + async _has (key, options) { + throw new ModuleError('Database does not support has()', { + code: 'LEVEL_NOT_SUPPORTED' + }) + } + + async hasMany (keys, options) { + options = getOptions(options, this.#defaultOptions.key) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.hasMany(keys, options)) + } + + this.#assertOpen() + + if (!Array.isArray(keys)) { + throw new TypeError("The first argument 'keys' must be an array") + } + + if (keys.length === 0) { + return [] + } + + const snapshot = options.snapshot + const keyEncoding = this.keyEncoding(options.keyEncoding) + const keyFormat = keyEncoding.format + + // Forward encoding options. Avoid cloning if possible. 
+ if (options === this.#defaultOptions.key) { + options = this.#defaultOptions.keyFormat + } else if (options.keyEncoding !== keyFormat) { + options = { ...options, keyEncoding: keyFormat } + } + + const mappedKeys = new Array(keys.length) + + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + this._assertValidKey(key) + mappedKeys[i] = this.prefixKey(keyEncoding.encode(key), keyFormat, true) + } + + // Keep snapshot open during operation + snapshot?.ref() + + try { + return this._hasMany(mappedKeys, options) + } finally { + // Release snapshot + snapshot?.unref() + } + } + + async _hasMany (keys, options) { + throw new ModuleError('Database does not support hasMany()', { + code: 'LEVEL_NOT_SUPPORTED' + }) + } + + async put (key, value, options) { + if (!this.hooks.prewrite.noop) { + // Forward to batch() which will run the hook + // Note: technically means that put() supports the sublevel option in this case, + // but it generally doesn't per documentation (which makes sense). Same for del(). + return this.batch([{ type: 'put', key, value }], options) + } + + options = getOptions(options, this.#defaultOptions.entry) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.put(key, value, options)) + } + + this.#assertOpen() + + this._assertValidKey(key) + this._assertValidValue(value) + + // Encode data for private API + const keyEncoding = this.keyEncoding(options.keyEncoding) + const valueEncoding = this.valueEncoding(options.valueEncoding) + const keyFormat = keyEncoding.format + const valueFormat = valueEncoding.format + const enableWriteEvent = this.#eventMonitor.write + const original = options + + // Forward encoding options. Avoid cloning if possible. 
+ if (options === this.#defaultOptions.entry) { + options = this.#defaultOptions.entryFormat + } else if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) { + options = { ...options, keyEncoding: keyFormat, valueEncoding: valueFormat } + } + + const encodedKey = keyEncoding.encode(key) + const prefixedKey = this.prefixKey(encodedKey, keyFormat, true) + const encodedValue = valueEncoding.encode(value) + + await this._put(prefixedKey, encodedValue, options) + + if (enableWriteEvent) { + const op = { + ...original, + type: 'put', + key, + value, + keyEncoding, + valueEncoding, + encodedKey, + encodedValue + } + + this.emit('write', [op]) + } + } + + async _put (key, value, options) {} + + async del (key, options) { + if (!this.hooks.prewrite.noop) { + // Forward to batch() which will run the hook + return this.batch([{ type: 'del', key }], options) + } + + options = getOptions(options, this.#defaultOptions.key) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.del(key, options)) + } + + this.#assertOpen() + this._assertValidKey(key) + + // Encode data for private API + const keyEncoding = this.keyEncoding(options.keyEncoding) + const keyFormat = keyEncoding.format + const enableWriteEvent = this.#eventMonitor.write + const original = options + + // Forward encoding options. Avoid cloning if possible. 
+ if (options === this.#defaultOptions.key) { + options = this.#defaultOptions.keyFormat + } else if (options.keyEncoding !== keyFormat) { + options = { ...options, keyEncoding: keyFormat } + } + + const encodedKey = keyEncoding.encode(key) + const prefixedKey = this.prefixKey(encodedKey, keyFormat, true) + + await this._del(prefixedKey, options) + + if (enableWriteEvent) { + const op = { + ...original, + type: 'del', + key, + keyEncoding, + encodedKey + } + + this.emit('write', [op]) + } + } + + async _del (key, options) {} + + // TODO (future): add way for implementations to declare which options are for the + // whole batch rather than defaults for individual operations. E.g. the sync option + // of classic-level, that should not be copied to individual operations. + batch (operations, options) { + if (!arguments.length) { + this.#assertOpen() + return this._chainedBatch() + } + + options = getOptions(options, this.#defaultOptions.empty) + return this.#arrayBatch(operations, options) + } + + // Wrapped for async error handling + async #arrayBatch (operations, options) { + // TODO (not urgent): freeze prewrite hook and write event + if (this.#status === 'opening') { + return this.deferAsync(() => this.#arrayBatch(operations, options)) + } + + this.#assertOpen() + + if (!Array.isArray(operations)) { + throw new TypeError("The first argument 'operations' must be an array") + } + + if (operations.length === 0) { + return + } + + const length = operations.length + const enablePrewriteHook = !this.hooks.prewrite.noop + const enableWriteEvent = this.#eventMonitor.write + const publicOperations = enableWriteEvent ? new Array(length) : null + const privateOperations = new Array(length) + const prewriteBatch = enablePrewriteHook + ? new PrewriteBatch(this, privateOperations, publicOperations) + : null + + for (let i = 0; i < length; i++) { + // Clone the op so that we can freely mutate it. 
We can't use a class because the + // op can have userland properties that we'd have to copy, negating the performance + // benefits of a class. So use a plain object. + const op = { ...options, ...operations[i] } + + // Hook functions can modify op but not its type or sublevel, so cache those + const isPut = op.type === 'put' + const delegated = op.sublevel != null + const db = delegated ? op.sublevel : this + + db._assertValidKey(op.key) + + op.keyEncoding = db.keyEncoding(op.keyEncoding) + + if (isPut) { + db._assertValidValue(op.value) + op.valueEncoding = db.valueEncoding(op.valueEncoding) + } else if (op.type !== 'del') { + throw new TypeError("A batch operation must have a type property that is 'put' or 'del'") + } + + if (enablePrewriteHook) { + try { + this.hooks.prewrite.run(op, prewriteBatch) + + // Normalize encodings again in case they were modified + op.keyEncoding = db.keyEncoding(op.keyEncoding) + if (isPut) op.valueEncoding = db.valueEncoding(op.valueEncoding) + } catch (err) { + throw new ModuleError('The prewrite hook failed on batch()', { + code: 'LEVEL_HOOK_ERROR', + cause: err + }) + } + } + + // Encode data for private API + const keyEncoding = op.keyEncoding + const preencodedKey = keyEncoding.encode(op.key) + const keyFormat = keyEncoding.format + + // If the sublevel is not a descendant then forward that option to the parent db + // so that we don't erroneously add our own prefix to the key of the operation. + const siblings = delegated && !isDescendant(op.sublevel, this) && op.sublevel !== this + const encodedKey = delegated && !siblings + ? 
prefixDescendantKey(preencodedKey, keyFormat, db, this) + : preencodedKey + + // Only prefix once + if (delegated && !siblings) { + op.sublevel = null + } + + let publicOperation = null + + // If the sublevel is not a descendant then we shouldn't emit events + if (enableWriteEvent && !siblings) { + // Clone op before we mutate it for the private API + // TODO (future semver-major): consider sending this shape to private API too + publicOperation = { ...op } + publicOperation.encodedKey = encodedKey + + if (delegated) { + // Ensure emitted data makes sense in the context of this db + publicOperation.key = encodedKey + publicOperation.keyEncoding = this.keyEncoding(keyFormat) + } + + publicOperations[i] = publicOperation + } + + // If we're forwarding the sublevel option then don't prefix the key yet + op.key = siblings ? encodedKey : this.prefixKey(encodedKey, keyFormat, true) + op.keyEncoding = keyFormat + + if (isPut) { + const valueEncoding = op.valueEncoding + const encodedValue = valueEncoding.encode(op.value) + const valueFormat = valueEncoding.format + + op.value = encodedValue + op.valueEncoding = valueFormat + + if (enableWriteEvent && !siblings) { + publicOperation.encodedValue = encodedValue + + if (delegated) { + publicOperation.value = encodedValue + publicOperation.valueEncoding = this.valueEncoding(valueFormat) + } + } + } + + privateOperations[i] = op + } + + // TODO (future): maybe add separate hook to run on private data. Currently can't work + // because prefixing happens too soon; we need to move that logic to the private + // API of AbstractSublevel (or reimplement with hooks). TBD how it'd work in chained + // batch. Hook would look something like hooks.midwrite.run(privateOperations, ...). 
+ + await this._batch(privateOperations, options) + + if (enableWriteEvent) { + this.emit('write', publicOperations) + } + } + + async _batch (operations, options) {} + + sublevel (name, options) { + const xopts = AbstractSublevel.defaults(options) + const sublevel = this._sublevel(name, xopts) + + if (!this.hooks.newsub.noop) { + try { + this.hooks.newsub.run(sublevel, xopts) + } catch (err) { + throw new ModuleError('The newsub hook failed on sublevel()', { + code: 'LEVEL_HOOK_ERROR', + cause: err + }) + } + } + + return sublevel + } + + _sublevel (name, options) { + return new AbstractSublevel(this, name, options) + } + + prefixKey (key, keyFormat, local) { + return key + } + + async clear (options) { + options = getOptions(options, this.#defaultOptions.empty) + + if (this.#status === 'opening') { + return this.deferAsync(() => this.clear(options)) + } + + this.#assertOpen() + + const original = options + const keyEncoding = this.keyEncoding(options.keyEncoding) + const snapshot = options.snapshot + + options = rangeOptions(options, keyEncoding) + options.keyEncoding = keyEncoding.format + + if (options.limit !== 0) { + // Keep snapshot open during operation + snapshot?.ref() + + try { + await this._clear(options) + } finally { + // Release snapshot + snapshot?.unref() + } + + this.emit('clear', original) + } + } + + async _clear (options) {} + + iterator (options) { + const keyEncoding = this.keyEncoding(options?.keyEncoding) + const valueEncoding = this.valueEncoding(options?.valueEncoding) + + options = rangeOptions(options, keyEncoding) + options.keys = options.keys !== false + options.values = options.values !== false + + // We need the original encoding options in AbstractIterator in order to decode data + options[AbstractIterator.keyEncoding] = keyEncoding + options[AbstractIterator.valueEncoding] = valueEncoding + + // Forward encoding options to private API + options.keyEncoding = keyEncoding.format + options.valueEncoding = valueEncoding.format + + if 
(this.#status === 'opening') { + return new DeferredIterator(this, options) + } + + this.#assertOpen() + return this._iterator(options) + } + + _iterator (options) { + return new AbstractIterator(this, options) + } + + keys (options) { + // Also include valueEncoding (though unused) because we may fallback to _iterator() + const keyEncoding = this.keyEncoding(options?.keyEncoding) + const valueEncoding = this.valueEncoding(options?.valueEncoding) + + options = rangeOptions(options, keyEncoding) + + // We need the original encoding options in AbstractKeyIterator in order to decode data + options[AbstractIterator.keyEncoding] = keyEncoding + options[AbstractIterator.valueEncoding] = valueEncoding + + // Forward encoding options to private API + options.keyEncoding = keyEncoding.format + options.valueEncoding = valueEncoding.format + + if (this.#status === 'opening') { + return new DeferredKeyIterator(this, options) + } + + this.#assertOpen() + return this._keys(options) + } + + _keys (options) { + return new DefaultKeyIterator(this, options) + } + + values (options) { + const keyEncoding = this.keyEncoding(options?.keyEncoding) + const valueEncoding = this.valueEncoding(options?.valueEncoding) + + options = rangeOptions(options, keyEncoding) + + // We need the original encoding options in AbstractValueIterator in order to decode data + options[AbstractIterator.keyEncoding] = keyEncoding + options[AbstractIterator.valueEncoding] = valueEncoding + + // Forward encoding options to private API + options.keyEncoding = keyEncoding.format + options.valueEncoding = valueEncoding.format + + if (this.#status === 'opening') { + return new DeferredValueIterator(this, options) + } + + this.#assertOpen() + return this._values(options) + } + + _values (options) { + return new DefaultValueIterator(this, options) + } + + snapshot (options) { + this.#assertOpen() + + // Owner is an undocumented option explained in AbstractSnapshot + if (typeof options !== 'object' || options === null) 
{ + options = this.#defaultOptions.owner + } else if (options.owner == null) { + options = { ...options, owner: this } + } + + return this._snapshot(options) + } + + _snapshot (options) { + throw new ModuleError('Database does not support explicit snapshots', { + code: 'LEVEL_NOT_SUPPORTED' + }) + } + + defer (fn, options) { + if (typeof fn !== 'function') { + throw new TypeError('The first argument must be a function') + } + + this.#queue.add(function (abortError) { + if (!abortError) fn() + }, options) + } + + deferAsync (fn, options) { + if (typeof fn !== 'function') { + throw new TypeError('The first argument must be a function') + } + + return new Promise((resolve, reject) => { + this.#queue.add(function (abortError) { + if (abortError) reject(abortError) + else fn().then(resolve, reject) + }, options) + }) + } + + attachResource (resource) { + if (typeof resource !== 'object' || resource === null || + typeof resource.close !== 'function') { + throw new TypeError('The first argument must be a resource object') + } + + this.#resources.add(resource) + } + + detachResource (resource) { + this.#resources.delete(resource) + } + + _chainedBatch () { + return new DefaultChainedBatch(this) + } + + _assertValidKey (key) { + if (key === null || key === undefined) { + throw new ModuleError('Key cannot be null or undefined', { + code: 'LEVEL_INVALID_KEY' + }) + } + } + + _assertValidValue (value) { + if (value === null || value === undefined) { + throw new ModuleError('Value cannot be null or undefined', { + code: 'LEVEL_INVALID_VALUE' + }) + } + } + + #assertOpen () { + if (this.#status !== 'open') { + throw new ModuleError('Database is not open', { + code: 'LEVEL_DATABASE_NOT_OPEN' + }) + } + } + + #assertUnlocked () { + if (this.#statusLocked) { + throw new ModuleError('Database status is locked', { + code: 'LEVEL_STATUS_LOCKED' + }) + } + } +} + +const { AbstractSublevel } = require('./lib/abstract-sublevel')({ AbstractLevel }) + +exports.AbstractLevel = AbstractLevel 
+exports.AbstractSublevel = AbstractSublevel + +if (typeof Symbol.asyncDispose === 'symbol') { + AbstractLevel.prototype[Symbol.asyncDispose] = async function () { + return this.close() + } +} + +const formats = function (db) { + return Object.keys(db.supports.encodings) + .filter(k => !!db.supports.encodings[k]) +} + +const closeResource = function (resource) { + return resource.close() +} + +// Ensure that we don't work with falsy err values, because JavaScript unfortunately +// allows Promise.reject(null) and similar patterns. Which'd break `if (err)` logic. +const convertRejection = function (reason) { + if (reason instanceof Error) { + return reason + } + + if (Object.prototype.toString.call(reason) === '[object Error]') { + return reason + } + + const hint = reason === null ? 'null' : typeof reason + const msg = `Promise rejection reason must be an Error, received ${hint}` + + return new TypeError(msg) +} + +// Internal utilities, not typed or exported +class NotOpenError extends ModuleError { + constructor (cause) { + super('Database failed to open', { + code: 'LEVEL_DATABASE_NOT_OPEN', + cause + }) + } +} + +class NotClosedError extends ModuleError { + constructor (cause) { + super('Database failed to close', { + code: 'LEVEL_DATABASE_NOT_CLOSED', + cause + }) + } +} diff --git a/abstract-snapshot.js b/abstract-snapshot.js new file mode 100644 index 0000000..db365a9 --- /dev/null +++ b/abstract-snapshot.js @@ -0,0 +1,84 @@ +'use strict' + +const ModuleError = require('module-error') +const { noop } = require('./lib/common') + +class AbstractSnapshot { + #open = true + #referenceCount = 0 + #pendingClose = null + #closePromise = null + #owner + + constructor (options) { + // Defining this as an option gives sublevels the opportunity to create a snapshot + // via their parent database but still designate themselves as the "owner", which + // just means which database will close the snapshot upon db.close(). 
This ensures + // that the API of AbstractSublevel is symmetrical to AbstractLevel. + const owner = options.owner + + if (typeof owner !== 'object' || owner === null) { + const hint = owner === null ? 'null' : typeof owner + throw new TypeError(`Owner must be an abstract-level database, received ${hint}`) + } + + // Also ensures this db will not be garbage collected + this.#owner = owner + this.#owner.attachResource(this) + } + + ref () { + if (!this.#open) { + throw new ModuleError('Snapshot is not open: cannot use snapshot after close()', { + code: 'LEVEL_SNAPSHOT_NOT_OPEN' + }) + } + + this.#referenceCount++ + } + + unref () { + if (--this.#referenceCount === 0) { + this.#pendingClose?.() + } + } + + async close () { + if (this.#closePromise !== null) { + // First caller of close() is responsible for error + return this.#closePromise.catch(noop) + } + + this.#open = false + + // Wrap to avoid race issues on recursive calls + this.#closePromise = new Promise((resolve, reject) => { + this.#pendingClose = () => { + this.#pendingClose = null + privateClose(this, this.#owner).then(resolve, reject) + } + }) + + // If working we'll delay closing, but still handle the close error (if any) here + if (this.#referenceCount === 0) { + this.#pendingClose() + } + + return this.#closePromise + } + + async _close () {} +} + +if (typeof Symbol.asyncDispose === 'symbol') { + AbstractSnapshot.prototype[Symbol.asyncDispose] = async function () { + return this.close() + } +} + +const privateClose = async function (snapshot, owner) { + await snapshot._close() + owner.detachResource(snapshot) +} + +exports.AbstractSnapshot = AbstractSnapshot diff --git a/index.d.ts b/index.d.ts new file mode 100644 index 0000000..2e438dc --- /dev/null +++ b/index.d.ts @@ -0,0 +1,52 @@ +export { + AbstractLevel, + AbstractDatabaseOptions, + AbstractOpenOptions, + AbstractGetOptions, + AbstractGetManyOptions, + AbstractHasOptions, + AbstractHasManyOptions, + AbstractPutOptions, + AbstractDelOptions, + 
AbstractBatchOptions, + AbstractBatchOperation, + AbstractBatchPutOperation, + AbstractBatchDelOperation, + AbstractClearOptions, + AbstractDatabaseHooks, + AbstractHook, + AbstractDeferOptions +} from './types/abstract-level' + +export { + AbstractIterator, + AbstractIteratorOptions, + AbstractSeekOptions, + AbstractKeyIterator, + AbstractKeyIteratorOptions, + AbstractValueIterator, + AbstractValueIteratorOptions +} from './types/abstract-iterator' + +export { + AbstractChainedBatch, + AbstractChainedBatchPutOptions, + AbstractChainedBatchDelOptions, + AbstractChainedBatchWriteOptions +} from './types/abstract-chained-batch' + +export { + AbstractSublevel, + AbstractSublevelOptions +} from './types/abstract-sublevel' + +export { + AbstractSnapshot +} from './types/abstract-snapshot' + +export { + AbstractReadOptions, + AbstractResource +} from './types/interfaces' + +export * as Transcoder from 'level-transcoder' diff --git a/index.js b/index.js new file mode 100644 index 0000000..753269b --- /dev/null +++ b/index.js @@ -0,0 +1,9 @@ +'use strict' + +exports.AbstractLevel = require('./abstract-level').AbstractLevel +exports.AbstractSublevel = require('./abstract-level').AbstractSublevel +exports.AbstractIterator = require('./abstract-iterator').AbstractIterator +exports.AbstractKeyIterator = require('./abstract-iterator').AbstractKeyIterator +exports.AbstractValueIterator = require('./abstract-iterator').AbstractValueIterator +exports.AbstractChainedBatch = require('./abstract-chained-batch').AbstractChainedBatch +exports.AbstractSnapshot = require('./abstract-snapshot').AbstractSnapshot diff --git a/lib/abstract-sublevel-iterator.js b/lib/abstract-sublevel-iterator.js new file mode 100644 index 0000000..cca33c8 --- /dev/null +++ b/lib/abstract-sublevel-iterator.js @@ -0,0 +1,141 @@ +'use strict' + +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator') + +// TODO: unfix natively if db supports it +class 
AbstractSublevelIterator extends AbstractIterator { + #iterator + #unfix + + constructor (db, options, iterator, unfix) { + super(db, options) + + this.#iterator = iterator + this.#unfix = unfix + } + + async _next () { + const entry = await this.#iterator.next() + + if (entry !== undefined) { + const key = entry[0] + if (key !== undefined) entry[0] = this.#unfix(key) + } + + return entry + } + + async _nextv (size, options) { + const entries = await this.#iterator.nextv(size, options) + const unfix = this.#unfix + + for (const entry of entries) { + const key = entry[0] + if (key !== undefined) entry[0] = unfix(key) + } + + return entries + } + + async _all (options) { + const entries = await this.#iterator.all(options) + const unfix = this.#unfix + + for (const entry of entries) { + const key = entry[0] + if (key !== undefined) entry[0] = unfix(key) + } + + return entries + } + + _seek (target, options) { + this.#iterator.seek(target, options) + } + + async _close () { + return this.#iterator.close() + } +} + +class AbstractSublevelKeyIterator extends AbstractKeyIterator { + #iterator + #unfix + + constructor (db, options, iterator, unfix) { + super(db, options) + + this.#iterator = iterator + this.#unfix = unfix + } + + async _next () { + const key = await this.#iterator.next() + return key === undefined ? 
key : this.#unfix(key) + } + + async _nextv (size, options) { + const keys = await this.#iterator.nextv(size, options) + const unfix = this.#unfix + + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + if (key !== undefined) keys[i] = unfix(key) + } + + return keys + } + + async _all (options) { + const keys = await this.#iterator.all(options) + const unfix = this.#unfix + + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + if (key !== undefined) keys[i] = unfix(key) + } + + return keys + } + + _seek (target, options) { + this.#iterator.seek(target, options) + } + + async _close () { + return this.#iterator.close() + } +} + +class AbstractSublevelValueIterator extends AbstractValueIterator { + #iterator + + constructor (db, options, iterator) { + super(db, options) + this.#iterator = iterator + } + + async _next () { + return this.#iterator.next() + } + + async _nextv (size, options) { + return this.#iterator.nextv(size, options) + } + + async _all (options) { + return this.#iterator.all(options) + } + + _seek (target, options) { + this.#iterator.seek(target, options) + } + + async _close () { + return this.#iterator.close() + } +} + +exports.AbstractSublevelIterator = AbstractSublevelIterator +exports.AbstractSublevelKeyIterator = AbstractSublevelKeyIterator +exports.AbstractSublevelValueIterator = AbstractSublevelValueIterator diff --git a/lib/abstract-sublevel.js b/lib/abstract-sublevel.js new file mode 100644 index 0000000..6b102b1 --- /dev/null +++ b/lib/abstract-sublevel.js @@ -0,0 +1,286 @@ +'use strict' + +const ModuleError = require('module-error') +const { Buffer } = require('buffer') || {} +const { + AbstractSublevelIterator, + AbstractSublevelKeyIterator, + AbstractSublevelValueIterator +} = require('./abstract-sublevel-iterator') + +const kRoot = Symbol('root') +const textEncoder = new TextEncoder() +const defaults = { separator: '!' 
} + +// Wrapped to avoid circular dependency +module.exports = function ({ AbstractLevel }) { + class AbstractSublevel extends AbstractLevel { + #globalPrefix + #localPrefix + #localPath + #globalPath + #globalUpperBound + #parent + #unfix + + static defaults (options) { + if (options == null) { + return defaults + } else if (!options.separator) { + return { ...options, separator: '!' } + } else { + return options + } + } + + constructor (db, name, options) { + // Don't forward AbstractSublevel options to AbstractLevel + const { separator, manifest, ...forward } = AbstractSublevel.defaults(options) + const names = [].concat(name).map(name => trim(name, separator)) + + // Reserve one character between separator and name to give us an upper bound + const reserved = separator.charCodeAt(0) + 1 + const root = db[kRoot] || db + + // Keys should sort like ['!a!', '!a!!a!', '!a"', '!aa!', '!b!']. + // Use ASCII for consistent length between string, Buffer and Uint8Array + if (!names.every(name => textEncoder.encode(name).every(x => x > reserved && x < 127))) { + throw new ModuleError(`Sublevel name must use bytes > ${reserved} < ${127}`, { + code: 'LEVEL_INVALID_PREFIX' + }) + } + + super(mergeManifests(db, manifest), forward) + + const localPrefix = names.map(name => separator + name + separator).join('') + const globalPrefix = (db.prefix || '') + localPrefix + const globalUpperBound = globalPrefix.slice(0, -1) + String.fromCharCode(reserved) + + // Most operations are forwarded to the parent database, but clear() and iterators + // still forward to the root database - which is older logic and does not yet need + // to change, until we add some form of preread or postread hooks. + this[kRoot] = root + this.#parent = db + this.#localPath = names + this.#globalPath = db.prefix ? 
db.path().concat(names) : names + this.#globalPrefix = new MultiFormat(globalPrefix) + this.#globalUpperBound = new MultiFormat(globalUpperBound) + this.#localPrefix = new MultiFormat(localPrefix) + this.#unfix = new Unfixer() + } + + prefixKey (key, keyFormat, local) { + const prefix = local ? this.#localPrefix : this.#globalPrefix + + if (keyFormat === 'utf8') { + return prefix.utf8 + key + } else if (key.byteLength === 0) { + // Fast path for empty key (no copy) + return prefix[keyFormat] + } else if (keyFormat === 'view') { + const view = prefix.view + const result = new Uint8Array(view.byteLength + key.byteLength) + + result.set(view, 0) + result.set(key, view.byteLength) + + return result + } else { + const buffer = prefix.buffer + return Buffer.concat([buffer, key], buffer.byteLength + key.byteLength) + } + } + + // Not exposed for now. + #prefixRange (range, keyFormat) { + if (range.gte !== undefined) { + range.gte = this.prefixKey(range.gte, keyFormat, false) + } else if (range.gt !== undefined) { + range.gt = this.prefixKey(range.gt, keyFormat, false) + } else { + range.gte = this.#globalPrefix[keyFormat] + } + + if (range.lte !== undefined) { + range.lte = this.prefixKey(range.lte, keyFormat, false) + } else if (range.lt !== undefined) { + range.lt = this.prefixKey(range.lt, keyFormat, false) + } else { + range.lte = this.#globalUpperBound[keyFormat] + } + } + + get prefix () { + return this.#globalPrefix.utf8 + } + + get db () { + return this[kRoot] + } + + get parent () { + return this.#parent + } + + path (local = false) { + return local ? this.#localPath : this.#globalPath + } + + async _open (options) { + // The parent db must open itself or be (re)opened by the user because + // a sublevel should not initiate state changes on the rest of the db. 
+ await this.#parent.open({ passive: true }) + + // Close sublevel when parent is closed + this.#parent.attachResource(this) + } + + async _close () { + this.#parent.detachResource(this) + } + + async _put (key, value, options) { + return this.#parent.put(key, value, options) + } + + async _get (key, options) { + return this.#parent.get(key, options) + } + + async _getMany (keys, options) { + return this.#parent.getMany(keys, options) + } + + async _has (key, options) { + return this.#parent.has(key, options) + } + + async _hasMany (keys, options) { + return this.#parent.hasMany(keys, options) + } + + async _del (key, options) { + return this.#parent.del(key, options) + } + + async _batch (operations, options) { + return this.#parent.batch(operations, options) + } + + // TODO: call parent instead of root + async _clear (options) { + // TODO (refactor): move to AbstractLevel + this.#prefixRange(options, options.keyEncoding) + return this[kRoot].clear(options) + } + + // TODO: call parent instead of root + _iterator (options) { + // TODO (refactor): move to AbstractLevel + this.#prefixRange(options, options.keyEncoding) + const iterator = this[kRoot].iterator(options) + const unfix = this.#unfix.get(this.#globalPrefix.utf8.length, options.keyEncoding) + return new AbstractSublevelIterator(this, options, iterator, unfix) + } + + _keys (options) { + this.#prefixRange(options, options.keyEncoding) + const iterator = this[kRoot].keys(options) + const unfix = this.#unfix.get(this.#globalPrefix.utf8.length, options.keyEncoding) + return new AbstractSublevelKeyIterator(this, options, iterator, unfix) + } + + _values (options) { + this.#prefixRange(options, options.keyEncoding) + const iterator = this[kRoot].values(options) + return new AbstractSublevelValueIterator(this, options, iterator) + } + + _snapshot (options) { + return this[kRoot].snapshot(options) + } + } + + return { AbstractSublevel } +} + +const mergeManifests = function (parent, manifest) { + return { + // 
Inherit manifest of parent db + ...parent.supports, + + // Disable unsupported features + createIfMissing: false, + errorIfExists: false, + + // Unset additional events because we're not forwarding them + events: {}, + + // Unset additional methods (like approximateSize) which we can't support here unless + // the AbstractSublevel class is overridden by an implementation of `abstract-level`. + additionalMethods: {}, + + // Inherit manifest of custom AbstractSublevel subclass. Such a class is not + // allowed to override encodings. + ...manifest, + + encodings: { + utf8: supportsEncoding(parent, 'utf8'), + buffer: supportsEncoding(parent, 'buffer'), + view: supportsEncoding(parent, 'view') + } + } +} + +const supportsEncoding = function (parent, encoding) { + // Prefer a non-transcoded encoding for optimal performance + return parent.supports.encodings[encoding] + ? parent.keyEncoding(encoding).name === encoding + : false +} + +class MultiFormat { + constructor (key) { + this.utf8 = key + this.view = textEncoder.encode(key) + this.buffer = Buffer ? 
Buffer.from(this.view.buffer, 0, this.view.byteLength) : {} + } +} + +class Unfixer { + constructor () { + this.cache = new Map() + } + + get (prefixLength, keyFormat) { + let unfix = this.cache.get(keyFormat) + + if (unfix === undefined) { + if (keyFormat === 'view') { + unfix = function (prefixLength, key) { + // Avoid Uint8Array#slice() because it copies + return key.subarray(prefixLength) + }.bind(null, prefixLength) + } else { + unfix = function (prefixLength, key) { + // Avoid Buffer#subarray() because it's slow + return key.slice(prefixLength) + }.bind(null, prefixLength) + } + + this.cache.set(keyFormat, unfix) + } + + return unfix + } +} + +const trim = function (str, char) { + let start = 0 + let end = str.length + + while (start < end && str[start] === char) start++ + while (end > start && str[end - 1] === char) end-- + + return str.slice(start, end) +} diff --git a/lib/common.js b/lib/common.js new file mode 100644 index 0000000..7ebc5dd --- /dev/null +++ b/lib/common.js @@ -0,0 +1,33 @@ +'use strict' + +const ModuleError = require('module-error') +const deprecations = new Set() + +exports.getOptions = function (options, def) { + if (typeof options === 'object' && options !== null) { + return options + } + + if (def !== undefined) { + return def + } + + return {} +} + +exports.emptyOptions = Object.freeze({}) +exports.noop = function () {} +exports.resolvedPromise = Promise.resolve() + +exports.deprecate = function (message) { + if (!deprecations.has(message)) { + deprecations.add(message) + + // Avoid polyfills + const c = globalThis.console + + if (typeof c !== 'undefined' && typeof c.warn === 'function') { + c.warn(new ModuleError(message, { code: 'LEVEL_LEGACY' })) + } + } +} diff --git a/lib/default-chained-batch.js b/lib/default-chained-batch.js new file mode 100644 index 0000000..659eb86 --- /dev/null +++ b/lib/default-chained-batch.js @@ -0,0 +1,29 @@ +'use strict' + +const { AbstractChainedBatch } = require('../abstract-chained-batch') + +// 
Functional default for chained batch +class DefaultChainedBatch extends AbstractChainedBatch { + #encoded = [] + + constructor (db) { + // Opt-in to _add() instead of _put() and _del() + super(db, { add: true }) + } + + _add (op) { + this.#encoded.push(op) + } + + _clear () { + this.#encoded = [] + } + + async _write (options) { + // Need to call the private rather than public method, to prevent + // recursion, double prefixing, double encoding and double hooks. + return this.db._batch(this.#encoded, options) + } +} + +exports.DefaultChainedBatch = DefaultChainedBatch diff --git a/lib/default-kv-iterator.js b/lib/default-kv-iterator.js new file mode 100644 index 0000000..37dff50 --- /dev/null +++ b/lib/default-kv-iterator.js @@ -0,0 +1,74 @@ +'use strict' + +const { AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator') + +const kIterator = Symbol('iterator') +const kHandleOne = Symbol('handleOne') +const kHandleMany = Symbol('handleMany') + +class DefaultKeyIterator extends AbstractKeyIterator { + constructor (db, options) { + super(db, options) + + this[kIterator] = db.iterator({ ...options, keys: true, values: false }) + } + + [kHandleOne] (entry) { + return entry[0] + } + + [kHandleMany] (entries) { + for (let i = 0; i < entries.length; i++) { + entries[i] = entries[i][0] + } + } +} + +class DefaultValueIterator extends AbstractValueIterator { + constructor (db, options) { + super(db, options) + + this[kIterator] = db.iterator({ ...options, keys: false, values: true }) + } + + [kHandleOne] (entry) { + return entry[1] + } + + [kHandleMany] (entries) { + for (let i = 0; i < entries.length; i++) { + entries[i] = entries[i][1] + } + } +} + +for (const Iterator of [DefaultKeyIterator, DefaultValueIterator]) { + Iterator.prototype._next = async function () { + const entry = await this[kIterator].next() + return entry === undefined ? 
entry : this[kHandleOne](entry) + } + + Iterator.prototype._nextv = async function (size, options) { + const entries = await this[kIterator].nextv(size, options) + this[kHandleMany](entries) + return entries + } + + Iterator.prototype._all = async function (options) { + const entries = await this[kIterator].all(options) + this[kHandleMany](entries) + return entries + } + + Iterator.prototype._seek = function (target, options) { + this[kIterator].seek(target, options) + } + + Iterator.prototype._close = async function () { + return this[kIterator].close() + } +} + +// Internal utilities, should be typed as AbstractKeyIterator and AbstractValueIterator +exports.DefaultKeyIterator = DefaultKeyIterator +exports.DefaultValueIterator = DefaultValueIterator diff --git a/lib/deferred-iterator.js b/lib/deferred-iterator.js new file mode 100644 index 0000000..8292b61 --- /dev/null +++ b/lib/deferred-iterator.js @@ -0,0 +1,110 @@ +'use strict' + +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator') +const ModuleError = require('module-error') + +const kNut = Symbol('nut') +const kUndefer = Symbol('undefer') +const kFactory = Symbol('factory') +const kSignalOptions = Symbol('signalOptions') + +class DeferredIterator extends AbstractIterator { + constructor (db, options) { + super(db, options) + + this[kNut] = null + this[kFactory] = () => db.iterator(options) + this[kSignalOptions] = { signal: options.signal } + + this.db.defer(() => this[kUndefer](), this[kSignalOptions]) + } +} + +class DeferredKeyIterator extends AbstractKeyIterator { + constructor (db, options) { + super(db, options) + + this[kNut] = null + this[kFactory] = () => db.keys(options) + this[kSignalOptions] = { signal: options.signal } + + this.db.defer(() => this[kUndefer](), this[kSignalOptions]) + } +} + +class DeferredValueIterator extends AbstractValueIterator { + constructor (db, options) { + super(db, options) + + this[kNut] = null + this[kFactory] = () 
=> db.values(options) + this[kSignalOptions] = { signal: options.signal } + + this.db.defer(() => this[kUndefer](), this[kSignalOptions]) + } +} + +for (const Iterator of [DeferredIterator, DeferredKeyIterator, DeferredValueIterator]) { + Iterator.prototype[kUndefer] = function () { + if (this.db.status === 'open') { + this[kNut] = this[kFactory]() + } + } + + Iterator.prototype._next = async function () { + if (this[kNut] !== null) { + return this[kNut].next() + } else if (this.db.status === 'opening') { + return this.db.deferAsync(() => this._next(), this[kSignalOptions]) + } else { + throw new ModuleError('Iterator is not open: cannot call next() after close()', { + code: 'LEVEL_ITERATOR_NOT_OPEN' + }) + } + } + + Iterator.prototype._nextv = async function (size, options) { + if (this[kNut] !== null) { + return this[kNut].nextv(size, options) + } else if (this.db.status === 'opening') { + return this.db.deferAsync(() => this._nextv(size, options), this[kSignalOptions]) + } else { + throw new ModuleError('Iterator is not open: cannot call nextv() after close()', { + code: 'LEVEL_ITERATOR_NOT_OPEN' + }) + } + } + + Iterator.prototype._all = async function (options) { + if (this[kNut] !== null) { + return this[kNut].all() + } else if (this.db.status === 'opening') { + return this.db.deferAsync(() => this._all(options), this[kSignalOptions]) + } else { + throw new ModuleError('Iterator is not open: cannot call all() after close()', { + code: 'LEVEL_ITERATOR_NOT_OPEN' + }) + } + } + + Iterator.prototype._seek = function (target, options) { + if (this[kNut] !== null) { + // TODO: explain why we need _seek() rather than seek() here + this[kNut]._seek(target, options) + } else if (this.db.status === 'opening') { + this.db.defer(() => this._seek(target, options), this[kSignalOptions]) + } + } + + Iterator.prototype._close = async function () { + if (this[kNut] !== null) { + return this[kNut].close() + } else if (this.db.status === 'opening') { + return 
this.db.deferAsync(() => this._close()) + } + } +} + +exports.DeferredIterator = DeferredIterator +exports.DeferredKeyIterator = DeferredKeyIterator +exports.DeferredValueIterator = DeferredValueIterator diff --git a/lib/deferred-queue.js b/lib/deferred-queue.js new file mode 100644 index 0000000..83805c1 --- /dev/null +++ b/lib/deferred-queue.js @@ -0,0 +1,84 @@ +'use strict' + +const { getOptions, emptyOptions } = require('./common') +const { AbortError } = require('./errors') + +class DeferredOperation { + constructor (fn, signal) { + this.fn = fn + this.signal = signal + } +} + +class DeferredQueue { + #operations + #signals + + constructor () { + this.#operations = [] + this.#signals = new Set() + } + + add (fn, options) { + options = getOptions(options, emptyOptions) + const signal = options.signal + + if (signal == null) { + this.#operations.push(new DeferredOperation(fn, null)) + return + } + + if (signal.aborted) { + // Note that this is called in the same tick + fn(new AbortError()) + return + } + + if (!this.#signals.has(signal)) { + this.#signals.add(signal) + signal.addEventListener('abort', this.#handleAbort, { once: true }) + } + + this.#operations.push(new DeferredOperation(fn, signal)) + } + + drain () { + const operations = this.#operations + const signals = this.#signals + + this.#operations = [] + this.#signals = new Set() + + for (const signal of signals) { + signal.removeEventListener('abort', this.#handleAbort) + } + + for (const operation of operations) { + operation.fn.call(null) + } + } + + #handleAbort = (ev) => { + const signal = ev.target + const err = new AbortError() + const aborted = [] + + // TODO: optimize + this.#operations = this.#operations.filter(function (operation) { + if (operation.signal !== null && operation.signal === signal) { + aborted.push(operation) + return false + } else { + return true + } + }) + + this.#signals.delete(signal) + + for (const operation of aborted) { + operation.fn.call(null, err) + } + } +} + 
+exports.DeferredQueue = DeferredQueue diff --git a/lib/errors.js b/lib/errors.js new file mode 100644 index 0000000..0960a89 --- /dev/null +++ b/lib/errors.js @@ -0,0 +1,21 @@ +'use strict' + +const ModuleError = require('module-error') + +class AbortError extends ModuleError { + constructor (cause) { + super('Operation has been aborted', { + code: 'LEVEL_ABORTED', + cause + }) + } + + // Set name to AbortError for web compatibility. See: + // https://dom.spec.whatwg.org/#aborting-ongoing-activities + // https://github.com/nodejs/node/pull/35911#discussion_r515779306 + get name () { + return 'AbortError' + } +} + +exports.AbortError = AbortError diff --git a/lib/event-monitor.js b/lib/event-monitor.js new file mode 100644 index 0000000..829b79b --- /dev/null +++ b/lib/event-monitor.js @@ -0,0 +1,30 @@ +'use strict' + +const { deprecate } = require('./common') + +exports.EventMonitor = class EventMonitor { + constructor (emitter) { + // Track whether listeners are present, because checking + // a boolean is faster than checking listenerCount(). 
+ this.write = false + + const beforeAdded = (name) => { + if (name === 'write') { + this.write = true + } + + if (name === 'put' || name === 'del' || name === 'batch') { + deprecate(`The '${name}' event has been removed in favor of 'write'`) + } + } + + const afterRemoved = (name) => { + if (name === 'write') { + this.write = emitter.listenerCount('write') > 0 + } + } + + emitter.on('newListener', beforeAdded) + emitter.on('removeListener', afterRemoved) + } +} diff --git a/lib/hooks.js b/lib/hooks.js new file mode 100644 index 0000000..4afa567 --- /dev/null +++ b/lib/hooks.js @@ -0,0 +1,78 @@ +'use strict' + +const { noop } = require('./common') + +class DatabaseHooks { + constructor () { + this.postopen = new Hook({ async: true }) + this.prewrite = new Hook({ async: false }) + this.newsub = new Hook({ async: false }) + } +} + +class Hook { + #functions = new Set() + #isAsync + + constructor (options) { + this.#isAsync = options.async + + // Offer a fast way to check if hook functions are present. We could also expose a + // size getter, which would be slower, or check it by hook.run !== noop, which would + // not allow userland to do the same check. + this.noop = true + this.run = this.#runner() + } + + add (fn) { + // Validate now rather than in asynchronous code paths + assertFunction(fn) + this.#functions.add(fn) + this.noop = false + this.run = this.#runner() + } + + delete (fn) { + assertFunction(fn) + this.#functions.delete(fn) + this.noop = this.#functions.size === 0 + this.run = this.#runner() + } + + #runner () { + if (this.noop) { + return noop + } else if (this.#functions.size === 1) { + const [fn] = this.#functions + return fn + } else if (this.#isAsync) { + // The run function should not reference hook, so that consumers like chained batch + // and db.open() can save a reference to hook.run and safely assume it won't change + // during their lifetime or async work. 
+ const run = async function (functions, ...args) { + for (const fn of functions) { + await fn(...args) + } + } + + return run.bind(null, Array.from(this.#functions)) + } else { + const run = function (functions, ...args) { + for (const fn of functions) { + fn(...args) + } + } + + return run.bind(null, Array.from(this.#functions)) + } + } +} + +const assertFunction = function (fn) { + if (typeof fn !== 'function') { + const hint = fn === null ? 'null' : typeof fn + throw new TypeError(`The first argument must be a function, received ${hint}`) + } +} + +exports.DatabaseHooks = DatabaseHooks diff --git a/lib/prefixes.js b/lib/prefixes.js new file mode 100644 index 0000000..13984f8 --- /dev/null +++ b/lib/prefixes.js @@ -0,0 +1,21 @@ +'use strict' + +exports.prefixDescendantKey = function (key, keyFormat, descendant, ancestor) { + while (descendant !== null && descendant !== ancestor) { + key = descendant.prefixKey(key, keyFormat, true) + descendant = descendant.parent + } + + return key +} + +// Check if db is a descendant of ancestor +// TODO: optimize, when used alongside prefixDescendantKey +// which means we visit parents twice. +exports.isDescendant = function (db, ancestor) { + while (true) { + if (db.parent == null) return false + if (db.parent === ancestor) return true + db = db.parent + } +} diff --git a/lib/prewrite-batch.js b/lib/prewrite-batch.js new file mode 100644 index 0000000..9c37728 --- /dev/null +++ b/lib/prewrite-batch.js @@ -0,0 +1,96 @@ +'use strict' + +const { prefixDescendantKey, isDescendant } = require('./prefixes') + +// An interface for prewrite hook functions to add operations +class PrewriteBatch { + #db + #privateOperations + #publicOperations + + constructor (db, privateOperations, publicOperations) { + this.#db = db + + // Note: if for db.batch([]), these arrays include input operations (or empty slots + // for them) but if for chained batch then it does not. Small implementation detail. 
+ this.#privateOperations = privateOperations + this.#publicOperations = publicOperations + } + + add (op) { + const isPut = op.type === 'put' + const delegated = op.sublevel != null + const db = delegated ? op.sublevel : this.#db + + db._assertValidKey(op.key) + op.keyEncoding = db.keyEncoding(op.keyEncoding) + + if (isPut) { + db._assertValidValue(op.value) + op.valueEncoding = db.valueEncoding(op.valueEncoding) + } else if (op.type !== 'del') { + throw new TypeError("A batch operation must have a type property that is 'put' or 'del'") + } + + // Encode data for private API + const keyEncoding = op.keyEncoding + const preencodedKey = keyEncoding.encode(op.key) + const keyFormat = keyEncoding.format + + // If the sublevel is not a descendant then forward that option to the parent db + // so that we don't erroneously add our own prefix to the key of the operation. + const siblings = delegated && !isDescendant(op.sublevel, this.#db) && op.sublevel !== this.#db + const encodedKey = delegated && !siblings + ? prefixDescendantKey(preencodedKey, keyFormat, db, this.#db) + : preencodedKey + + // Only prefix once + if (delegated && !siblings) { + op.sublevel = null + } + + let publicOperation = null + + // If the sublevel is not a descendant then we shouldn't emit events + if (this.#publicOperations !== null && !siblings) { + // Clone op before we mutate it for the private API + publicOperation = { ...op } + publicOperation.encodedKey = encodedKey + + if (delegated) { + // Ensure emitted data makes sense in the context of this.#db + publicOperation.key = encodedKey + publicOperation.keyEncoding = this.#db.keyEncoding(keyFormat) + } + + this.#publicOperations.push(publicOperation) + } + + // If we're forwarding the sublevel option then don't prefix the key yet + op.key = siblings ? 
encodedKey : this.#db.prefixKey(encodedKey, keyFormat, true) + op.keyEncoding = keyFormat + + if (isPut) { + const valueEncoding = op.valueEncoding + const encodedValue = valueEncoding.encode(op.value) + const valueFormat = valueEncoding.format + + op.value = encodedValue + op.valueEncoding = valueFormat + + if (publicOperation !== null) { + publicOperation.encodedValue = encodedValue + + if (delegated) { + publicOperation.value = encodedValue + publicOperation.valueEncoding = this.#db.valueEncoding(valueFormat) + } + } + } + + this.#privateOperations.push(op) + return this + } +} + +exports.PrewriteBatch = PrewriteBatch diff --git a/lib/range-options.js b/lib/range-options.js new file mode 100644 index 0000000..d5a315d --- /dev/null +++ b/lib/range-options.js @@ -0,0 +1,26 @@ +'use strict' + +const hasOwnProperty = Object.prototype.hasOwnProperty +const rangeOptions = new Set(['lt', 'lte', 'gt', 'gte']) + +module.exports = function (options, keyEncoding) { + const result = {} + + for (const k in options) { + if (!hasOwnProperty.call(options, k)) continue + if (k === 'keyEncoding' || k === 'valueEncoding') continue + + if (rangeOptions.has(k)) { + // Note that we don't reject nullish and empty options here. While + // those types are invalid as keys, they are valid as range options. + result[k] = keyEncoding.encode(options[k]) + } else { + result[k] = options[k] + } + } + + result.reverse = !!result.reverse + result.limit = Number.isInteger(result.limit) && result.limit >= 0 ? 
result.limit : -1 + + return result +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..dcd1d48 --- /dev/null +++ b/package.json @@ -0,0 +1,65 @@ +{ + "name": "abstract-level", + "version": "3.0.1", + "description": "Abstract class for a lexicographically sorted key-value database", + "license": "MIT", + "main": "index.js", + "types": "./index.d.ts", + "scripts": { + "test": "standard && hallmark && (nyc -s node test/self.js | tap-arc) && nyc report", + "test-pessimistic": "node test/self.js | tap-arc -pv", + "test-browsers": "airtap --coverage test/self.js", + "test-electron": "airtap -p electron --coverage test/self.js | tap-arc", + "coverage": "nyc report -r lcovonly" + }, + "files": [ + "abstract-chained-batch.js", + "abstract-iterator.js", + "abstract-level.js", + "abstract-snapshot.js", + "index.js", + "index.d.ts", + "lib", + "test", + "types", + "CHANGELOG.md", + "UPGRADING.md" + ], + "dependencies": { + "buffer": "^6.0.3", + "is-buffer": "^2.0.5", + "level-supports": "^6.2.0", + "level-transcoder": "^1.0.1", + "maybe-combine-errors": "^1.0.0", + "module-error": "^1.0.1" + }, + "devDependencies": { + "@babel/preset-env": "^7.26.0", + "@types/node": "^22.10.2", + "@voxpelli/tsconfig": "^15.1.0", + "airtap": "^5.0.0", + "airtap-electron": "^1.0.0", + "airtap-playwright": "^1.0.1", + "babelify": "^10.0.0", + "electron": "^33.2.1", + "hallmark": "^5.0.1", + "nyc": "^17.1.0", + "standard": "^17.1.2", + "tap-arc": "^1.3.2", + "tape": "^5.9.0", + "typescript": "^5.7.2" + }, + "repository": { + "type": "git", + "url": "https://github.com/Level/abstract-level.git" + }, + "homepage": "https://github.com/Level/abstract-level", + "keywords": [ + "abstract-level", + "level", + "leveldb" + ], + "engines": { + "node": ">=18" + } +} diff --git a/test/async-iterator-test.js b/test/async-iterator-test.js new file mode 100644 index 0000000..66111b9 --- /dev/null +++ b/test/async-iterator-test.js @@ -0,0 +1,132 @@ +'use strict' + +const input = [{ 
key: '1', value: '1' }, { key: '2', value: '2' }] + +let db + +exports.setup = function (test, testCommon) { + test('async iterator setup', async function (t) { + db = testCommon.factory() + await db.open() + return db.batch(input.map(entry => ({ ...entry, type: 'put' }))) + }) +} + +exports.asyncIterator = function (test, testCommon) { + for (const mode of ['iterator', 'keys', 'values']) { + test(`for await...of ${mode}()`, async function (t) { + t.plan(1) + + const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const output = [] + + for await (const item of it) { + output.push(item) + } + + t.same(output, input.map(({ key, value }) => { + return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value + })) + }) + + testCommon.supports.permanence && test(`for await...of ${mode}() (deferred)`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + await db.batch(input.map(entry => ({ ...entry, type: 'put' }))) + await db.close() + + // Don't await + db.open() + + const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const output = [] + + for await (const item of it) { + output.push(item) + } + + t.same(output, input.map(({ key, value }) => { + return mode === 'iterator' ? [key, value] : mode === 'keys' ? 
key : value + })) + + await db.close() + }) + + testCommon.supports.implicitSnapshots && test(`for await...of ${mode}() (deferred, with snapshot)`, async function (t) { + t.plan(2) + + const db = testCommon.factory() + const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const promise = db.batch(input.map(entry => ({ ...entry, type: 'put' }))) + const output = [] + + for await (const item of it) { + output.push(item) + } + + t.same(output, [], 'used snapshot') + + // Wait for data to be written + await promise + + for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) { + output.push(item) + } + + t.same(output, input.map(({ key, value }) => { + return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value + })) + + await db.close() + }) + + for (const deferred of [false, true]) { + test(`for await...of ${mode}() (empty, deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + const entries = [] + + if (!deferred) await db.open() + + for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) { + entries.push(item) + } + + t.same(entries, []) + await db.close() + }) + } + + test(`for await...of ${mode}() does not permit reuse`, async function (t) { + t.plan(3) + + const it = db[mode]() + + // eslint-disable-next-line no-unused-vars + for await (const item of it) { + t.pass('nexted') + } + + try { + // eslint-disable-next-line no-unused-vars + for await (const item of it) { + t.fail('should not be called') + } + } catch (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + } + }) + } +} + +exports.teardown = async function (test, testCommon) { + test('async iterator teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setup(test, testCommon) + exports.asyncIterator(test, testCommon) + exports.teardown(test, testCommon) +} diff --git a/test/batch-test.js b/test/batch-test.js new file mode 100644 index 
0000000..c85d344 --- /dev/null +++ b/test/batch-test.js @@ -0,0 +1,214 @@ +'use strict' + +const { Buffer } = require('buffer') +const { illegalKeys, illegalValues } = require('./util') + +let db + +exports.setUp = function (test, testCommon) { + test('batch([]) setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + test('batch([]) with missing value fails', function (t) { + t.plan(1) + + db.batch([{ type: 'put', key: 'foo1' }]).catch((err) => { + t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code') + }) + }) + + test('batch([]) with illegal values fails', function (t) { + t.plan(illegalValues.length * 2) + + for (const { name, value } of illegalValues) { + db.batch([{ type: 'put', key: 'foo1', value }]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code') + }) + } + }) + + test('batch([]) with missing key fails', function (t) { + t.plan(1) + + db.batch([{ type: 'put', value: 'foo1' }]).catch(function (err) { + t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code') + }) + }) + + test('batch([]) with illegal keys fails', function (t) { + t.plan(illegalKeys.length * 2) + + for (const { name, key } of illegalKeys) { + db.batch([{ type: 'put', key, value: 'foo1' }]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + } + }) + + test('batch([]) with missing or incorrect type fails', function (t) { + t.plan(4) + + db.batch([{ key: 'key', value: 'value' }]).catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message') + }) + + db.batch([{ key: 'key', value: 'value', type: 'foo' }]).catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err.message, "A batch operation must have a type 
property that is 'put' or 'del'", 'correct error message') + }) + }) + + test('batch([]) with missing or nullish operations fails', function (t) { + t.plan(2 * 2) + + for (const array of [null, undefined]) { + db.batch(array).catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err.message, "The first argument 'operations' must be an array", 'correct error message') + }) + } + }) + + test('batch([]) with empty operations array and empty options', async function (t) { + await db.batch([]) + await db.batch([], null) + await db.batch([], undefined) + await db.batch([], {}) + }) + + ;[null, undefined, 1, true].forEach(function (operation) { + const type = operation === null ? 'null' : typeof operation + + test(`batch([]) with ${type} operation fails`, function (t) { + t.plan(1) + + db.batch([operation]).catch(function (err) { + // We can either explicitly check the type of the op and throw a TypeError, + // or skip that for performance reasons in which case the next thing checked + // will be op.key or op.type. Doesn't matter, because we've documented that + // TypeErrors and such are not part of the semver contract. 
+ t.ok(err.name === 'TypeError' || err.code === 'LEVEL_INVALID_KEY') + }) + }) + }) +} + +exports.batch = function (test, testCommon) { + test('simple batch([])', async function (t) { + const db = testCommon.factory() + await db.open() + await db.batch([{ type: 'del', key: 'non-existent' }]) // should not error + t.is(await db.get('foo'), undefined, 'not found') + await db.batch([{ type: 'put', key: 'foo', value: 'bar' }]) + t.is(await db.get('foo'), 'bar') + await db.batch([{ type: 'del', key: 'foo' }]) + t.is(await db.get('foo'), undefined, 'not found') + return db.close() + }) + + test('batch([]) with multiple operations', async function (t) { + t.plan(3) + + await db.batch([ + { type: 'put', key: 'foobatch1', value: 'bar1' }, + { type: 'put', key: 'foobatch2', value: 'bar2' }, + { type: 'put', key: 'foobatch3', value: 'bar3' }, + { type: 'del', key: 'foobatch2' } + ]) + + const promises = [ + db.get('foobatch1').then(function (value) { + t.is(value, 'bar1') + }), + db.get('foobatch2').then(function (value) { + t.is(value, undefined, 'not found') + }), + db.get('foobatch3').then(function (value) { + t.is(value, 'bar3') + }) + ] + + return Promise.all(promises) + }) + + for (const encoding of ['utf8', 'buffer', 'view']) { + if (!testCommon.supports.encodings[encoding]) continue + + // NOTE: adapted from memdown + test(`empty values in batch with ${encoding} valueEncoding`, async function (t) { + const db = testCommon.factory({ valueEncoding: encoding }) + const values = ['', Uint8Array.from([]), Buffer.alloc(0)] + const expected = encoding === 'utf8' ? values[0] : encoding === 'view' ? 
values[1] : values[2] + + await db.open() + await db.batch(values.map((value, i) => ({ type: 'put', key: String(i), value }))) + + for (let i = 0; i < values.length; i++) { + const value = await db.get(String(i)) + + // Buffer is a Uint8Array, so this is allowed + if (encoding === 'view' && Buffer.isBuffer(value)) { + t.same(value, values[2]) + } else { + t.same(value, expected) + } + } + + return db.close() + }) + + test(`empty keys in batch with ${encoding} keyEncoding`, async function (t) { + const db = testCommon.factory({ keyEncoding: encoding }) + const keys = ['', Uint8Array.from([]), Buffer.alloc(0)] + + await db.open() + + for (let i = 0; i < keys.length; i++) { + await db.batch([{ type: 'put', key: keys[i], value: String(i) }]) + t.same(await db.get(keys[i]), String(i), `got value ${i}`) + } + + return db.close() + }) + } +} + +exports.atomic = function (test, testCommon) { + test('batch([]) is atomic', async function (t) { + t.plan(3) + + try { + await db.batch([ + { type: 'put', key: 'foobah1', value: 'bar1' }, + { type: 'put', value: 'bar2' }, + { type: 'put', key: 'foobah3', value: 'bar3' } + ]) + } catch (err) { + t.is(err.code, 'LEVEL_INVALID_KEY', 'should error and not commit anything') + } + + t.is(await db.get('foobah1'), undefined, 'not found') + t.is(await db.get('foobah3'), undefined, 'not found') + }) +} + +exports.tearDown = function (test, testCommon) { + test('batch([]) teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.batch(test, testCommon) + exports.atomic(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/chained-batch-test.js b/test/chained-batch-test.js new file mode 100644 index 0000000..423bf8e --- /dev/null +++ b/test/chained-batch-test.js @@ -0,0 +1,315 @@ +'use strict' + +let db + +exports.setUp = function (test, testCommon) { + test('chained batch setup', async function 
(t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + test('chained batch has db reference', async function (t) { + const batch = db.batch() + t.ok(batch.db === db) + return batch.close() + }) + + test('chained batch.put() with missing or nullish value fails', async function (t) { + t.plan(3 * 2) + + for (const args of [[null], [undefined], []]) { + const batch = db.batch() + + try { + batch.put('key', ...args) + } catch (err) { + t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code') + t.is(batch.length, 0, 'length is not incremented on error') + } + + await batch.close() + } + }) + + test('chained batch.put() with missing of nullish key fails', async function (t) { + t.plan(3 * 2) + + for (const args of [[], [null, 'foo'], [undefined, 'foo']]) { + const batch = db.batch() + + try { + batch.put(...args) + } catch (err) { + t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code') + t.is(batch.length, 0, 'length is not incremented on error') + } + + await batch.close() + } + }) + + test('chained batch.del() with missing or nullish key fails', async function (t) { + t.plan(3 * 2) + + for (const args of [[null], [undefined], []]) { + const batch = db.batch() + + try { + batch.del(...args) + } catch (err) { + t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code') + t.is(batch.length, 0, 'length is not incremented on error') + } + + await batch.close() + } + }) + + test('chained batch.clear() does not throw if empty', async function (t) { + return db.batch().clear().close() + }) + + test('chained batch.put() after write() fails', async function (t) { + t.plan(1) + + const batch = db.batch().put('foo', 'bar') + await batch.write() + + try { + batch.put('boom', 'bang') + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code') + } + }) + + test('chained batch.del() after write() fails', async function (t) { + t.plan(1) + + const batch = db.batch().put('foo', 'bar') + await 
batch.write() + + try { + batch.del('foo') + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code') + } + }) + + test('chained batch.clear() after write() fails', async function (t) { + t.plan(1) + + const batch = db.batch().put('foo', 'bar') + await batch.write() + + try { + batch.clear() + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code') + } + }) + + test('chained batch.write() after write() fails', async function (t) { + t.plan(1) + + const batch = db.batch().put('foo', 'bar') + await batch.write() + + try { + await batch.write() + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code') + } + }) + + test('chained batch.write() after close() fails', async function (t) { + t.plan(1) + + const batch = db.batch().put('foo', 'bar') + await batch.close() + + try { + await batch.write() + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code') + } + }) + + test('chained batch.write() with no operations', async function (t) { + return db.batch().write() + }) + + test('chained batch.close() with no operations', async function (t) { + return db.batch().close() + }) + + test('chained batch.close() is idempotent', async function (t) { + const batch = db.batch() + await batch.close() + await batch.close() + return Promise.all([batch.close(), batch.close()]) + }) +} + +exports.batch = function (test, testCommon) { + test('simple chained batch', async function (t) { + await db.batch([ + { type: 'put', key: 'one', value: '1' }, + { type: 'put', key: 'two', value: '2' }, + { type: 'put', key: 'three', value: '3' } + ]) + + const batch = db.batch() + .put('1', 'one') + .del('2', 'two') + .put('3', 'three') + + t.is(batch.length, 3, 'length was incremented') + + batch.clear() + t.is(batch.length, 0, 'length is reset') + + batch.put('one', 'I') + .put('two', 'II') + .del('three') + .put('foo', 'bar') + + t.is(batch.length, 4, 'length was incremented') + + await batch.write() + + 
t.same(await db.iterator().all(), [ + ['foo', 'bar'], + ['one', 'I'], + ['two', 'II'] + ]) + }) + + test('chained batch requires database to be open', async function (t) { + t.plan(5) + + const db1 = testCommon.factory() + const db2 = testCommon.factory() + + try { + db1.batch() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + + await db2.open() + const batch = db2.batch() + await db2.close() + + try { + batch.put('beep', 'boop') + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN') + } + + try { + batch.del('456') + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN') + } + + try { + batch.clear() + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN') + } + + try { + await batch.write() + } catch (err) { + t.is(err.code, 'LEVEL_BATCH_NOT_OPEN') + } + + // Should be a noop (already closed) + await batch.close() + + return Promise.all([db1.close(), db2.close()]) + }) + + // NOTE: adapted from levelup + test('chained batch with per-operation encoding options', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + const utf8 = db.keyEncoding('utf8') + const json = db.valueEncoding('json') + + db.once('write', function (operations) { + t.same(operations, [ + { type: 'put', key: 'a', value: 'a', keyEncoding: utf8, valueEncoding: json, encodedKey: utf8.encode('a'), encodedValue: utf8.encode('"a"') }, + { type: 'put', key: 'b', value: 'b', keyEncoding: utf8, valueEncoding: utf8, encodedKey: utf8.encode('b'), encodedValue: utf8.encode('b') }, + { type: 'put', key: '"c"', value: 'c', keyEncoding: utf8, valueEncoding: utf8, encodedKey: utf8.encode('"c"'), encodedValue: utf8.encode('c') }, + { type: 'del', key: 'c', keyEncoding: json, encodedKey: utf8.encode('"c"'), arbitraryOption: true } + ]) + }) + + await db.batch() + .put('a', 'a', { valueEncoding: 'json' }) + .put('b', 'b') + .put('"c"', 'c') + .del('c', { keyEncoding: 'json', arbitraryOption: true }) + .write() + + t.same(await db.iterator().all(), [ 
+ ['a', '"a"'], + ['b', 'b'] + ]) + + return db.close() + }) +} + +exports.events = function (test, testCommon) { + test('db.close() on chained batch write event', async function (t) { + const db = testCommon.factory() + await db.open() + + let promise + + db.on('write', function () { + // Should not interfere with the current write() operation + promise = db.close() + }) + + await db.batch().put('a', 'b').write() + await promise + + t.ok(promise, 'event was emitted') + }) +} + +exports.tearDown = function (test, testCommon) { + test('chained batch teardown', async function (t) { + return db.close() + }) +} + +exports.dispose = function (test, testCommon) { + // Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management) + Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) { + const db = testCommon.factory() + await db.open() + + const batch = db.batch() + await batch[Symbol.asyncDispose]() + + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.batch(test, testCommon) + exports.events(test, testCommon) + exports.tearDown(test, testCommon) + exports.dispose(test, testCommon) +} diff --git a/test/clear-range-test.js b/test/clear-range-test.js new file mode 100644 index 0000000..53d6859 --- /dev/null +++ b/test/clear-range-test.js @@ -0,0 +1,262 @@ +'use strict' + +const data = (function () { + const d = [] + let i = 0 + let k + for (; i < 100; i++) { + k = (i < 10 ? 
'0' : '') + i + d.push({ + key: k, + value: String(Math.random()) + }) + } + return d +}()) + +exports.range = function (test, testCommon) { + function rangeTest (name, opts, expected) { + test('clear() range with ' + name, async function (t) { + const db = await prepare() + + await db.clear(opts) + await verify(t, db, expected) + + return db.close() + }) + } + + async function prepare (t) { + const db = testCommon.factory() + + await db.open() + await db.batch(data.map(function ({ key, value }) { + return { type: 'put', key, value } + })) + + return db + } + + async function verify (t, db, expected) { + const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const entries = await it.all() + + t.is(entries.length, expected.length, 'correct number of entries') + t.same(entries, expected.map(kv => [kv.key, kv.value])) + } + + function exclude (data, start, end, expectedLength) { + data = data.slice() + const removed = data.splice(start, end - start + 1) // Inclusive + if (expectedLength != null) checkLength(removed, expectedLength) + return data + } + + // For sanity checks on test arguments + function checkLength (arr, length) { + if (arr.length !== length) { + throw new RangeError('Expected ' + length + ' elements, got ' + arr.length) + } + + return arr + } + + rangeTest('no options', {}, []) + + // Reversing has no effect without limit + rangeTest('reverse=true', { + reverse: true + }, []) + + rangeTest('gte=00', { + gte: '00' + }, []) + + rangeTest('gte=50', { + gte: '50' + }, data.slice(0, 50)) + + rangeTest('lte=50 and reverse=true', { + lte: '50', + reverse: true + }, data.slice(51)) + + rangeTest('gte=49.5 (midway)', { + gte: '49.5' + }, data.slice(0, 50)) + + rangeTest('gte=49999 (midway)', { + gte: '49999' + }, data.slice(0, 50)) + + rangeTest('lte=49.5 (midway) and reverse=true', { + lte: '49.5', + reverse: true + }, data.slice(50)) + + rangeTest('lt=49.5 (midway) and reverse=true', { + lt: '49.5', + reverse: true + }, data.slice(50)) + + 
rangeTest('lt=50 and reverse=true', { + lt: '50', + reverse: true + }, data.slice(50)) + + rangeTest('lte=50', { + lte: '50' + }, data.slice(51)) + + rangeTest('lte=50.5 (midway)', { + lte: '50.5' + }, data.slice(51)) + + rangeTest('lte=50555 (midway)', { + lte: '50555' + }, data.slice(51)) + + rangeTest('lt=50555 (midway)', { + lt: '50555' + }, data.slice(51)) + + rangeTest('gte=50.5 (midway) and reverse=true', { + gte: '50.5', + reverse: true + }, data.slice(0, 51)) + + rangeTest('gt=50.5 (midway) and reverse=true', { + gt: '50.5', + reverse: true + }, data.slice(0, 51)) + + rangeTest('gt=50 and reverse=true', { + gt: '50', + reverse: true + }, data.slice(0, 51)) + + // First key is actually '00' so it should avoid it + rangeTest('lte=0', { + lte: '0' + }, data) + + // First key is actually '00' so it should avoid it + rangeTest('lt=0', { + lt: '0' + }, data) + + rangeTest('gte=30 and lte=70', { + gte: '30', + lte: '70' + }, exclude(data, 30, 70)) + + // The gte and lte options should take precedence over gt and lt respectively. + rangeTest('gte=30 and lte=70 and gt=40 and lt=60', { + gte: '30', + lte: '70', + gt: '40', + lt: '60' + }, exclude(data, 30, 70)) + + // Also test the other way around: if gt and lt were to select a bigger range. 
+ rangeTest('gte=30 and lte=70 and gt=20 and lt=80', { + gte: '30', + lte: '70', + gt: '20', + lt: '80' + }, exclude(data, 30, 70)) + + rangeTest('gt=29 and lt=71', { + gt: '29', + lt: '71' + }, exclude(data, 30, 70)) + + rangeTest('gte=30 and lte=70 and reverse=true', { + lte: '70', + gte: '30', + reverse: true + }, exclude(data, 30, 70)) + + rangeTest('gt=29 and lt=71 and reverse=true', { + lt: '71', + gt: '29', + reverse: true + }, exclude(data, 30, 70)) + + rangeTest('limit=20', { + limit: 20 + }, data.slice(20)) + + rangeTest('limit=20 and gte=20', { + limit: 20, + gte: '20' + }, exclude(data, 20, 39, 20)) + + rangeTest('limit=20 and reverse=true', { + limit: 20, + reverse: true + }, data.slice(0, -20)) + + rangeTest('limit=20 and lte=79 and reverse=true', { + limit: 20, + lte: '79', + reverse: true + }, exclude(data, 60, 79, 20)) + + rangeTest('limit=-1 should clear whole database', { + limit: -1 + }, []) + + rangeTest('limit=0 should not clear anything', { + limit: 0 + }, data) + + rangeTest('lte after limit', { + limit: 20, + lte: '50' + }, data.slice(20)) + + rangeTest('lte before limit', { + limit: 50, + lte: '19' + }, data.slice(20)) + + rangeTest('gte after database end', { + gte: '9a' + }, data) + + rangeTest('gt after database end', { + gt: '9a' + }, data) + + rangeTest('lte after database end and reverse=true', { + lte: '9a', + reverse: true + }, []) + + rangeTest('lte and gte after database and reverse=true', { + lte: '9b', + gte: '9a', + reverse: true + }, data) + + rangeTest('lt and gt after database and reverse=true', { + lt: '9b', + gt: '9a', + reverse: true + }, data) + + rangeTest('gt greater than lt', { + gt: '20', + lt: '10' + }, data) + + rangeTest('gte greater than lte', { + gte: '20', + lte: '10' + }, data) +} + +exports.all = function (test, testCommon) { + exports.range(test, testCommon) +} diff --git a/test/clear-test.js b/test/clear-test.js new file mode 100644 index 0000000..799e41d --- /dev/null +++ b/test/clear-test.js @@ -0,0 
+1,110 @@ +'use strict' + +const isBuffer = require('is-buffer') +const { Buffer } = require('buffer') + +exports.clear = function (test, testCommon) { + makeTest('string', ['a', 'b']) + + if (testCommon.supports.encodings.buffer) { + makeTest('buffer', [Buffer.from('a'), Buffer.from('b')]) + makeTest('mixed', [Buffer.from('a'), 'b']) + + // These keys would be equal when compared as utf8 strings + makeTest('non-utf8 buffer', [Buffer.from('80', 'hex'), Buffer.from('c0', 'hex')]) + } + + function makeTest (type, keys) { + test('simple clear() on ' + type + ' keys', async function (t) { + const db = testCommon.factory() + const ops = keys.map(function (key) { + return { + type: 'put', + key, + value: 'foo', + keyEncoding: isBuffer(key) ? 'buffer' : 'utf8' + } + }) + + await db.open() + await db.batch(ops) + t.is((await db.iterator().all()).length, keys.length, 'has entries') + + await db.clear() + t.is((await db.iterator().all()).length, 0, 'has no entries') + + return db.close() + }) + } + + // NOTE: adapted from levelup + for (const deferred of [false, true]) { + for (const [gte, keyEncoding] of [['"b"', 'utf8'], ['b', 'json']]) { + test(`clear() with ${keyEncoding} encoding (deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + + await db.open() + await db.batch([ + { type: 'put', key: '"a"', value: 'a' }, + { type: 'put', key: '"b"', value: 'b' } + ]) + + let promise + + if (deferred) { + await db.close() + t.is(db.status, 'closed') + promise = db.open() + t.is(db.status, 'opening') + } + + await db.clear({ gte, keyEncoding }) + await promise + + const keys = await db.keys().all() + t.same(keys, ['"a"'], 'got expected keys') + + return db.close() + }) + } + } +} + +exports.events = function (test, testCommon) { + test('test clear() with options emits clear event', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + t.ok(db.supports.events.clear) + + db.on('clear', function (options) { + 
t.same(options, { gt: 567, custom: 123 }) + }) + + await db.clear({ gt: 567, custom: 123 }) + return db.close() + }) + + test('test clear() without options emits clear event', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + t.ok(db.supports.events.clear) + + db.on('clear', function (options) { + t.same(options, {}) + }) + + await db.clear() + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.events(test, testCommon) + exports.clear(test, testCommon) +} diff --git a/test/common.js b/test/common.js new file mode 100644 index 0000000..fe56008 --- /dev/null +++ b/test/common.js @@ -0,0 +1,90 @@ +'use strict' + +const kNone = Symbol('none') +const kProtected = Symbol('protected') + +function testCommon (options) { + const factory = options.factory + const test = options.test + + if (typeof factory !== 'function') { + throw new TypeError('factory must be a function') + } + + if (typeof test !== 'function') { + throw new TypeError('test must be a function') + } + + if (options.legacyRange != null) { + throw new Error('The legacyRange option has been removed') + } + + let supports = kNone + + return protect(options, { + test, + factory, + internals: options.internals || {}, + + // Expose manifest through testCommon to more easily skip tests based on + // supported features. Use a getter to only create a db once. Implicitly + // we also test that the manifest doesn't change after the db constructor. + get supports () { + if (supports === kNone) this.supports = this.factory().supports + return supports + }, + + // Prefer assigning early via manifest-test unless test.only() is used + // in which case we create the manifest on-demand. Copy it to be safe. + set supports (value) { + if (supports === kNone) supports = JSON.parse(JSON.stringify(value)) + } + }) +} + +module.exports = testCommon + +// To help migrating from abstract-leveldown. 
+// Throw if test suite options are used instead of db.supports +function protect (options, testCommon) { + const legacyOptions = [ + ['createIfMissing', true], + ['errorIfExists', true], + ['snapshots', true], + ['seek', true], + ['encodings', true], + ['deferredOpen', true], + ['streams', true], + ['clear', true], + ['getMany', true], + ['bufferKeys', false], + ['serialize', false], + ['idempotentOpen', false], + ['passiveOpen', false], + ['openCallback', false] + ] + + Object.defineProperty(testCommon, kProtected, { + value: true + }) + + for (const [k, exists] of legacyOptions) { + const msg = exists ? 'has moved to db.supports' : 'has been removed' + + // Options may be a testCommon instance + if (!options[kProtected] && k in options) { + throw new Error(`The test suite option '${k}' ${msg}`) + } + + Object.defineProperty(testCommon, k, { + get () { + throw new Error(`The test suite option '${k}' ${msg}`) + }, + set () { + throw new Error(`The test suite option '${k}' ${msg}`) + } + }) + } + + return testCommon +} diff --git a/test/deferred-open-test.js b/test/deferred-open-test.js new file mode 100644 index 0000000..4786bba --- /dev/null +++ b/test/deferred-open-test.js @@ -0,0 +1,209 @@ +'use strict' + +const { DeferredIterator } = require('../lib/deferred-iterator') + +exports.all = function (test, testCommon) { + async function verifyValues (t, db, entries) { + const promises = [] + + for (let i = 1; i <= entries; i++) { + promises.push(db.get('k' + i).then((v) => { + t.is(v, 'v' + i, 'value is ok') + t.is(db.status, 'open', 'status is ok') + })) + } + + await Promise.all(promises) + t.is(await db.get('k' + (entries + 1)), undefined, 'not found') + } + + // NOTE: copied from levelup + test('deferred open(): batch() on new database', async function (t) { + // Create database, opens in next tick + const db = testCommon.factory() + const entries = 3 + const ops = [] + + // Add entries with batch([]), these should be deferred until the database is actually 
open + for (let i = 1; i <= entries; i++) { + ops.push({ type: 'put', key: 'k' + i, value: 'v' + i }) + } + + t.is(db.status, 'opening') + + await db.batch(ops) + await verifyValues(t, db, entries) + + return db.close() + }) + + // NOTE: copied from levelup + test('deferred open(): value of deferred operation is not stringified', async function (t) { + const db = testCommon.factory({ valueEncoding: 'json' }) + + t.is(db.status, 'opening') + await db.put('key', { thing: 2 }) + + t.is(db.status, 'open') + t.same(await db.get('key'), { thing: 2 }) + + return db.close() + }) + + // NOTE: copied from levelup + test('deferred open(): key of deferred operation is not stringified', async function (t) { + const db = testCommon.factory({ keyEncoding: 'json' }) + + t.is(db.status, 'opening') + await db.put({ thing: 2 }, 'value') + + t.is(db.status, 'open') + t.same(await db.keys().all(), [{ thing: 2 }]) + + return db.close() + }) + + // NOTE: copied from deferred-leveldown + // TODO: move to iterator tests, if not already covered there + test('cannot operate on closed db', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + await db.open() + await db.close() + + try { + db.iterator() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + + try { + db.keys() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + + try { + db.values() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + }) + + // NOTE: copied from deferred-leveldown + // TODO: move to iterator tests, if not already covered there + test('cannot operate on closing db', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + await db.open() + const promise = db.close() + + try { + db.iterator() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + + try { + db.keys() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + + try { + db.values() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + 
+ return promise + }) + + // NOTE: copied from deferred-leveldown + // TODO: move to iterator tests, if not already covered there + test('deferred iterator - cannot operate on closed db', async function (t) { + t.plan(4) + + const db = testCommon.factory() + const it = db.iterator({ gt: 'foo' }) + + await db.open() + await db.close() + + t.ok(it instanceof DeferredIterator) + + const promises = [ + it.next().catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + it.nextv(10).catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + it.all().catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + // Was already closed + it.close().catch(function () { + t.fail('no close() error') + }) + ] + + try { + it.seek('foo') + } catch (err) { + // Should *not* throw + t.fail(err) + } + + return Promise.all(promises) + }) + + // NOTE: copied from deferred-leveldown + // TODO: move to iterator tests, if not already covered there + test('deferred iterator - cannot operate on closing db', async function (t) { + t.plan(4) + + const db = testCommon.factory() + const it = db.iterator({ gt: 'foo' }) + + t.ok(it instanceof DeferredIterator) + + await db.open() + const promises = [ + db.close(), + + it.next().catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + it.nextv(10).catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + it.all().catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }), + + // Is already closing + it.close().catch(function () { + t.fail('no close() error') + }) + ] + + try { + it.seek('foo') + } catch (err) { + // Should *not* throw + t.fail(err) + } + + return Promise.all(promises) + }) +} diff --git a/test/del-test.js b/test/del-test.js new file mode 100644 index 0000000..b5a0a84 --- /dev/null +++ b/test/del-test.js @@ -0,0 +1,65 @@ +'use strict' + +const { illegalKeys, assertPromise } = require('./util') +const traits = require('./traits') 
+ +let db + +exports.setUp = function (test, testCommon) { + test('del() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + test('del() with illegal keys', function (t) { + t.plan(illegalKeys.length * 2) + + for (const { name, key } of illegalKeys) { + db.del(key).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + } + }) +} + +exports.del = function (test, testCommon) { + test('simple del()', async function (t) { + await db.put('foo', 'bar') + t.is(await db.get('foo'), 'bar') + t.is(await assertPromise(db.del('foo')), undefined, 'void promise') + t.is(await db.get('foo'), undefined, 'not found') + }) + + test('del() on non-existent key', async function (t) { + for (const key of ['nope', Math.random()]) { + t.is(await assertPromise(db.del(key)), undefined, 'void promise') + } + }) + + traits.open('del()', testCommon, async function (t, db) { + let emitted = false + db.once('write', () => { emitted = true }) + t.is(await db.del('foo'), undefined, 'void promise') + t.ok(emitted) // Not sure what the purpose of this test is + }) + + traits.closed('del()', testCommon, async function (t, db) { + return db.del('foo') + }) +} + +exports.tearDown = function (test, testCommon) { + test('del() teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.del(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/encoding-buffer-test.js b/test/encoding-buffer-test.js new file mode 100644 index 0000000..0ce4b06 --- /dev/null +++ b/test/encoding-buffer-test.js @@ -0,0 +1,240 @@ +'use strict' + +const { Buffer } = require('buffer') +const textEncoder = new TextEncoder() + +exports.all = function (test, testCommon) { + if (!testCommon.supports.encodings.buffer) 
return + + // NOTE: adapted from levelup + test('put() and get() with buffer value and buffer valueEncoding', async function (t) { + const db = testCommon.factory() + await db.open() + await db.put('test', testBuffer(), { valueEncoding: 'buffer' }) + t.same(await db.get('test', { valueEncoding: 'buffer' }), testBuffer()) + return db.close() + }) + + // NOTE: adapted from levelup + test('put() and get() with buffer value and buffer valueEncoding in factory', async function (t) { + const db = testCommon.factory({ valueEncoding: 'buffer' }) + await db.open() + await db.put('test', testBuffer()) + t.same(await db.get('test'), testBuffer()) + return db.close() + }) + + // NOTE: adapted from levelup + test('put() and get() with buffer key and buffer keyEncoding', async function (t) { + const db = testCommon.factory() + await db.open() + await db.put(testBuffer(), 'test', { keyEncoding: 'buffer' }) + t.same(await db.get(testBuffer(), { keyEncoding: 'buffer' }), 'test') + return db.close() + }) + + // NOTE: adapted from levelup + test('put() and get() with buffer key and utf8 keyEncoding', async function (t) { + const db = testCommon.factory() + await db.open() + await db.put(Buffer.from('foo🐄'), 'test', { keyEncoding: 'utf8' }) + t.same(await db.get(Buffer.from('foo🐄'), { keyEncoding: 'utf8' }), 'test') + return db.close() + }) + + // NOTE: adapted from levelup + test('put() and get() with string value and buffer valueEncoding', async function (t) { + const db = testCommon.factory() + await db.open() + await db.put('test', 'foo🐄', { valueEncoding: 'buffer' }) + t.same(await db.get('test', { valueEncoding: 'buffer' }), Buffer.from('foo🐄')) + t.same(await db.get('test', { valueEncoding: 'utf8' }), 'foo🐄') + return db.close() + }) + + // NOTE: adapted from memdown + test('put() as string, get() as buffer and vice versa', async function (t) { + const db = testCommon.factory() + await db.open() + const enc = { keyEncoding: 'buffer', valueEncoding: 'buffer' } + const [a, b] = 
['🐄', '🐄 says moo'] + + const promise1 = db.put(a, a).then(async () => { + const value = await db.get(Buffer.from(a), enc) + t.same(value, Buffer.from(a), 'got buffer value') + }) + + const promise2 = db.put(Buffer.from(b), Buffer.from(b), enc).then(async () => { + const value = await db.get(b) + t.same(value, b, 'got string value') + }) + + await Promise.all([promise1, promise2]) + return db.close() + }) + + // NOTE: adapted from memdown + test('put() stringifies input to buffer', async function (t) { + const db = testCommon.factory() + await db.open() + await db.put(1, 2) + + const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' }) + const entries = await it.all() + + t.same(entries[0][0], Buffer.from('1'), 'key was stringified') + t.same(entries[0][1], Buffer.from('2'), 'value was stringified') + + return db.close() + }) + + // NOTE: adapted from memdown + test('put() as string, iterate as buffer', async function (t) { + const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + await db.open() + await db.put('🐄', '🐄') + + const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' }) + const entries = await it.all() + + t.same(entries, [[Buffer.from('🐄'), Buffer.from('🐄')]]) + return db.close() + }) + + // NOTE: adapted from memdown + test('put() as buffer, iterate as string', async function (t) { + const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' }) + await db.open() + await db.put(Buffer.from('🐄'), Buffer.from('🐄')) + + const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const entries = await it.all() + + t.same(entries, [['🐄', '🐄']]) + return db.close() + }) + + test('put() as view, iterate as view', async function (t) { + const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' }) + const cow = textEncoder.encode('🐄') + await db.open() + await db.put(cow, cow) + + const it = db.iterator() + const entries = await it.all() + const key = 
Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array + const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow + + t.same(entries, [[key, value]]) + return db.close() + }) + + test('put() as string, iterate as view', async function (t) { + const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const cow = textEncoder.encode('🐄') + await db.open() + await db.put('🐄', '🐄') + + const it = db.iterator({ keyEncoding: 'view', valueEncoding: 'view' }) + const entries = await it.all() + const key = Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array + const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow + + t.same(entries, [[key, value]]) + return db.close() + }) + + test('put() as view, iterate as string', async function (t) { + const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' }) + const cow = textEncoder.encode('🐄') + await db.open() + await db.put(cow, cow) + + const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + const entries = await it.all() + + t.same(entries, [['🐄', '🐄']]) + return db.close() + }) + + // NOTE: adapted from levelup + test('batch() with multiple puts with buffer valueEncoding per batch', async function (t) { + const db = testCommon.factory() + await db.open() + await db.batch([ + { type: 'put', key: 'foo', value: testBuffer() }, + { type: 'put', key: 'bar', value: testBuffer() }, + { type: 'put', key: 'baz', value: 'abazvalue' } + ], { valueEncoding: 'buffer' }) + + t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer()) + t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer()) + t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue')) + + return db.close() + }) + + test('batch() with multiple puts with buffer valueEncoding per operation', async function (t) { + const db = testCommon.factory() + await db.open() + await 
db.batch([ + { type: 'put', key: 'foo', value: testBuffer(), valueEncoding: 'buffer' }, + { type: 'put', key: 'bar', value: testBuffer(), valueEncoding: 'buffer' }, + { type: 'put', key: 'baz', value: 'abazvalue', valueEncoding: 'buffer' } + ]) + + t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer()) + t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer()) + t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue')) + + return db.close() + }) + + // NOTE: adapted from encoding-down + test('batch() with buffer encoding in factory', async function (t) { + const operations = [{ + type: 'put', + key: Buffer.from([1, 2, 3]), + value: Buffer.from([4, 5, 6]) + }, { + type: 'put', + key: Buffer.from([7, 8, 9]), + value: Buffer.from([10, 11, 12]) + }] + + const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' }) + await db.open() + await db.batch(operations) + + t.same(await db.get(operations[0].key), operations[0].value) + t.same(await db.get(operations[1].key), operations[1].value) + + return db.close() + }) + + for (const keyEncoding of ['buffer', 'view']) { + // NOTE: adapted from memdown + test(`storage is byte-aware (${keyEncoding} encoding)`, async function (t) { + const db = testCommon.factory({ keyEncoding }) + await db.open() + + const one = Buffer.from('80', 'hex') + const two = Buffer.from('c0', 'hex') + + t.ok(two.toString() === one.toString(), 'would be equal when not byte-aware') + t.ok(two.compare(one) > 0, 'but greater when byte-aware') + + await db.put(one, 'one') + t.is(await db.get(one), 'one', 'value one ok') + + await db.put(two, 'two') + t.is(await db.get(one), 'one', 'value one did not change') + + return db.close() + }) + } +} + +function testBuffer () { + return Buffer.from('0080c0ff', 'hex') +} diff --git a/test/encoding-custom-test.js b/test/encoding-custom-test.js new file mode 100644 index 0000000..fbeec2e --- /dev/null +++ b/test/encoding-custom-test.js @@ -0,0 
+1,86 @@ +'use strict' + +// NOTE: copied from levelup +exports.all = function (test, testCommon) { + for (const deferred of [false, true]) { + test(`custom encoding: simple-object values (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [ + { key: '0', value: 0 }, + { key: '1', value: 1 }, + { key: 'string', value: 'a string' }, + { key: 'true', value: true }, + { key: 'false', value: false } + ]) + }) + + test(`custom encoding: simple-object keys (deferred: ${deferred})`, async function (t) { + // Test keys that would be considered the same with default utf8 encoding. + // Because String([1]) === String(1). + return run(t, deferred, [ + { value: '0', key: [1] }, + { value: '1', key: 1 }, + { value: 'string', key: 'a string' }, + { value: 'true', key: true }, + { value: 'false', key: false } + ]) + }) + + test(`custom encoding: complex-object values (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [{ + key: '0', + value: { + foo: 'bar', + bar: [1, 2, 3], + bang: { yes: true, no: false } + } + }]) + }) + + test(`custom encoding: complex-object keys (deferred: ${deferred})`, async function (t) { + // Test keys that would be considered the same with default utf8 encoding. + // Because String({}) === String({}) === '[object Object]'. 
+ return run(t, deferred, [{ + value: '0', + key: { + foo: 'bar', + bar: [1, 2, 3], + bang: { yes: true, no: false } + } + }, { + value: '1', + key: { + foo: 'different', + bar: [1, 2, 3], + bang: { yes: true, no: false } + } + }]) + }) + } + + async function run (t, deferred, entries) { + const customEncoding = { + encode: JSON.stringify, + decode: JSON.parse, + format: 'utf8', + type: 'custom' + } + + const db = testCommon.factory({ + keyEncoding: customEncoding, + valueEncoding: customEncoding + }) + + const operations = entries.map(entry => ({ type: 'put', ...entry })) + + if (!deferred) await db.open() + + await db.batch(operations) + await Promise.all(entries.map(testGet)) + + async function testGet (entry) { + t.same(await db.get(entry.key), entry.value) + } + + return db.close() + } +} diff --git a/test/encoding-decode-error-test.js b/test/encoding-decode-error-test.js new file mode 100644 index 0000000..aee8157 --- /dev/null +++ b/test/encoding-decode-error-test.js @@ -0,0 +1,67 @@ +'use strict' + +let db +let keySequence = 0 + +const testKey = () => 'test' + (++keySequence) + +exports.all = function (test, testCommon) { + test('decode error setup', async function (t) { + db = testCommon.factory() + return db.open() + }) + + // NOTE: adapted from encoding-down + test('decode error is wrapped by get() and getMany()', async function (t) { + t.plan(4) + + const key = testKey() + const valueEncoding = { + encode: (v) => v, + decode: (v) => { throw new Error('decode error xyz') }, + format: 'utf8' + } + + await db.put(key, 'bar', { valueEncoding }) + + try { + await db.get(key, { valueEncoding }) + } catch (err) { + t.is(err.code, 'LEVEL_DECODE_ERROR') + t.is(err.cause.message, 'decode error xyz') + } + + try { + await db.getMany(['other-key', key], { valueEncoding }) + } catch (err) { + t.is(err.code, 'LEVEL_DECODE_ERROR') + t.is(err.cause.message, 'decode error xyz') + } + }) + + // NOTE: adapted from encoding-down + test('get() and getMany() yield decode 
error if stored value is invalid', async function (t) { + t.plan(4) + + const key = testKey() + await db.put(key, 'this {} is [] not : json', { valueEncoding: 'utf8' }) + + try { + await db.get(key, { valueEncoding: 'json' }) + } catch (err) { + t.is(err.code, 'LEVEL_DECODE_ERROR') + t.is(err.cause.name, 'SyntaxError') // From JSON.parse() + } + + try { + await db.getMany(['other-key', key], { valueEncoding: 'json' }) + } catch (err) { + t.is(err.code, 'LEVEL_DECODE_ERROR') + t.is(err.cause.name, 'SyntaxError') // From JSON.parse() + } + }) + + test('decode error teardown', async function (t) { + return db.close() + }) +} diff --git a/test/encoding-json-test.js b/test/encoding-json-test.js new file mode 100644 index 0000000..f68604a --- /dev/null +++ b/test/encoding-json-test.js @@ -0,0 +1,69 @@ +'use strict' + +// NOTE: copied from levelup +exports.all = function (test, testCommon) { + for (const deferred of [false, true]) { + test(`json encoding: simple-object values (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [ + { key: '0', value: 0 }, + { key: '1', value: 1 }, + { key: '2', value: 'a string' }, + { key: '3', value: true }, + { key: '4', value: false } + ]) + }) + + test(`json encoding: simple-object keys (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [ + { value: 'string', key: 'a string' }, + { value: '0', key: 0 }, + { value: '1', key: 1 }, + { value: 'false', key: false }, + { value: 'true', key: true } + ]) + }) + + test(`json encoding: complex-object values (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [{ + key: '0', + value: { + foo: 'bar', + bar: [1, 2, 3], + bang: { yes: true, no: false } + } + }]) + }) + + test(`json encoding: complex-object keys (deferred: ${deferred})`, async function (t) { + return run(t, deferred, [{ + value: '0', + key: { + foo: 'bar', + bar: [1, 2, 3], + bang: { yes: true, no: false } + } + }]) + }) + } + + async function run (t, deferred, entries) 
{ + const db = testCommon.factory({ keyEncoding: 'json', valueEncoding: 'json' }) + const operations = entries.map(entry => ({ type: 'put', ...entry })) + + if (!deferred) await db.open() + + await db.batch(operations) + await Promise.all([...entries.map(testGet), testIterator()]) + + return db.close() + + async function testGet (entry) { + t.same(await db.get(entry.key), entry.value) + } + + async function testIterator () { + const result = await db.iterator().all() + t.same(result, entries.map(kv => [kv.key, kv.value])) + } + } +} diff --git a/test/encoding-test.js b/test/encoding-test.js new file mode 100644 index 0000000..d9ec58e --- /dev/null +++ b/test/encoding-test.js @@ -0,0 +1,119 @@ +'use strict' + +let db +let keySequence = 0 + +const testKey = () => 'test' + (++keySequence) + +// TODO: test encoding options on every method. This is largely +// covered (indirectly) by other tests, but a dedicated property- +// based test for each would be good to have. +exports.all = function (test, testCommon) { + test('encoding setup', async function (t) { + db = testCommon.factory() + return db.open() + }) + + // NOTE: adapted from encoding-down + test('encodings default to utf8', function (t) { + t.is(db.keyEncoding().commonName, 'utf8') + t.is(db.valueEncoding().commonName, 'utf8') + t.end() + }) + + test('can set encoding options in factory', async function (t) { + const dbs = [] + + for (const name of ['buffer', 'view', 'json']) { + if (!testCommon.supports.encodings[name]) continue + + const db1 = testCommon.factory({ keyEncoding: name }) + const db2 = testCommon.factory({ valueEncoding: name }) + const db3 = testCommon.factory({ keyEncoding: name, valueEncoding: name }) + + t.is(db1.keyEncoding().commonName, name) + t.is(db1.keyEncoding(), db1.keyEncoding(name)) + t.is(db1.valueEncoding().commonName, 'utf8') + t.is(db1.valueEncoding(), db1.valueEncoding('utf8')) + + t.is(db2.keyEncoding().commonName, 'utf8') + t.is(db2.keyEncoding(), db2.keyEncoding('utf8')) + 
t.is(db2.valueEncoding().commonName, name) + t.is(db2.valueEncoding(), db2.valueEncoding(name)) + + t.is(db3.keyEncoding().commonName, name) + t.is(db3.keyEncoding(), db3.keyEncoding(name)) + t.is(db3.valueEncoding().commonName, name) + t.is(db3.valueEncoding(), db3.valueEncoding(name)) + + dbs.push(db1, db2, db3) + } + + await Promise.all(dbs.map(db => db.close())) + }) + + // NOTE: adapted from encoding-down + for (const deferred of [false, true]) { + test(`default utf8 encoding stringifies numbers (deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + if (!deferred) await db.open() + await db.put(1, 2) + t.is(await db.get(1), '2') + return db.close() + }) + } + + // NOTE: adapted from encoding-down + test('can decode from string to json', async function (t) { + const key = testKey() + const data = { thisis: 'json' } + await db.put(key, JSON.stringify(data), { valueEncoding: 'utf8' }) + t.same(await db.get(key, { valueEncoding: 'json' }), data, 'got parsed object') + }) + + // NOTE: adapted from encoding-down + test('can decode from json to string', async function (t) { + const data = { thisis: 'json' } + const key = testKey() + await db.put(key, data, { valueEncoding: 'json' }) + t.same(await db.get(key, { valueEncoding: 'utf8' }), JSON.stringify(data), 'got unparsed JSON string') + }) + + // NOTE: adapted from encoding-down + test('getMany() skips decoding not-found values', async function (t) { + t.plan(2) + + const valueEncoding = { + encode: JSON.stringify, + decode (value) { + t.is(value, JSON.stringify(data)) + return JSON.parse(value) + }, + format: 'utf8' + } + + const data = { beep: 'boop' } + const key = testKey() + + await db.put(key, data, { valueEncoding }) + t.same(await db.getMany([key, testKey()], { valueEncoding }), [data, undefined]) + }) + + // NOTE: adapted from memdown + test('number keys with utf8 encoding', async function (t) { + const db = testCommon.factory() + const numbers = [-Infinity, 0, 12, 2, +Infinity] 
+ + await db.open() + await db.batch(numbers.map(key => ({ type: 'put', key, value: 'value' }))) + + const keys = await db.keys({ keyEncoding: 'utf8' }).all() + t.same(keys, numbers.map(String), 'sorts lexicographically') + + return db.close() + }) + + test('encoding teardown', async function (t) { + return db.close() + }) +} diff --git a/test/events/write.js b/test/events/write.js new file mode 100644 index 0000000..96ad40b --- /dev/null +++ b/test/events/write.js @@ -0,0 +1,132 @@ +'use strict' + +module.exports = function (test, testCommon) { + for (const deferred of [false, true]) { + // Chained batch does not support deferred open + const batchMethods = deferred ? ['batch'] : ['batch', 'chained batch'] + const allMethods = batchMethods.concat(['singular']) + + for (const method of allMethods) { + // db.put() and db.del() do not support the sublevel option + for (const withSublevel of (method === 'singular' ? [false] : [false, true])) { + test(`db emits write event for ${method} put operation (deferred: ${deferred}, sublevel: ${withSublevel})`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + const sublevel = withSublevel ? db.sublevel('abc') : null + + if (!deferred) { + await db.open() + if (withSublevel) await sublevel.open() + } + + // Note: may return a transcoder encoding, which unfortunately makes the below + // assertions a little less precise (i.e. we can't compare output data). But + // in places where we expect encoded data, we can use strings (rather than + // numbers) as the input to encode(), which'll tell us that encoding did happen. + const dbEncoding = db.keyEncoding('utf8') + const subEncoding = withSublevel ? sublevel.keyEncoding('utf8') : null + + db.on('write', function (ops) { + t.same(ops, [ + { + type: 'put', + key: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : 456, + value: withSublevel ? subEncoding.encode('99') : 99, + keyEncoding: db.keyEncoding(withSublevel ? 
subEncoding.format : 'utf8'), + valueEncoding: db.valueEncoding(withSublevel ? subEncoding.format : 'utf8'), + encodedKey: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : dbEncoding.encode('456'), + encodedValue: (withSublevel ? subEncoding : dbEncoding).encode('99'), + custom: 123, + sublevel: null // Should be unset + } + ], 'got write event') + }) + + switch (method) { + case 'batch': + await db.batch([{ type: 'put', key: 456, value: 99, custom: 123, sublevel }]) + break + case 'chained batch': + await db.batch().put(456, 99, { custom: 123, sublevel }).write() + break + case 'singular': + // Does not support sublevel option + await db.put(456, 99, { custom: 123, sublevel }) + break + } + + return db.close() + }) + + test(`db emits write event for ${method} del operation (deferred: ${deferred}, sublevel: ${withSublevel})`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + const sublevel = withSublevel ? db.sublevel('abc') : null + + if (!deferred) { + await db.open() + if (withSublevel) await sublevel.open() + } + + // See notes above, in the put test + const dbEncoding = db.keyEncoding('utf8') + const subEncoding = withSublevel ? sublevel.keyEncoding('utf8') : null + + db.on('write', function (ops) { + t.same(ops, [ + { + type: 'del', + key: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : 456, + keyEncoding: db.keyEncoding(withSublevel ? subEncoding.format : 'utf8'), + encodedKey: withSublevel ? 
sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : dbEncoding.encode('456'), + custom: 123, + sublevel: null // Should be unset + } + ], 'got write event') + }) + + switch (method) { + case 'batch': + await db.batch([{ type: 'del', key: 456, custom: 123, sublevel }]) + break + case 'chained batch': + await db.batch().del(456, { custom: 123, sublevel }).write() + break + case 'singular': + // Does not support sublevel option + await db.del(456, { custom: 123, sublevel }) + break + } + + return db.close() + }) + } + } + + for (const method of batchMethods) { + test(`db emits write event for multiple ${method} operations (deferred: ${deferred})`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + if (!deferred) await db.open() + + db.on('write', function (ops) { + t.same(ops.map(op => op.key), ['a', 'b'], 'got multiple operations in one event') + }) + + switch (method) { + case 'batch': + await db.batch([{ type: 'put', key: 'a', value: 'foo' }, { type: 'del', key: 'b' }]) + break + case 'chained batch': + await db.batch().put('a', 'foo').del('b').write() + break + } + + return db.close() + }) + } + } +} diff --git a/test/factory-test.js b/test/factory-test.js new file mode 100644 index 0000000..dfb0a38 --- /dev/null +++ b/test/factory-test.js @@ -0,0 +1,36 @@ +'use strict' + +module.exports = function (test, testCommon) { + test('testCommon.factory() returns valid database', function (t) { + t.plan(6) + + const db = testCommon.factory() + const kEvent = Symbol('event') + + // Avoid instanceof, for levelup compatibility tests + t.is(typeof db, 'object', 'is an object') + t.isNot(db, null, 'is not null') + t.is(typeof db.open, 'function', 'has open() method') + t.is(typeof db.on, 'function', 'has on() method') + t.is(typeof db.emit, 'function', 'has emit() method') + + db.once(kEvent, (v) => t.is(v, 'foo', 'got event')) + db.emit(kEvent, 'foo') + }) + + test('testCommon.factory() returns a unique database', async function (t) { 
+ const db1 = testCommon.factory() + const db2 = testCommon.factory() + + t.isNot(db1, db2, 'unique instances') + + await db1.open() + await db2.open() + await db1.put('key', 'value') + + const value = await db2.get('key') + t.is(value, undefined, 'db2 should be empty') + + return Promise.all([db1.close(), db2.close()]) + }) +} diff --git a/test/get-many-test.js b/test/get-many-test.js new file mode 100644 index 0000000..48a5957 --- /dev/null +++ b/test/get-many-test.js @@ -0,0 +1,157 @@ +'use strict' + +const { illegalKeys, assertPromise } = require('./util') +const traits = require('./traits') + +let db + +/** + * @param {import('tape')} test + */ +exports.setUp = function (test, testCommon) { + test('getMany() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +/** + * @param {import('tape')} test + */ +exports.args = function (test, testCommon) { + test('getMany() requires an array argument', function (t) { + t.plan(6) + + db.getMany().catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + + db.getMany('foo').catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + + db.getMany('foo', {}).catch(function (err) { + t.is(err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + }) + + test('getMany() with illegal keys', function (t) { + t.plan(illegalKeys.length * 4) + + for (const { name, key } of illegalKeys) { + db.getMany([key]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + + db.getMany(['valid', key]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error (second key)') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (second key)') + }) + } + }) +} + +/** + * @param {import('tape')} test + 
*/ +exports.getMany = function (test, testCommon) { + test('simple getMany()', async function (t) { + await db.put('foo', 'bar') + + t.same(await assertPromise(db.getMany(['foo'])), ['bar']) + t.same(await db.getMany(['foo'], {}), ['bar']) // same but with {} + t.same(await db.getMany(['foo'], { valueEncoding: 'utf8' }), ['bar']) + }) + + test('getMany() with multiple keys', async function (t) { + await db.put('beep', 'boop') + + t.same(await db.getMany(['foo', 'beep']), ['bar', 'boop']) + t.same(await db.getMany(['beep', 'foo']), ['boop', 'bar'], 'maintains order of input keys') + }) + + test('empty getMany()', async function (t) { + t.same(await db.getMany([]), []) + + const encodings = Object.keys(db.supports.encodings) + .filter(k => db.supports.encodings[k]) + + for (const valueEncoding of encodings) { + t.same(await db.getMany([], { valueEncoding }), []) + } + }) + + test('getMany() on non-existent keys', async function (t) { + t.same(await db.getMany(['nope', 'another']), [undefined, undefined]) + t.same(await db.getMany(['beep', 'another']), ['boop', undefined]) + t.same(await db.getMany(['nope', 'beep', Math.random()]), [undefined, 'boop', undefined]) + + const encodings = Object.keys(db.supports.encodings) + .filter(k => db.supports.encodings[k]) + + for (const valueEncoding of encodings) { + t.same(await db.getMany(['nope', 'another'], { valueEncoding }), [undefined, undefined]) + } + }) + + test('simultaneous getMany()', async function (t) { + t.plan(20) + + await db.put('hello', 'world') + const promises = [] + + for (let i = 0; i < 10; ++i) { + promises.push(db.getMany(['hello']).then(function (values) { + t.same(values, ['world']) + })) + } + + for (let i = 0; i < 10; ++i) { + promises.push(db.getMany(['non-existent']).then(function (values) { + t.same(values, [undefined]) + })) + } + + return Promise.all(promises) + }) + + traits.open('getMany()', testCommon, async function (t, db) { + t.same(await assertPromise(db.getMany(['foo'])), [undefined]) + 
}) + + traits.closed('getMany()', testCommon, async function (t, db) { + return db.getMany(['foo']) + }) + + // Also test empty array because it has a fast-path + traits.open('getMany() with empty array', testCommon, async function (t, db) { + t.same(await assertPromise(db.getMany([])), []) + }) + + traits.closed('getMany() with empty array', testCommon, async function (t, db) { + return db.getMany([]) + }) +} + +/** + * @param {import('tape')} test + */ +exports.tearDown = function (test, testCommon) { + test('getMany() teardown', async function (t) { + return db.close() + }) +} + +/** + * @param {import('tape')} test + */ +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.getMany(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/get-test.js b/test/get-test.js new file mode 100644 index 0000000..87705a7 --- /dev/null +++ b/test/get-test.js @@ -0,0 +1,83 @@ +'use strict' + +const { illegalKeys, assertPromise } = require('./util') +const traits = require('./traits') + +let db + +exports.setUp = function (test, testCommon) { + test('get() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + test('get() with illegal keys', function (t) { + t.plan(illegalKeys.length * 2) + + for (const { name, key } of illegalKeys) { + db.get(key).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + } + }) +} + +exports.get = function (test, testCommon) { + test('simple get()', async function (t) { + await db.put('foo', 'bar') + t.is(await assertPromise(db.get('foo')), 'bar') + t.is(await db.get('foo', {}), 'bar') // same but with {} + t.is(await db.get('foo', { valueEncoding: 'utf8' }), 'bar') + }) + + test('get() on non-existent key', async function (t) { + for (const key of ['non-existent', Math.random()]) { + 
t.is(await assertPromise(db.get(key)), undefined, 'not found') + } + }) + + test('simultaneous get()', async function (t) { + t.plan(20) + + await db.put('hello', 'world') + const promises = [] + + for (let i = 0; i < 10; ++i) { + promises.push(db.get('hello').then((value) => { + t.is(value, 'world') + })) + } + + for (let i = 0; i < 10; ++i) { + promises.push(db.get('non-existent').then((value) => { + t.is(value, undefined, 'not found') + })) + } + + return Promise.all(promises) + }) + + traits.open('get()', testCommon, async function (t, db) { + t.is(await assertPromise(db.get('foo')), undefined, 'void promise') + }) + + traits.closed('get()', testCommon, async function (t, db) { + return db.get('foo') + }) +} + +exports.tearDown = function (test, testCommon) { + test('get() teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.get(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/has-many-test.js b/test/has-many-test.js new file mode 100644 index 0000000..488c18f --- /dev/null +++ b/test/has-many-test.js @@ -0,0 +1,144 @@ +'use strict' + +const { illegalKeys } = require('./util') +const traits = require('./traits') + +let db + +/** + * @param {import('tape')} test + */ +exports.setUp = function (test, testCommon) { + test('hasMany() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +/** + * @param {import('tape')} test + */ +exports.args = function (test, testCommon) { + test('hasMany() requires an array argument', function (t) { + t.plan(6) + + db.hasMany().catch(function (err) { + t.is(err && err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + + db.hasMany('foo').catch(function (err) { + t.is(err && err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + + db.hasMany('foo', 
{}).catch(function (err) { + t.is(err && err.name, 'TypeError') + t.is(err && err.message, "The first argument 'keys' must be an array") + }) + }) + + test('hasMany() with illegal keys', function (t) { + t.plan(illegalKeys.length * 4) + + for (const { name, key } of illegalKeys) { + db.hasMany([key]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + + db.hasMany(['valid', key]).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error (second key)') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (second key)') + }) + } + }) +} + +/** + * @param {import('tape')} test + */ +exports.hasMany = function (test, testCommon) { + test('simple hasMany()', async function (t) { + await db.put('foo', 'bar') + + t.same(await db.hasMany(['foo']), [true]) + t.same(await db.hasMany(['foo'], {}), [true]) // same but with {} + t.same(await db.hasMany(['beep']), [false]) + + await db.put('beep', 'boop') + + t.same(await db.hasMany(['beep']), [true]) + t.same(await db.hasMany(['foo', 'beep']), [true, true]) + t.same(await db.hasMany(['aaa', 'beep']), [false, true]) + t.same(await db.hasMany(['beep', 'aaa']), [true, false], 'maintains order of input keys') + }) + + test('empty hasMany()', async function (t) { + t.same(await db.hasMany([]), []) + + const encodings = Object.keys(db.supports.encodings) + .filter(k => db.supports.encodings[k]) + + for (const valueEncoding of encodings) { + t.same(await db.hasMany([], { valueEncoding }), []) + } + }) + + test('simultaneous hasMany()', async function (t) { + t.plan(20) + + await db.put('hello', 'world') + const promises = [] + + for (let i = 0; i < 10; ++i) { + promises.push(db.hasMany(['hello']).then(function (values) { + t.same(values, [true]) + })) + } + + for (let i = 0; i < 10; ++i) { + promises.push(db.hasMany(['non-existent']).then(function (values) { + t.same(values, [false]) + })) + } + + return 
Promise.all(promises) + }) + + traits.open('hasMany()', testCommon, async function (t, db) { + t.same(await db.hasMany(['foo']), [false]) + }) + + traits.closed('hasMany()', testCommon, async function (t, db) { + return db.hasMany(['foo']) + }) + + // Also test empty array because it has a fast-path + traits.open('hasMany() with empty array', testCommon, async function (t, db) { + t.same(await db.hasMany([]), []) + }) + + traits.closed('hasMany() with empty array', testCommon, async function (t, db) { + return db.hasMany([]) + }) +} + +/** + * @param {import('tape')} test + */ +exports.tearDown = function (test, testCommon) { + test('hasMany() teardown', async function (t) { + return db.close() + }) +} + +/** + * @param {import('tape')} test + */ +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.hasMany(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/has-test.js b/test/has-test.js new file mode 100644 index 0000000..c17b7c7 --- /dev/null +++ b/test/has-test.js @@ -0,0 +1,81 @@ +'use strict' + +const { illegalKeys } = require('./util') +const traits = require('./traits') + +let db + +exports.setUp = function (test, testCommon) { + test('has() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + test('has() with illegal keys', function (t) { + t.plan(illegalKeys.length * 2) + + for (const { name, key } of illegalKeys) { + db.has(key).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + } + }) +} + +exports.has = function (test, testCommon) { + test('simple has()', async function (t) { + await db.put('foo', 'bar') + + t.is(await db.has('foo'), true) + t.is(await db.has('foo', {}), true) // same but with {} + + for (const key of ['non-existent', Math.random()]) { + t.is(await db.has(key), false, 
'not found') + } + }) + + test('simultaneous has()', async function (t) { + t.plan(20) + + await db.put('hello', 'world') + const promises = [] + + for (let i = 0; i < 10; ++i) { + promises.push(db.has('hello').then((value) => { + t.is(value, true, 'found') + })) + } + + for (let i = 0; i < 10; ++i) { + promises.push(db.has('non-existent').then((value) => { + t.is(value, false, 'not found') + })) + } + + return Promise.all(promises) + }) + + traits.open('has()', testCommon, async function (t, db) { + t.is(await db.has('foo'), false) + }) + + traits.closed('has()', testCommon, async function (t, db) { + return db.has('foo') + }) +} + +exports.tearDown = function (test, testCommon) { + test('has() teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.has(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/hooks/newsub.js b/test/hooks/newsub.js new file mode 100644 index 0000000..33ef7db --- /dev/null +++ b/test/hooks/newsub.js @@ -0,0 +1,57 @@ +'use strict' + +const shared = require('./shared') + +module.exports = function (test, testCommon) { + shared(test, testCommon, 'newsub') + + test('newsub hook function receives sublevel and default options', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + let instance + db.hooks.newsub.add(function (sublevel, options) { + instance = sublevel + + // Recursing is the main purpose of this hook + t.ok(sublevel.hooks, 'can access sublevel hooks') + t.same(options, { separator: '!' 
}) + }) + + t.ok(db.sublevel('sub') === instance) + return db.close() + }) + + test('newsub hook function receives userland options', async function (t) { + t.plan(1) + + const db = testCommon.factory() + + db.hooks.newsub.add(function (sublevel, options) { + t.same(options, { separator: '!', userland: 123 }) + }) + + db.sublevel('sub', { userland: 123 }) + return db.close() + }) + + test('db wraps error from newsub hook function', async function (t) { + t.plan(2) + + const db = testCommon.factory() + + db.hooks.newsub.add(function (sublevel, options) { + throw new Error('test') + }) + + try { + db.sublevel('sub') + } catch (err) { + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.message, 'test') + } + + return db.close() + }) +} diff --git a/test/hooks/postopen.js b/test/hooks/postopen.js new file mode 100644 index 0000000..685be16 --- /dev/null +++ b/test/hooks/postopen.js @@ -0,0 +1,229 @@ +'use strict' + +const shared = require('./shared') + +module.exports = function (test, testCommon) { + shared(test, testCommon, 'postopen') + + test('postopen hook function is called before deferred operations and open event', async function (t) { + t.plan(5) + + const db = testCommon.factory() + const order = [] + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + order.push('postopen') + }) + + db.on('opening', function () { + t.is(db.status, 'opening') + order.push('opening') + }) + + db.defer(function () { + t.is(db.status, 'open') + order.push('undefer') + }) + + db.on('open', function () { + t.is(db.status, 'open') + order.push('open') + }) + + await db.open() + t.same(order, ['opening', 'postopen', 'undefer', 'open']) + + return db.close() + }) + + test('postopen hook functions are called sequentially', async function (t) { + t.plan(1) + + const db = testCommon.factory() + + let waited = false + db.hooks.postopen.add(async function (options) { + return new Promise(function (resolve) { + setTimeout(function () { + waited = true + 
resolve() + }, 100) + }) + }) + + db.hooks.postopen.add(async function (options) { + t.ok(waited) + }) + + await db.open() + return db.close() + }) + + test('postopen hook function receives options from constructor', async function (t) { + t.plan(1) + + const db = testCommon.factory({ userland: 123 }) + + db.hooks.postopen.add(async function (options) { + t.same(options, { + createIfMissing: true, + errorIfExists: false, + userland: 123 + }) + }) + + await db.open() + return db.close() + }) + + test('postopen hook function receives options from open()', async function (t) { + t.plan(1) + + const db = testCommon.factory() + + db.hooks.postopen.add(async function (options) { + t.same(options, { + createIfMissing: true, + errorIfExists: false, + userland: 456 + }) + }) + + await db.open({ userland: 456 }) + return db.close() + }) + + test('error from postopen hook function closes the db', async function (t) { + t.plan(4) + + const db = testCommon.factory() + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + throw new Error('test') + }) + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.message, 'test') + } + }) + + test('error from postopen hook function must be an error', async function (t) { + t.plan(5) + + const db = testCommon.factory() + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + // eslint-disable-next-line prefer-promise-reject-errors + return Promise.reject(null) + }) + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.name, 'TypeError') + t.is(err.cause.message, 'Promise rejection reason must be an Error, received null') + } + }) + + test('error from postopen hook function must be an error, but it can be cross-realm', async function (t) { + t.plan(5) + + class FakeError { + get [Symbol.toStringTag] () { + return 'Error' + } + } + + const fake = 
new FakeError() + const db = testCommon.factory() + + t.is(Object.prototype.toString.call(fake), '[object Error]') + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + return Promise.reject(fake) + }) + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause, fake) + } + }) + + test('errors from both postopen hook function and resource lock the db', async function (t) { + t.plan(9) + + const db = testCommon.factory() + const resource = db.iterator() + + resource.close = async function () { + throw new Error('error from resource') + } + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + throw new Error('error from hook') + }) + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.name, 'CombinedError') + t.is(err.cause.message, 'error from hook; error from resource') + } + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_STATUS_LOCKED') + } + + try { + await db.close() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_STATUS_LOCKED') + } + }) + + for (const method of ['open', 'close']) { + test(`postopen hook function that attempts to call ${method}() results in error`, async function (t) { + t.plan(5) + + const db = testCommon.factory() + + db.hooks.postopen.add(async function (options) { + t.is(db.status, 'open') + return db[method]() + }) + + db.on('open', function () { + t.fail('should not open') + }) + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.code, 'LEVEL_STATUS_LOCKED') + t.is(err.cause.message, 'Database status is locked') + } + }) + } +} diff --git a/test/hooks/prewrite.js b/test/hooks/prewrite.js new file mode 100644 index 0000000..d4bfc17 --- /dev/null +++ b/test/hooks/prewrite.js @@ -0,0 +1,816 @@ +'use strict' 
+ +const shared = require('./shared') + +module.exports = function (test, testCommon) { + shared(test, testCommon, 'prewrite') + + for (const deferred of [false, true]) { + for (const type of ['put', 'del']) { + for (const method of ['batch', 'chained batch', 'singular']) { + test(`prewrite hook function is called after open (deferred: ${deferred})`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + if (!deferred) await db.open() + + db.hooks.prewrite.add(function (op, batch) { + t.is(db.status, 'open') + }) + + if (type === 'put') { + switch (method) { + case 'batch': + await db.batch([{ type: 'put', key: 'beep', value: 'boop' }]) + break + case 'chained batch': + // Does not support deferred open + await db.open() + await db.batch().put('beep', 'boop').write() + break + case 'singular': + await db.put('beep', 'boop') + break + } + } else if (type === 'del') { + switch (method) { + case 'batch': + await db.batch([{ type: 'del', key: 'beep' }]) + break + case 'chained batch': + // Does not support deferred open + await db.open() + await db.batch().del('beep').write() + break + case 'singular': + await db.del('beep') + break + } + } + + return db.close() + }) + } + } + } + + test('prewrite hook function receives put op', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + t.same(op, { + type: 'put', + key: 'beep', + value: 'boop', + keyEncoding: db.keyEncoding('utf8'), + valueEncoding: db.valueEncoding('utf8') + }) + }) + + await db.put('beep', 'boop') + await db.batch([{ type: 'put', key: 'beep', value: 'boop' }]) + await db.batch().put('beep', 'boop').write() + + return db.close() + }) + + test('prewrite hook function receives del op', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + t.same(op, { + type: 'del', + key: 'beep', + keyEncoding: db.keyEncoding('utf8') + }) + }) + + await db.del('beep') + await 
db.batch([{ type: 'del', key: 'beep' }]) + await db.batch().del('beep').write() + + return db.close() + }) + + test('prewrite hook function receives put op with custom encodings and userland option', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + t.same(op, { + type: 'put', + key: 123, // Should not be JSON-encoded + value: 'boop', + keyEncoding: db.keyEncoding('json'), + valueEncoding: db.valueEncoding('json'), + userland: 456 + }) + }) + + await db.put(123, 'boop', { keyEncoding: 'json', valueEncoding: 'json', userland: 456 }) + await db.batch([{ type: 'put', key: 123, value: 'boop', keyEncoding: 'json', valueEncoding: 'json', userland: 456 }]) + await db.batch().put(123, 'boop', { keyEncoding: 'json', valueEncoding: 'json', userland: 456 }).write() + + return db.close() + }) + + test('prewrite hook function receives del op with custom encodings and userland option', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + t.same(op, { + type: 'del', + key: 123, // Should not be JSON-encoded + keyEncoding: db.keyEncoding('json'), + userland: 456 + }) + }) + + await db.del(123, { keyEncoding: 'json', userland: 456 }) + await db.batch([{ type: 'del', key: 123, keyEncoding: 'json', userland: 456 }]) + await db.batch().del(123, { keyEncoding: 'json', userland: 456 }).write() + + return db.close() + }) + + test('prewrite hook function can modify put operation', async function (t) { + t.plan(10 * 3) + + const db = testCommon.factory({ keyEncoding: 'json', valueEncoding: 'utf8' }) + + db.hooks.prewrite.add(function (op, batch) { + t.is(op.keyEncoding, db.keyEncoding('json')) + t.is(op.valueEncoding, db.valueEncoding('utf8')) + + op.key = '456' + op.value = { x: 1 } + + // Flip the encodings + op.keyEncoding = 'utf8' + op.valueEncoding = 'json' + + // Test adding a userland option + op.userland = 456 + }) + + db.on('write', function 
(ops) { + t.is(ops.length, 1) + t.is(ops[0].key, '456') + t.same(ops[0].value, { x: 1 }) + t.is(ops[0].keyEncoding, db.keyEncoding('utf8')) + t.is(ops[0].valueEncoding, db.valueEncoding('json')) + t.same(ops[0].encodedKey, db.keyEncoding('utf8').encode('456')) + t.same(ops[0].encodedValue, db.valueEncoding('json').encode({ x: 1 })) + t.is(ops[0].userland, 456) + }) + + await db.put(123, 'boop') + await db.batch([{ type: 'put', key: 123, value: 'boop' }]) + await db.batch().put(123, 'boop').write() + + return db.close() + }) + + test('prewrite hook function can modify del operation', async function (t) { + t.plan(6 * 3) + + const db = testCommon.factory({ keyEncoding: 'json' }) + + db.hooks.prewrite.add(function (op, batch) { + t.is(op.keyEncoding, db.keyEncoding('json')) + + op.key = '456' + op.keyEncoding = 'utf8' + + // Test adding a userland option + op.userland = 456 + }) + + db.on('write', function (ops) { + t.is(ops.length, 1) + t.is(ops[0].key, '456') + t.is(ops[0].keyEncoding, db.keyEncoding('utf8')) + t.same(ops[0].encodedKey, db.keyEncoding('utf8').encode('456')) + t.is(ops[0].userland, 456) + }) + + await db.del(123) + await db.batch([{ type: 'del', key: 123 }]) + await db.batch().del(123).write() + + return db.close() + }) + + test('second prewrite hook function sees modified operation of first', async function (t) { + t.plan(6 * 2) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + t.is(op.key, '1') + op.key = '2' + }) + + db.hooks.prewrite.add(function (op, batch) { + t.is(op.key, '2') + }) + + await db.put('1', 'boop') + await db.batch([{ type: 'put', key: '1', value: 'boop' }]) + await db.batch().put('1', 'boop').write() + + await db.del('1') + await db.batch([{ type: 'del', key: '1' }]) + await db.batch().del('1').write() + + return db.close() + }) + + test('prewrite hook function triggered by put can add put operation', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + // Note: may return 
a transcoder encoding + const utf8 = db.keyEncoding('utf8') + const json = db.valueEncoding('json') + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ + type: 'put', + key: 'from-hook', + value: { abc: 123 }, + valueEncoding: 'json' + }) + }) + + db.on('write', function (ops) { + t.same(ops, [ + { + type: 'put', + key: 'beep', + value: 'boop', + keyEncoding: db.keyEncoding('utf8'), + valueEncoding: db.valueEncoding('utf8'), + encodedKey: utf8.encode('beep'), + encodedValue: utf8.encode('boop') + }, + { + type: 'put', + key: 'from-hook', + value: { abc: 123 }, + keyEncoding: db.keyEncoding('utf8'), + valueEncoding: db.valueEncoding('json'), + encodedKey: utf8.encode('from-hook'), + encodedValue: json.encode({ abc: 123 }) + } + ]) + }) + + await db.put('beep', 'boop') + await db.batch([{ type: 'put', key: 'beep', value: 'boop' }]) + await db.batch().put('beep', 'boop').write() + + return db.close() + }) + + test('prewrite hook function triggered by del can add del operation', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + // Note: may return a transcoder encoding + const utf8 = db.keyEncoding('utf8') + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ type: 'del', key: 'from-hook' }) + }) + + db.on('write', function (ops) { + t.same(ops, [ + { + type: 'del', + key: 'beep', + keyEncoding: db.keyEncoding('utf8'), + encodedKey: utf8.encode('beep') + }, + { + type: 'del', + key: 'from-hook', + keyEncoding: db.keyEncoding('utf8'), + encodedKey: utf8.encode('from-hook') + } + ]) + }) + + await db.del('beep') + await db.batch([{ type: 'del', key: 'beep' }]) + await db.batch().del('beep').write() + + return db.close() + }) + + test('prewrite hook function can add operations with sublevel option', async function (t) { + t.plan(2 * 6) + + const db = testCommon.factory() + const sublevel = db.sublevel('sub', { keyEncoding: 'json', valueEncoding: 'json' }) + + // Note: may return a transcoder encoding + const utf8 = 
db.keyEncoding('utf8') + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ type: 'put', key: 'from-hook-1', value: { x: 22 }, sublevel }) + batch.add({ type: 'del', key: 'from-hook-2', sublevel }) + }) + + db.on('write', function (ops) { + t.is(ops[0].key, 'from-input') + t.same(ops.slice(1), [ + { + type: 'put', + key: utf8.encode('!sub!"from-hook-1"'), + value: utf8.encode('{"x":22}'), + keyEncoding: db.keyEncoding(sublevel.keyEncoding().format), + valueEncoding: db.valueEncoding(sublevel.valueEncoding().format), + encodedKey: utf8.encode('!sub!"from-hook-1"'), + encodedValue: utf8.encode('{"x":22}'), + sublevel: null // Should be unset + }, + { + type: 'del', + key: utf8.encode('!sub!"from-hook-2"'), + keyEncoding: db.keyEncoding(sublevel.keyEncoding().format), + encodedKey: utf8.encode('!sub!"from-hook-2"'), + sublevel: null // Should be unset + } + ]) + }) + + await db.put('from-input', 'abc') + await db.batch([{ type: 'put', key: 'from-input', value: 'abc' }]) + await db.batch().put('from-input', 'abc').write() + + await db.del('from-input') + await db.batch([{ type: 'del', key: 'from-input' }]) + await db.batch().del('from-input').write() + + return db.close() + }) + + test('prewrite hook function can add operations with descendant sublevel option', async function (t) { + t.plan(20) + + const db = testCommon.factory() + await db.open() + + const a = db.sublevel('a') + const b = a.sublevel('b') + const c = b.sublevel('c') + + // Note: may return a transcoder encoding + const utf8 = db.keyEncoding('utf8') + + const put = async (db, key, opts) => { + const fn = function (op, batch) { + batch.add({ type: 'put', key, value: 'x', ...opts }) + } + + db.hooks.prewrite.add(fn) + + try { + await db.put('0', '0') + } finally { + db.hooks.prewrite.delete(fn) + } + } + + const del = async (db, key, opts) => { + const fn = function (op, batch) { + batch.add({ type: 'del', key, ...opts }) + } + + db.hooks.prewrite.add(fn) + + try { + await db.del('0') + } 
finally { + db.hooks.prewrite.delete(fn) + } + } + + // Note: not entirely a noop. Use of sublevel option triggers data to be encoded early + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('1'), 'got put 1')) + await put(db, '1', { sublevel: db }) + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!2'), 'got put 2')) + await put(db, '2', { sublevel: a }) + await put(a, '2', { sublevel: a }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!3'), 'got put 3')) + await put(db, '3', { sublevel: b }) + await put(a, '3', { sublevel: b }) // Same + await put(b, '3', { sublevel: b }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!!c!4'), 'got put 4')) + await put(db, '4', { sublevel: c }) + await put(a, '4', { sublevel: c }) // Same + await put(b, '4', { sublevel: c }) // Same + await put(c, '4', { sublevel: c }) // Same + + // Test deletes + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('1'), 'got del 1')) + await del(db, '1', { sublevel: db }) + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!2'), 'got del 2')) + await del(db, '2', { sublevel: a }) + await del(a, '2', { sublevel: a }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!3'), 'got del 3')) + await del(db, '3', { sublevel: b }) + await del(a, '3', { sublevel: b }) // Same + await del(b, '3', { sublevel: b }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!!c!4'), 'got del 4')) + await del(db, '4', { sublevel: c }) + await del(a, '4', { sublevel: c }) // Same + await del(b, '4', { sublevel: c }) // Same + await del(c, '4', { sublevel: c }) // Same + + return db.close() + }) + + test('prewrite hook is triggered bottom-up for nested sublevels', 
async function (t) { + const db = testCommon.factory() + const a = db.sublevel('a') + const b = a.sublevel('b') + const order = [] + const triggers = [ + [['b', 'a', 'root'], () => b.put('a', 'a')], + [['b', 'a', 'root'], () => b.batch([{ type: 'put', key: 'a', value: 'a' }])], + [['b', 'a', 'root'], () => b.batch().put('a', 'a').write()], + [['b', 'a', 'root'], () => b.del('a')], + [['b', 'a', 'root'], () => b.batch([{ type: 'del', key: 'a' }])], + [['b', 'a', 'root'], () => b.batch().del('a').write()], + + [['a', 'root'], () => a.put('a', 'a')], + [['a', 'root'], () => a.batch([{ type: 'put', key: 'a', value: 'a' }])], + [['a', 'root'], () => a.batch().put('a', 'a').write()], + [['a', 'root'], () => a.del('a')], + [['a', 'root'], () => a.batch([{ type: 'del', key: 'a' }])], + [['a', 'root'], () => a.batch().del('a').write()], + + [['root'], () => db.put('a', 'a')], + [['root'], () => db.batch([{ type: 'put', key: 'a', value: 'a' }])], + [['root'], () => db.batch().put('a', 'a').write()], + [['root'], () => db.del('a')], + [['root'], () => db.batch([{ type: 'del', key: 'a' }])], + [['root'], () => db.batch().del('a').write()], + + // The sublevel option should not trigger the prewrite hook + [['root'], () => db.put('a', 'a', { sublevel: a })], + [['root'], () => db.batch([{ type: 'put', key: 'a', value: 'a', sublevel: a }])], + [['root'], () => db.batch().put('a', 'a', { sublevel: a }).write()], + [['root'], () => db.del('a', { sublevel: a })], + [['root'], () => db.batch([{ type: 'del', key: 'a', sublevel: a }])], + [['root'], () => db.batch().del('a', { sublevel: a }).write()] + ] + + t.plan(triggers.length) + + db.hooks.prewrite.add((op, batch) => { order.push('root') }) + a.hooks.prewrite.add((op, batch) => { order.push('a') }) + b.hooks.prewrite.add((op, batch) => { order.push('b') }) + + for (const [expectedOrder, trigger] of triggers) { + await trigger() + t.same(order.splice(0, order.length), expectedOrder) + } + + return db.close() + }) + + test('db 
catches invalid operations added by prewrite hook function', async function (t) { + const db = testCommon.factory() + const errEncoding = { + name: 'test', + format: 'utf8', + encode () { + throw new Error() + }, + decode () { + throw new Error() + } + } + + const hookFunctions = [ + (op, batch) => batch.add(), + (op, batch) => batch.add({}), + (op, batch) => batch.add({ type: 'del' }), + (op, batch) => batch.add({ type: 'del', key: null }), + (op, batch) => batch.add({ type: 'del', key: undefined }), + (op, batch) => batch.add({ type: 'put', key: 'a' }), + (op, batch) => batch.add({ type: 'put', key: 'a', value: null }), + (op, batch) => batch.add({ type: 'put', key: 'a', value: undefined }), + (op, batch) => batch.add({ type: 'nope', key: 'a', value: 'b' }), + (op, batch) => batch.add({ type: 'del', key: 'a', keyEncoding: errEncoding }), + (op, batch) => batch.add({ type: 'put', key: 'a', value: 'b', keyEncoding: errEncoding }), + (op, batch) => batch.add({ type: 'put', key: 'a', value: 'b', valueEncoding: errEncoding }) + ] + + const triggers = [ + () => db.put('beep', 'boop'), + () => db.batch([{ type: 'put', key: 'beep', value: 'boop' }]), + () => db.batch().put('beep', 'boop').write(), + () => db.del('beep'), + () => db.batch([{ type: 'del', key: 'beep' }]), + () => db.batch().del('beep').write() + ] + + t.plan(hookFunctions.length * triggers.length * 2) + + db.on('write', function (ops) { + t.fail('should not write') + }) + + for (const trigger of triggers) { + for (const fn of hookFunctions) { + db.hooks.prewrite.add(fn) + + try { + await trigger() + } catch (err) { + t.is(err.code, 'LEVEL_HOOK_ERROR') + } + + db.hooks.prewrite.delete(fn) + t.is(db.hooks.prewrite.noop, true) + } + } + + return db.close() + }) + + test('prewrite hook function is called once for every input operation', async function (t) { + t.plan(2) + + const calls = [] + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + calls.push(op.key) + }) + + await 
db.batch([{ type: 'del', key: '1' }, { type: 'put', key: '2', value: '123' }]) + t.same(calls.splice(0, calls.length), ['1', '2']) + + await db.batch().del('1').put('2', '123').write() + t.same(calls.splice(0, calls.length), ['1', '2']) + + return db.close() + }) + + test('prewrite hook adds operations after input operations', async function (t) { + t.plan(2) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + if (op.key === 'input1') { + batch + .add({ type: 'del', key: 'hook1' }) + .add({ type: 'del', key: 'hook2' }) + .add({ type: 'put', key: 'hook3', value: 'foo' }) + } + }) + + db.on('write', function (ops) { + t.same(ops.map(op => op.key), [ + 'input1', 'input2', 'hook1', 'hook2', 'hook3' + ], 'order is correct') + }) + + await db.batch([{ type: 'del', key: 'input1' }, { type: 'put', key: 'input2', value: '123' }]) + await db.batch().del('input1').put('input2', '123').write() + + return db.close() + }) + + test('prewrite hook does not copy input options to added operations', async function (t) { + t.plan(6) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ type: 'put', key: 'from-hook-a', value: 'xyz' }) + batch.add({ type: 'del', key: 'from-hook-b' }) + }) + + db.on('write', function (ops) { + const relevant = ops.map(op => { + return { + key: op.key, + hasOption: 'userland' in op, + keyEncoding: op.keyEncoding.commonName + } + }) + + t.same(relevant, [ + { + key: 'input-a', + keyEncoding: 'json', + hasOption: true + }, + { + key: 'from-hook-a', + keyEncoding: 'utf8', // Should be the database default (2x) + hasOption: false + }, + { + key: 'from-hook-b', + keyEncoding: 'utf8', + hasOption: false + } + ]) + }) + + await db.put('input-a', 'boop', { keyEncoding: 'json', userland: 123 }) + await db.batch([{ type: 'put', key: 'input-a', value: 'boop', keyEncoding: 'json', userland: 123 }]) + await db.batch().put('input-a', 'boop', { keyEncoding: 'json', userland: 123 }).write() + 
+ await db.del('input-a', { keyEncoding: 'json', userland: 123 }) + await db.batch([{ type: 'del', key: 'input-a', keyEncoding: 'json', userland: 123 }]) + await db.batch().del('input-a', { keyEncoding: 'json', userland: 123 }).write() + + return db.close() + }) + + test('error thrown from prewrite hook function is catched', async function (t) { + t.plan(6 * 2) + + const db = testCommon.factory() + + db.hooks.prewrite.add(function (op, batch) { + throw new Error('test') + }) + + const verify = (err) => { + t.is(err.code, 'LEVEL_HOOK_ERROR') + t.is(err.cause.message, 'test') + } + + await db.batch([{ type: 'del', key: '1' }]).catch(verify) + await db.batch([{ type: 'put', key: '1', value: '2' }]).catch(verify) + + const batch1 = db.batch() + const batch2 = db.batch() + + try { batch1.del('1') } catch (err) { verify(err) } + try { batch2.put('1', '2') } catch (err) { verify(err) } + + await batch1.close() + await batch2.close() + + await db.del('1').catch(verify) + await db.put('1', '2').catch(verify) + + return db.close() + }) + + test('operations added by prewrite hook function count towards chained batch length', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ type: 'del', key: 'hook1' }) + }) + + const batch = db.batch() + + batch.del('input1') + t.is(batch.length, 2) + + batch.put('input2', 'foo') + t.is(batch.length, 4) + + await batch.close() + return db.close() + }) + + test('operations added by prewrite hook function can be cleared from chained batch', async function (t) { + t.plan(3) + + const db = testCommon.factory() + await db.open() + + db.hooks.prewrite.add(function (op, batch) { + batch.add({ type: 'put', key: 'x', value: 'y' }) + }) + + const batch = db.batch() + + batch.del('a') + t.is(batch.length, 2) + + batch.clear() + t.is(batch.length, 0) + + db.on('write', t.fail.bind(t)) + await batch.write() + + t.same(await db.keys().all(), [], 'did not 
write to db') + return db.close() + }) + + test('prewrite hook function is not called for earlier chained batch', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + const calls = [] + const batchBefore = db.batch() + + db.hooks.prewrite.add(function (op, batch) { + calls.push(op.key) + }) + + batchBefore.del('before') + t.same(calls, []) + + const batchAfter = db.batch() + batchAfter.del('after') + t.same(calls, ['after']) + + await Promise.all([batchBefore.close(), batchAfter.close()]) + return db.close() + }) + + // See https://github.com/Level/abstract-level/issues/80 + test('prewrite hook function can write to nondescendant sublevel', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + + const textDecoder = new TextDecoder() + const books = db.sublevel('books', { valueEncoding: 'json' }) + const index = db.sublevel('authors', { + // Use JSON, which normally doesn't make sense for keys but + // helps to assert that there's no double encoding happening. + keyEncoding: 'json' + }) + + db.on('write', (ops) => { + // Check that data is written to correct sublevels, specifically + // !authors!Hesse~12 rather than !books!!authors!Hesse~12. + t.same(ops.map(x => decode(x.key)), ['!books!12', '!authors!"Hesse~12"']) + + // It's unfortunate DX but because the write is made via the sublevel, the + // format of keys depends on the supported encodings of db. For example on + // a MemoryLevel({ storeEncoding: 'buffer' }) the key will be a buffer. + function decode (key) { + return db.keyEncoding('utf8').format === 'utf8' ? 
key : textDecoder.decode(key) + } + }) + + books.on('write', (ops) => { + // Should not include the op of the index + t.same(ops.map(x => x.key), ['12']) + }) + + index.on('write', (ops) => { + t.fail('Did not expect an event on index') + }) + + books.hooks.prewrite.add(function (op, batch) { + if (op.type === 'put') { + batch.add({ + type: 'put', + // Key structure is synthetic and not relevant to the test + key: op.value.author + '~' + op.key, + value: '', + sublevel: index + }) + } + }) + + await books.put('12', { title: 'Siddhartha', author: 'Hesse' }) + }) +} diff --git a/test/hooks/shared.js b/test/hooks/shared.js new file mode 100644 index 0000000..e0a7300 --- /dev/null +++ b/test/hooks/shared.js @@ -0,0 +1,38 @@ +'use strict' + +module.exports = function (test, testCommon, hook) { + test(`can add and remove functions to/from ${hook} hook`, async function (t) { + const db = testCommon.factory() + const fn1 = function () {} + const fn2 = function () {} + + t.is(db.hooks[hook].noop, true, 'is initially a noop') + t.is(typeof db.hooks[hook].run, 'function') + + db.hooks[hook].add(fn1) + t.is(db.hooks[hook].noop, false, 'not a noop') + t.is(typeof db.hooks[hook].run, 'function') + + db.hooks[hook].add(fn2) + t.is(db.hooks[hook].noop, false, 'not a noop') + t.is(typeof db.hooks[hook].run, 'function') + + db.hooks[hook].delete(fn1) + t.is(db.hooks[hook].noop, false, 'not a noop') + t.is(typeof db.hooks[hook].run, 'function') + + db.hooks[hook].delete(fn2) + t.is(db.hooks[hook].noop, true, 'is a noop again') + t.is(typeof db.hooks[hook].run, 'function') + + for (const invalid of [null, undefined, 123]) { + t.throws(() => db.hooks[hook].add(invalid), (err) => err.name === 'TypeError') + t.throws(() => db.hooks[hook].delete(invalid), (err) => err.name === 'TypeError') + } + + t.is(db.hooks[hook].noop, true, 'still a noop') + t.is(typeof db.hooks[hook].run, 'function') + + return db.close() + }) +} diff --git a/test/index.js b/test/index.js new file mode 100644 index 
// --- test/index.js ---
'use strict'

const common = require('./common')
const kSublevels = Symbol('sublevels')

// Assembles the full abstract-level test suite, gating optional groups on
// the manifest (testCommon.supports), then re-runs itself once on a sublevel.
function suite (options) {
  const testCommon = common(options)
  const test = testCommon.test

  require('./factory-test')(test, testCommon)
  require('./manifest-test')(test, testCommon)
  require('./open-test').all(test, testCommon)

  if (testCommon.supports.createIfMissing) {
    require('./open-create-if-missing-test').all(test, testCommon)
  }

  if (testCommon.supports.errorIfExists) {
    require('./open-error-if-exists-test').all(test, testCommon)
  }

  require('./put-test').all(test, testCommon)
  require('./get-test').all(test, testCommon)
  require('./del-test').all(test, testCommon)
  require('./put-get-del-test').all(test, testCommon)
  require('./get-many-test').all(test, testCommon)

  if (testCommon.supports.has) {
    require('./has-test').all(test, testCommon)
    require('./has-many-test').all(test, testCommon)
  }

  require('./batch-test').all(test, testCommon)
  require('./chained-batch-test').all(test, testCommon)

  require('./iterator-test').all(test, testCommon)
  require('./iterator-range-test').all(test, testCommon)
  require('./async-iterator-test').all(test, testCommon)
  require('./iterator-seek-test').all(test, testCommon)

  require('./deferred-open-test').all(test, testCommon)
  require('./encoding-test').all(test, testCommon)
  require('./encoding-json-test').all(test, testCommon)
  require('./encoding-custom-test').all(test, testCommon)
  require('./encoding-buffer-test').all(test, testCommon)
  require('./encoding-decode-error-test').all(test, testCommon)

  if (testCommon.supports.implicitSnapshots) {
    require('./iterator-snapshot-test').all(test, testCommon)
  } else {
    require('./iterator-no-snapshot-test').all(test, testCommon)
  }

  if (testCommon.supports.explicitSnapshots) {
    require('./iterator-explicit-snapshot-test').all(test, testCommon)
  }

  require('./clear-test').all(test, testCommon)
  require('./clear-range-test').all(test, testCommon)
  require('./sublevel-test').all(test, testCommon)

  require('./events/write')(test, testCommon)
  require('./hooks/postopen')(test, testCommon)
  require('./hooks/newsub')(test, testCommon)
  require('./hooks/prewrite')(test, testCommon)

  // Run the same suite on a sublevel, once (the symbol guards against recursion)
  if (!testCommon.internals[kSublevels]) {
    const factory = testCommon.factory

    suite({
      ...testCommon,
      internals: { [kSublevels]: true },
      factory (opts) {
        return factory().sublevel('test', opts)
      }
    })
  }
}

suite.common = common
module.exports = suite

// --- test/iterator-explicit-snapshot-test.js ---
'use strict'

const traits = require('./traits')
that deferred open is not supported + traits.open('snapshot()', testCommon, { deferred: false }, async function (t, db) { + const snapshot = db.snapshot() + return snapshot.close() + }) + + traits.closed('snapshot()', testCommon, async function (t, db) { + db.snapshot() + }) +} + +exports.get = function (test, testCommon) { + const { testFresh, testClose } = testFactory(test, testCommon) + + testFresh('get() changed entry from snapshot', async function (t, db) { + t.plan(3) + + await db.put('abc', 'before') + const snapshot = db.snapshot() + await db.put('abc', 'after') + + t.is(await db.get('abc'), 'after') + t.is(await db.get('abc', { snapshot }), 'before') + t.is(await db.get('other', { snapshot }), undefined) + + return snapshot.close() + }) + + testFresh('get() deleted entry from snapshot', async function (t, db) { + t.plan(3) + + await db.put('abc', 'before') + const snapshot = db.snapshot() + await db.del('abc') + + t.is(await db.get('abc'), undefined) + t.is(await db.get('abc', { snapshot }), 'before') + t.is(await db.get('other', { snapshot }), undefined) + + return snapshot.close() + }) + + testFresh('get() non-existent entry from snapshot', async function (t, db) { + t.plan(2) + + const snapshot = db.snapshot() + await db.put('abc', 'after') + + t.is(await db.get('abc'), 'after') + t.is(await db.get('abc', { snapshot }), undefined) + + return snapshot.close() + }) + + testFresh('get() entries from multiple snapshots', async function (t, db) { + const snapshots = [] + const iterations = 100 + + t.plan(iterations) + + for (let i = 0; i < iterations; i++) { + await db.put('number', i.toString()) + snapshots.push(db.snapshot()) + } + + for (let i = 0; i < iterations; i++) { + const snapshot = snapshots[i] + const value = i.toString() + + t.is(await db.get('number', { snapshot }), value) + } + + return Promise.all(snapshots.map(x => x.close())) + }) + + testFresh('get() entries from snapshot after closing another', async function (t, db) { + await 
db.put('abc', 'before') + + const snapshot1 = db.snapshot() + const snapshot2 = db.snapshot() + + await db.put('abc', 'after') + await snapshot1.close() + + // Closing one snapshot should not affect the other + t.is(await db.get('abc', { snapshot: snapshot2 }), 'before') + + return snapshot2.close() + }) + + testClose('get()', async function (db, snapshot) { + return db.get('xyz', { snapshot }) + }) +} + +exports.getMany = function (test, testCommon) { + const { testFresh, testClose } = testFactory(test, testCommon) + + testFresh('getMany() entries from snapshot', async function (t, db) { + t.plan(3) + + await db.put('a', '1') + await db.put('b', '2') + await db.put('c', '3') + + const snapshot = db.snapshot() + + await db.put('a', 'abc') + await db.del('b') + await db.put('c', 'xyz') + + t.same(await db.getMany(['a', 'b', 'c']), ['abc', undefined, 'xyz']) + t.same(await db.getMany(['a', 'b', 'c'], { snapshot }), ['1', '2', '3']) + t.same(await db.getMany(['a', 'b', 'c']), ['abc', undefined, 'xyz'], 'no side effects') + + return snapshot.close() + }) + + testClose('getMany()', async function (db, snapshot) { + return db.getMany(['xyz'], { snapshot }) + }) +} + +exports.iterator = function (test, testCommon) { + const { testFresh, testClose } = testFactory(test, testCommon) + + testFresh('iterator(), keys(), values() with snapshot', async function (t, db) { + t.plan(10) + + await db.put('a', '1') + await db.put('b', '2') + await db.put('c', '3') + + const snapshot = db.snapshot() + + await db.put('a', 'after') + await db.del('b') + await db.put('c', 'after') + await db.put('d', 'after') + + t.same( + await db.iterator().all(), + [['a', 'after'], ['c', 'after'], ['d', 'after']], + 'data was written' + ) + + for (const fn of [all, nextv, next]) { + t.same(await fn(db.iterator({ snapshot })), [['a', '1'], ['b', '2'], ['c', '3']], 'iterator') + t.same(await fn(db.keys({ snapshot })), ['a', 'b', 'c'], 'keys') + t.same(await fn(db.values({ snapshot })), ['1', '2', '3'], 
'values') + } + + async function all (iterator) { + return iterator.all() + } + + async function nextv (iterator) { + try { + return iterator.nextv(10) + } finally { + await iterator.close() + } + } + + async function next (iterator) { + try { + const entries = [] + let entry + + while ((entry = await iterator.next()) !== undefined) { + entries.push(entry) + } + + return entries + } finally { + await iterator.close() + } + } + + return snapshot.close() + }) + + // Test that every iterator type and read method checks snapshot state + for (const type of ['iterator', 'keys', 'values']) { + testClose(`${type}().all()`, async function (db, snapshot) { + return db[type]({ snapshot }).all() + }) + + testClose(`${type}().next()`, async function (db, snapshot) { + const iterator = db[type]({ snapshot }) + + try { + await iterator.next() + } finally { + iterator.close() + } + }) + + testClose(`${type}().nextv()`, async function (db, snapshot) { + const iterator = db[type]({ snapshot }) + + try { + await iterator.nextv(10) + } finally { + iterator.close() + } + }) + } +} + +exports.clear = function (test, testCommon) { + const { testFresh, testClose } = testFactory(test, testCommon) + + testFresh('clear() entries from snapshot', async function (t, db) { + t.plan(2) + + await db.put('a', 'xyz') + const snapshot = db.snapshot() + + await db.put('b', 'xyz') + await db.clear({ snapshot }) + + t.same(await db.keys().all(), ['b']) + t.same(await db.keys({ snapshot }).all(), ['a']) + + return snapshot.close() + }) + + testFresh('clear() entries from empty snapshot', async function (t, db) { + t.plan(2) + + const snapshot = db.snapshot() + + await db.put('a', 'xyz') + await db.clear({ snapshot }) + + t.same(await db.keys().all(), ['a']) + t.same(await db.keys({ snapshot }).all(), []) + + return snapshot.close() + }) + + testClose('clear()', async function (db, snapshot) { + return db.clear({ snapshot }) + }) +} + +exports.cleanup = function (test, testCommon) { + test('snapshot is 
closed on database close', async function (t) { + t.plan(1) + + const db = testCommon.factory() + await db.open() + const snapshot = db.snapshot() + const promise = db.close() + + try { + snapshot.ref() + } catch (err) { + t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN') + } + + return promise + }) + + test('snapshot is closed along with iterator', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + await db.put('beep', 'boop') + + // These resources have a potentially tricky relationship. If all is well, + // db.close() calls both snapshot.close() and iterator.close() in parallel, + // and snapshot.close() and iterator.close() wait on the read. Crucially, + // closing the snapshot only waits for individual operations on the iterator + // rather than for the entire iterator to be closed (which may never happen). + const snapshot = db.snapshot() + const iterator = db.iterator({ snapshot }) + const readPromise = iterator.all() + const closePromise = db.close() + + try { + snapshot.ref() + } catch (err) { + t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN', 'snapshot is closing') + } + + try { + await iterator.next() + } catch (err) { + // Effectively also asserts that the LEVEL_ITERATOR_NOT_OPEN error takes + // precedence over LEVEL_SNAPSHOT_NOT_OPEN. 
+ t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', 'iterator is closing') + } + + return Promise.all([readPromise, closePromise]) + }) +} + +exports.dispose = function (test, testCommon) { + // Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management) + Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) { + const db = testCommon.factory() + await db.open() + + const snapshot = db.snapshot() + await snapshot[Symbol.asyncDispose]() + + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.traits(test, testCommon) + exports.get(test, testCommon) + exports.getMany(test, testCommon) + exports.iterator(test, testCommon) + exports.clear(test, testCommon) + exports.cleanup(test, testCommon) + exports.dispose(test, testCommon) +} + +function testFactory (test, testCommon) { + const testFresh = function (name, run) { + test(name, async function (t) { + const db = testCommon.factory() + await db.open() + await run(t, db) + return db.close() + }) + } + + const testClose = function (name, run) { + testFresh(`${name} after closing snapshot`, async function (t, db) { + t.plan(1) + + const snapshot = db.snapshot() + await snapshot.close() + + try { + await run(db, snapshot) + } catch (err) { + t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN') + } + }) + + testFresh(`${name} while closing snapshot`, async function (t, db) { + t.plan(1) + + const snapshot = db.snapshot() + const promise = snapshot.close() + + try { + await run(db, snapshot) + } catch (err) { + t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN') + } + + return promise + }) + } + + return { testFresh, testClose } +} diff --git a/test/iterator-no-snapshot-test.js b/test/iterator-no-snapshot-test.js new file mode 100644 index 0000000..3b47e2c --- /dev/null +++ b/test/iterator-no-snapshot-test.js @@ -0,0 +1,49 @@ +'use strict' + +exports.noSnapshot = function (test, testCommon) { + const make = (run) => async function (t) { + const db = testCommon.factory() + const 
operations = [ + { type: 'put', key: 'a', value: 'a' }, + { type: 'put', key: 'b', value: 'b' }, + { type: 'put', key: 'c', value: 'c' } + ] + + await db.open() + await db.batch(operations) + + // For this test it is important that we don't read eagerly. + // NOTE: highWaterMarkBytes is not an abstract option, but + // it is supported by classic-level and others. Also set the + // old & equivalent leveldown highWaterMark option for compat. + const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 }) + + await run(db) + await verify(t, it, db) + + return db.close() + } + + async function verify (t, it, db) { + const entries = await it.all() + const kv = entries.map(([key, value]) => key + value) + + if (kv.length === 3) { + t.same(kv, ['aa', 'bb', 'cc'], 'maybe supports snapshots') + } else { + t.same(kv, ['aa', 'cc'], 'ignores keys that have been deleted in the mean time') + } + } + + test('delete key after creating iterator', make(async function (db) { + return db.del('b') + })) + + test('batch delete key after creating iterator', make(async function (db) { + return db.batch([{ type: 'del', key: 'b' }]) + })) +} + +exports.all = function (test, testCommon) { + exports.noSnapshot(test, testCommon) +} diff --git a/test/iterator-range-test.js b/test/iterator-range-test.js new file mode 100644 index 0000000..d099893 --- /dev/null +++ b/test/iterator-range-test.js @@ -0,0 +1,277 @@ +'use strict' + +let db + +const data = (function () { + const d = [] + let i = 0 + let k + for (; i < 100; i++) { + k = (i < 10 ? 
'0' : '') + i + d.push({ + key: k, + value: String(Math.random()) + }) + } + return d +}()) + +exports.setUp = function (test, testCommon) { + test('iterator() range setup', async function (t) { + db = testCommon.factory() + await db.open() + return db.batch(data.map(function ({ key, value }) { + return { type: 'put', key, value } + })) + }) +} + +exports.range = function (test, testCommon) { + function rangeTest (name, opts, expected) { + test('iterator() range with ' + name, async function (t) { + const entries = await db.iterator(opts).all() + + t.is(entries.length, expected.length, 'correct number of entries') + t.same(entries, expected.map(o => [o.key, o.value])) + }) + + // Test the documented promise that in reverse mode, + // "the returned entries are the same, but in reverse". + if (!opts.reverse && !('limit' in opts)) { + const reverseOpts = { ...opts, reverse: true } + + rangeTest( + name + ' (flipped)', + reverseOpts, + expected.slice().reverse() + ) + } + } + + rangeTest('no options', {}, data) + + rangeTest('reverse=true', { + reverse: true + }, data.slice().reverse()) + + rangeTest('gte=00', { + gte: '00' + }, data) + + rangeTest('gte=50', { + gte: '50' + }, data.slice(50)) + + rangeTest('lte=50 and reverse=true', { + lte: '50', + reverse: true + }, data.slice().reverse().slice(49)) + + rangeTest('gte=49.5 (midway)', { + gte: '49.5' + }, data.slice(50)) + + rangeTest('gte=49999 (midway)', { + gte: '49999' + }, data.slice(50)) + + rangeTest('lte=49.5 (midway) and reverse=true', { + lte: '49.5', + reverse: true + }, data.slice().reverse().slice(50)) + + rangeTest('lt=49.5 (midway) and reverse=true', { + lt: '49.5', + reverse: true + }, data.slice().reverse().slice(50)) + + rangeTest('lt=50 and reverse=true', { + lt: '50', + reverse: true + }, data.slice().reverse().slice(50)) + + rangeTest('lte=50', { + lte: '50' + }, data.slice(0, 51)) + + rangeTest('lte=50.5 (midway)', { + lte: '50.5' + }, data.slice(0, 51)) + + rangeTest('lte=50555 (midway)', { + 
lte: '50555' + }, data.slice(0, 51)) + + rangeTest('lt=50555 (midway)', { + lt: '50555' + }, data.slice(0, 51)) + + rangeTest('gte=50.5 (midway) and reverse=true', { + gte: '50.5', + reverse: true + }, data.slice().reverse().slice(0, 49)) + + rangeTest('gt=50.5 (midway) and reverse=true', { + gt: '50.5', + reverse: true + }, data.slice().reverse().slice(0, 49)) + + rangeTest('gt=50 and reverse=true', { + gt: '50', + reverse: true + }, data.slice().reverse().slice(0, 49)) + + // first key is actually '00' so it should avoid it + rangeTest('lte=0', { + lte: '0' + }, []) + + // first key is actually '00' so it should avoid it + rangeTest('lt=0', { + lt: '0' + }, []) + + rangeTest('gte=30 and lte=70', { + gte: '30', + lte: '70' + }, data.slice(30, 71)) + + // The gte and lte options should take precedence over gt and lt respectively. + rangeTest('gte=30 and lte=70 and gt=40 and lt=60', { + gte: '30', + lte: '70', + gt: '40', + lt: '60' + }, data.slice(30, 71)) + + // Also test the other way around: if gt and lt were to select a bigger range. 
+ rangeTest('gte=30 and lte=70 and gt=20 and lt=80', { + gte: '30', + lte: '70', + gt: '20', + lt: '80' + }, data.slice(30, 71)) + + rangeTest('gt=29 and lt=71', { + gt: '29', + lt: '71' + }, data.slice(30, 71)) + + rangeTest('gte=30 and lte=70 and reverse=true', { + lte: '70', + gte: '30', + reverse: true + }, data.slice().reverse().slice(29, 70)) + + rangeTest('gt=29 and lt=71 and reverse=true', { + lt: '71', + gt: '29', + reverse: true + }, data.slice().reverse().slice(29, 70)) + + rangeTest('limit=20', { + limit: 20 + }, data.slice(0, 20)) + + rangeTest('limit=20 and gte=20', { + limit: 20, + gte: '20' + }, data.slice(20, 40)) + + rangeTest('limit=20 and reverse=true', { + limit: 20, + reverse: true + }, data.slice().reverse().slice(0, 20)) + + rangeTest('limit=20 and lte=79 and reverse=true', { + limit: 20, + lte: '79', + reverse: true + }, data.slice().reverse().slice(20, 40)) + + // the default limit value is -1 + rangeTest('limit=-1 (all)', { + limit: -1 + }, data) + + rangeTest('limit=0 (empty)', { + limit: 0 + }, []) + + rangeTest('lte after limit', { + limit: 20, + lte: '50' + }, data.slice(0, 20)) + + rangeTest('lte before limit', { + limit: 50, + lte: '19' + }, data.slice(0, 20)) + + rangeTest('gte after database end', { + gte: '9a' + }, []) + + rangeTest('gt after database end', { + gt: '9a' + }, []) + + rangeTest('lte after database end and reverse=true', { + lte: '9a', + reverse: true + }, data.slice().reverse()) + + rangeTest('lt after database end', { + lt: 'a' + }, data.slice()) + + rangeTest('lt at database end', { + lt: data[data.length - 1].key + }, data.slice(0, -1)) + + rangeTest('lte at database end', { + lte: data[data.length - 1].key + }, data.slice()) + + rangeTest('lt before database end', { + lt: data[data.length - 2].key + }, data.slice(0, -2)) + + rangeTest('lte before database end', { + lte: data[data.length - 2].key + }, data.slice(0, -1)) + + rangeTest('lte and gte after database and reverse=true', { + lte: '9b', + gte: '9a', + 
reverse: true + }, []) + + rangeTest('lt and gt after database and reverse=true', { + lt: '9b', + gt: '9a', + reverse: true + }, []) + + rangeTest('gt greater than lt', { + gt: '20', + lt: '10' + }, []) + + rangeTest('gte greater than lte', { + gte: '20', + lte: '10' + }, []) +} + +exports.tearDown = function (test, testCommon) { + test('iterator() range teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.range(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/iterator-seek-test.js b/test/iterator-seek-test.js new file mode 100644 index 0000000..e146b30 --- /dev/null +++ b/test/iterator-seek-test.js @@ -0,0 +1,335 @@ +'use strict' + +const { Buffer } = require('buffer') +const identity = (v) => v + +exports.all = function (test, testCommon) { + exports.sequence(test, testCommon) + exports.seek(test, testCommon) +} + +exports.sequence = function (test, testCommon) { + for (const deferred of [false, true]) { + for (const mode of ['iterator', 'keys', 'values']) { + test(`${mode}().seek() throws if next() has not completed (deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + if (!deferred) await db.open() + + const it = db[mode]() + const promise = it.next() + + t.throws(() => it.seek('two'), (err) => err.code === 'LEVEL_ITERATOR_BUSY') + + await promise + await db.close() + }) + + test(`${mode}().seek() does not throw after close() (deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + if (!deferred) await db.open() + + const it = db[mode]() + await it.close() + + t.doesNotThrow(() => it.seek('two')) + + await db.close() + }) + } + } +} + +exports.seek = function (test, testCommon) { + const testData = () => [ + // Note that 'three' sorts before 'two' + { type: 'put', key: 'one', value: '1' }, + { type: 'put', key: 'two', value: '2' }, + { type: 'put', key: 'three', value: '3' } + ] + + 
const bufferTestData = () => [ + // Note that 'b9' sorts before 'c0' + { type: 'put', key: Buffer.from('80', 'hex'), value: '1', keyEncoding: 'buffer' }, + { type: 'put', key: Buffer.from('c0', 'hex'), value: '2', keyEncoding: 'buffer' }, + { type: 'put', key: Buffer.from('b9', 'hex'), value: '3', keyEncoding: 'buffer' } + ] + + test('prepare byte-aware tests', function (t) { + const data = bufferTestData() + t.ok(data[0].key.toString() === data[1].key.toString(), 'would be equal when not byte-aware') + t.ok(data[0].key.compare(data[1].key) < 0, 'but less than when byte-aware') + t.end() + }) + + for (const mode of ['iterator', 'keys', 'values']) { + const mapEntry = mode === 'iterator' ? e => e : mode === 'keys' ? e => e[0] : e => e[1] + + test(`${mode}().seek() to string target`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]() + + it.seek('two') + + t.same(await it.next(), mapEntry(['two', '2']), 'match') + t.same(await it.next(), undefined, 'end of iterator') + + return db.close() + }) + + if (testCommon.supports.encodings.buffer) { + test(`${mode}().seek() to buffer target`, async function (t) { + // For this test to be meaningful it must use bytes outside the utf8 range + const data = bufferTestData() + const db = testCommon.factory() + await db.batch(data) + const it = db[mode]({ keyEncoding: 'buffer' }) + + // Seek to second key + it.seek(data[1].key) + + t.same(await it.next(), mapEntry([data[1].key, '2']), 'match') + t.same(await it.next(), undefined, 'end of iterator') + + return db.close() + }) + } + + test(`${mode}().seek() to target with custom encoding`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]() + const keyEncoding = { encode: () => 'two', decode: identity, format: 'utf8' } + + it.seek('xyz', { keyEncoding }) + + t.same(await it.next(), mapEntry(['two', '2']), 'match') + t.same(await it.next(), undefined, 'end of iterator') + + 
return db.close() + }) + + test(`${mode}().seek() on reverse iterator`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]({ reverse: true, limit: 1 }) + + // Should land on key equal to or smaller than 'three!' which is 'three' + it.seek('three!') + + t.same(await it.next(), mapEntry(['three', '3']), 'match') + t.same(await it.next(), undefined, 'end of iterator') + + return db.close() + }) + + test(`${mode}().seek() to out of range target`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]() + + it.seek('zzz') + t.same(await it.next(), undefined, 'end of iterator') + + return db.close() + }) + + test(`${mode}().seek() on reverse iterator to out of range target`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]({ reverse: true }) + + it.seek('zzz') + + t.same(await it.next(), mapEntry(['two', '2']), 'match') + t.same(await it.next(), mapEntry(['three', '3']), 'match') + t.same(await it.next(), mapEntry(['one', '1']), 'match') + t.same(await it.next(), undefined, 'end of iterator') + + return db.close() + }) + + test(`${mode}().seek() can be used to iterate twice`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const it = db[mode]() + + t.same(await it.nextv(10), [['one', '1'], ['three', '3'], ['two', '2']].map(mapEntry), 'match') + t.same(await it.nextv(10), [], 'end of iterator') + + it.seek('one') + + t.same(await it.nextv(10), [['one', '1'], ['three', '3'], ['two', '2']].map(mapEntry), 'match again') + t.same(await it.nextv(10), [], 'end of iterator again') + + await it.close() + return db.close() + }) + + test(`${mode}().seek() can be used to iterate twice, within limit`, async function (t) { + const db = testCommon.factory() + await db.batch(testData()) + const limit = 4 + const it = db[mode]({ limit }) + + t.same(await it.nextv(10), [['one', '1'], 
['three', '3'], ['two', '2']].map(mapEntry), 'match') + t.same(await it.nextv(10), [], 'end of iterator') + + it.seek('one') + + t.same(await it.nextv(10), [['one', '1']].map(mapEntry), 'limit reached') + t.same(await it.nextv(10), [], 'end of iterator') + + it.seek('one') + t.same(await it.nextv(10), [], 'does not reset after limit has been reached') + + await it.close() + return db.close() + }) + + if (testCommon.supports.implicitSnapshots) { + for (const reverse of [false, true]) { + for (const deferred of [false, true]) { + test(`${mode}().seek() respects snapshot (reverse: ${reverse}, deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + if (!deferred) await db.open() + + const it = db[mode]({ reverse }) + + // Add entry after having created the iterator (and its snapshot) + await db.put('a', 'a') + + // Seeking should not create a new snapshot, which'd include the new entry + it.seek('a') + t.same(await it.next(), undefined) + + return db.close() + }) + } + } + } + + test(`${mode}().seek() respects range`, async function (t) { + const db = testCommon.factory() + await db.open() + const ops = [] + + for (let i = 0; i < 10; i++) { + ops.push({ type: 'put', key: String(i), value: String(i) }) + } + + await db.batch(ops) + const promises = [] + + expect({ gt: '5' }, '4', undefined) + expect({ gt: '5' }, '5', undefined) + expect({ gt: '5' }, '6', '6') + + expect({ gte: '5' }, '4', undefined) + expect({ gte: '5' }, '5', '5') + expect({ gte: '5' }, '6', '6') + + // The gte option should take precedence over gt. 
+ expect({ gte: '5', gt: '7' }, '4', undefined) + expect({ gte: '5', gt: '7' }, '5', '5') + expect({ gte: '5', gt: '7' }, '6', '6') + expect({ gte: '5', gt: '3' }, '4', undefined) + expect({ gte: '5', gt: '3' }, '5', '5') + expect({ gte: '5', gt: '3' }, '6', '6') + + expect({ lt: '5' }, '4', '4') + expect({ lt: '5' }, '5', undefined) + expect({ lt: '5' }, '6', undefined) + + expect({ lte: '5' }, '4', '4') + expect({ lte: '5' }, '5', '5') + expect({ lte: '5' }, '6', undefined) + + // The lte option should take precedence over lt. + expect({ lte: '5', lt: '3' }, '4', '4') + expect({ lte: '5', lt: '3' }, '5', '5') + expect({ lte: '5', lt: '3' }, '6', undefined) + expect({ lte: '5', lt: '7' }, '4', '4') + expect({ lte: '5', lt: '7' }, '5', '5') + expect({ lte: '5', lt: '7' }, '6', undefined) + + expect({ lt: '5', reverse: true }, '4', '4') + expect({ lt: '5', reverse: true }, '5', undefined) + expect({ lt: '5', reverse: true }, '6', undefined) + + expect({ lte: '5', reverse: true }, '4', '4') + expect({ lte: '5', reverse: true }, '5', '5') + expect({ lte: '5', reverse: true }, '6', undefined) + + expect({ gt: '5', reverse: true }, '4', undefined) + expect({ gt: '5', reverse: true }, '5', undefined) + expect({ gt: '5', reverse: true }, '6', '6') + + expect({ gte: '5', reverse: true }, '4', undefined) + expect({ gte: '5', reverse: true }, '5', '5') + expect({ gte: '5', reverse: true }, '6', '6') + + expect({ gt: '7', lt: '8' }, '7', undefined) + expect({ gte: '7', lt: '8' }, '7', '7') + expect({ gte: '7', lt: '8' }, '8', undefined) + expect({ gt: '7', lte: '8' }, '8', '8') + + await Promise.all(promises) + return db.close() + + function expect (range, target, expected) { + promises.push(async function () { + const ite = db[mode](range) + ite.seek(target) + + const item = await ite.next() + const json = JSON.stringify(range) + const msg = 'seek(' + target + ') on ' + json + ' yields ' + expected + + // Either a key or value depending on mode + t.is(mode === 'iterator' ? 
item[0] : item, expected, msg) + + return ite.close() + }) + } + }) + + // Tests the specific case where an iterator can (theoretically) tell that + // a seek() would be out of range by comparing the seek target against + // range options, before performing an actual seek. MemoryLevel works this + // way for example. Also test the same scenario without an explicit seek() + // which should have the same result. + for (const reverse of [false, true]) { + for (const seek of [true, false]) { + const props = `reverse = ${reverse}, seek = ${seek}` + const name = `${mode}() seek outside of range options (${props})` + const key = 'a' + + test(name, async function (t) { + const db = testCommon.factory() + + await db.open() + await db.put(key, '123') + + // Pick ranges that exclude the key + const ranges = [ + { gt: 'x', reverse }, + { gte: 'x', reverse }, + { lt: '0', reverse }, + { lte: '0', reverse } + ] + + // Test each range + for (let i = 0; i < ranges.length; i++) { + const iterator = db[mode](ranges[i]) + if (seek) iterator.seek(key) + t.same(await iterator.next(), undefined, `end of iterator ${i}`) + await iterator.close() + } + + return db.close() + }) + } + } + } +} diff --git a/test/iterator-snapshot-test.js b/test/iterator-snapshot-test.js new file mode 100644 index 0000000..67fe853 --- /dev/null +++ b/test/iterator-snapshot-test.js @@ -0,0 +1,56 @@ +'use strict' + +exports.snapshot = function (test, testCommon) { + const make = (run) => async function (t) { + const db = testCommon.factory() + await db.open() + await db.put('z', 'from snapshot') + + // For this test it is important that we don't read eagerly. + // NOTE: highWaterMarkBytes is not an abstract option, but + // it is supported by classic-level and others. Also set the + // old & equivalent leveldown highWaterMark option for compat. 
+ const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 }) + + await run(t, db, it) + await it.close() + + return db.close() + } + + test('delete key after snapshotting', make(async function (t, db, it) { + await db.del('z') + t.same(await it.next(), ['z', 'from snapshot'], 'correct entry') + })) + + test('overwrite key after snapshotting', make(async function (t, db, it) { + await db.put('z', 'not from snapshot') + t.same(await it.next(), ['z', 'from snapshot'], 'correct entry') + })) + + test('add key after snapshotting that sorts first', make(async function (t, db, it) { + await db.put('a', 'not from snapshot') + t.same(await it.next(), ['z', 'from snapshot'], 'correct entry') + })) + + // NOTE: adapted from memdown + test('delete key after snapshotting, with more entries available', async function (t) { + const db = testCommon.factory() + await db.open() + await Promise.all([db.put('a', 'A'), db.put('b', 'B'), db.put('c', 'C')]) + + const iterator = db.iterator({ gte: 'a' }) + t.same(await iterator.next(), ['a', 'A']) + + await db.del('b') + t.same(await iterator.next(), ['b', 'B']) + t.same(await iterator.next(), ['c', 'C']) + + await iterator.close() + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.snapshot(test, testCommon) +} diff --git a/test/iterator-test.js b/test/iterator-test.js new file mode 100644 index 0000000..d66c791 --- /dev/null +++ b/test/iterator-test.js @@ -0,0 +1,621 @@ +'use strict' + +const { Buffer } = require('buffer') +const identity = (v) => v + +let db + +exports.setUp = function (test, testCommon) { + test('iterator setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + for (const mode of ['iterator', 'keys', 'values']) { + test(`${mode}() has db reference`, async function (t) { + const it = db[mode]() + + // May return iterator of an underlying db, that's okay. 
+ t.ok(it.db === db || it.db === (db.db || db._db || db)) + + await it.close() + }) + + test(`${mode}() has limit and count properties`, async function (t) { + const iterators = [db[mode]()] + t.is(iterators[0].limit, Infinity, 'defaults to infinite') + + for (const limit of [-1, 0, 1, Infinity]) { + const it = db[mode]({ limit }) + iterators.push(it) + t.is(it.limit, limit === -1 ? Infinity : limit, 'has limit property') + } + + t.ok(iterators.every(it => it.count === 0), 'has count property') + await Promise.all(iterators.map(it => it.close())) + }) + + test(`${mode}().nextv() yields error if size is invalid`, async function (t) { + t.plan(4) + + const it = db[mode]() + + for (const args of [[], [NaN], ['1'], [2.5]]) { + try { + await it.nextv(...args) + } catch (err) { + t.is(err.message, "The first argument 'size' must be an integer") + } + } + + await it.close() + }) + } +} + +exports.sequence = function (test, testCommon) { + for (const mode of ['iterator', 'keys', 'values']) { + test(`${mode}().close() is idempotent`, async function (t) { + const iterator = db[mode]() + + await iterator.close() + await iterator.close() + + return Promise.all([iterator.close(), iterator.close()]) + }) + + for (const method of ['next', 'nextv', 'all']) { + const requiredArgs = method === 'nextv' ? [1] : [] + + test(`${mode}().${method}() after close() yields error`, async function (t) { + t.plan(1) + + const iterator = db[mode]() + await iterator.close() + + try { + await iterator[method](...requiredArgs) + } catch (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', 'correct message') + } + }) + + for (const otherMethod of ['next', 'nextv', 'all']) { + const otherRequiredArgs = otherMethod === 'nextv' ? 
[1] : [] + + test(`${mode}().${method}() while busy with ${otherMethod}() yields error`, async function (t) { + t.plan(1) + + const iterator = db[mode]() + const promise = iterator[otherMethod](...otherRequiredArgs) + + try { + await iterator[method](...requiredArgs) + } catch (err) { + t.is(err.code, 'LEVEL_ITERATOR_BUSY') + } + + await promise + return iterator.close() + }) + } + + for (const deferred of [false, true]) { + test(`${mode}().${method}() during close() yields error (deferred: ${deferred})`, async function (t) { + t.plan(2) + + const db = testCommon.factory() + if (!deferred) await db.open() + const it = db[mode]() + + // The first call *may* succeed, because it was scheduled before close(). The + // default implementations of nextv() and all() fallback to next*() and thus + // make multiple calls, so they're allowed to fail. + let promise = it[method](...requiredArgs).then(() => { + t.pass('Optionally succeeded') + }, (err) => { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }) + + // The second call *must* fail, because it was scheduled after close() + promise = promise.then(() => { + return it[method](...requiredArgs).then(() => { + t.fail('Expected an error') + }, (err) => { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + }) + }) + + await Promise.all([it.close(), promise]) + return db.close() + }) + } + + // 1) At the moment, we can only be sure that signals are supported if the iterator is deferred + if (globalThis.AbortController) { + test(`${mode}().${method}() with aborted signal yields error (deferred)`, async function (t) { + t.plan(3) + + const db = testCommon.factory() + const ac = new globalThis.AbortController() + const it = db[mode]({ signal: ac.signal }) + + t.is(db.status, 'opening', 'is deferred') + ac.abort() + + try { + await it[method](...requiredArgs) + } catch (err) { + t.is(err.code, 'LEVEL_ABORTED') + t.is(err.name, 'AbortError') + } + + await it.close() + return db.close() + }) + } + + // 2) Unless the implementation opts-in + 
if (globalThis.AbortController && testCommon.supports.signals && testCommon.supports.signals.iterators) { + test(`${mode}().${method}() with signal yields error when aborted`, async function (t) { + t.plan(2) + + const db = testCommon.factory() + + await db.open() + await db.batch().put('a', 'a').put('b', 'b').write() + + const ac = new globalThis.AbortController() + const it = db[mode]({ signal: ac.signal }) + const promise = it[method](...requiredArgs) + + ac.abort() + + try { + await promise + } catch (err) { + t.is(err.code, 'LEVEL_ABORTED') + t.is(err.name, 'AbortError') + } + + await it.close() + return db.close() + }) + + test(`${mode}().${method}() with non-aborted signal`, async function (t) { + const db = testCommon.factory() + + await db.open() + await db.batch().put('a', 'a').put('b', 'b').write() + + const ac = new globalThis.AbortController() + const it = db[mode]({ signal: ac.signal }) + + // We're merely testing that this does not throw. And implicitly testing (through + // coverage) that abort listeners are removed. An implementation might choose to + // periodically check signal.aborted instead of using an abort listener, so we + // can't directly assert that cleanup indeed happens. 
+ await it[method](...requiredArgs) + await it.close() + + return db.close() + }) + } + } + } +} + +exports.iterator = function (test, testCommon) { + test('iterator data setup', function (t) { + return db.batch([ + { type: 'put', key: 'foobatch1', value: 'bar1' }, + { type: 'put', key: 'foobatch2', value: 'bar2' }, + { type: 'put', key: 'foobatch3', value: 'bar3' } + ]) + }) + + test('simple iterator().next()', async function (t) { + const iterator = db.iterator() + + t.same(await iterator.next(), ['foobatch1', 'bar1']) + t.same(await iterator.next(), ['foobatch2', 'bar2']) + t.same(await iterator.next(), ['foobatch3', 'bar3']) + t.is(await iterator.next(), undefined) + + return iterator.close() + }) + + // NOTE: adapted from leveldown + test('iterator().next() with values: false', async function (t) { + const it = db.iterator({ values: false }) + + t.same(await it.next(), ['foobatch1', undefined]) + t.same(await it.next(), ['foobatch2', undefined]) + t.same(await it.next(), ['foobatch3', undefined]) + t.is(await it.next(), undefined) + + return it.close() + }) + + // NOTE: adapted from leveldown + test('iterator().next() with keys: false', async function (t) { + const it = db.iterator({ keys: false }) + + t.same(await it.next(), [undefined, 'bar1']) + t.same(await it.next(), [undefined, 'bar2']) + t.same(await it.next(), [undefined, 'bar3']) + t.is(await it.next(), undefined) + + return it.close() + }) + + test('keys().next()', async function (t) { + const it = db.keys() + + t.is(await it.next(), 'foobatch1') + t.is(await it.next(), 'foobatch2') + t.is(await it.next(), 'foobatch3') + t.is(await it.next(), undefined) + + return it.close() + }) + + test('values().next()', async function (t) { + const it = db.values() + + t.is(await it.next(), 'bar1') + t.is(await it.next(), 'bar2') + t.is(await it.next(), 'bar3') + t.is(await it.next(), undefined) + + return it.close() + }) + + for (const mode of ['iterator', 'keys', 'values']) { + const mapEntry = e => mode === 
'iterator' ? e : mode === 'keys' ? e[0] : e[1] + + test(`${mode}().nextv()`, async function (t) { + const it = db[mode]() + + t.same(await it.nextv(1), [['foobatch1', 'bar1']].map(mapEntry)) + t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch3', 'bar3']].map(mapEntry)) + t.same(await it.nextv(2), []) + + await it.close() + }) + + test(`${mode}().nextv() in reverse`, async function (t) { + const it = db[mode]({ reverse: true }) + + t.same(await it.nextv(1), [['foobatch3', 'bar3']].map(mapEntry)) + t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch1', 'bar1']].map(mapEntry)) + t.same(await it.nextv(2), []) + + await it.close() + }) + + test(`${mode}().nextv() has soft minimum of 1`, async function (t) { + const it = db[mode]() + + t.same(await it.nextv(0), [['foobatch1', 'bar1']].map(mapEntry)) + t.same(await it.nextv(0), [['foobatch2', 'bar2']].map(mapEntry)) + t.same(await it.nextv(0, {}), [['foobatch3', 'bar3']].map(mapEntry)) + t.same(await it.nextv(0), []) + + await it.close() + }) + + test(`${mode}().nextv() requesting more than available`, async function (t) { + const it = db[mode]() + + t.same(await it.nextv(10), [ + ['foobatch1', 'bar1'], + ['foobatch2', 'bar2'], + ['foobatch3', 'bar3'] + ].map(mapEntry)) + t.same(await it.nextv(10), []) + + await it.close() + }) + + test(`${mode}().nextv() honors limit`, async function (t) { + const it = db[mode]({ limit: 2 }) + + t.same(await it.nextv(10), [['foobatch1', 'bar1'], ['foobatch2', 'bar2']].map(mapEntry)) + t.same(await it.nextv(10), []) + + await it.close() + }) + + test(`${mode}().nextv() honors limit and size`, async function (t) { + const it = db[mode]({ limit: 2 }) + + t.same(await it.nextv(1), [['foobatch1', 'bar1']].map(mapEntry)) + t.same(await it.nextv(10), [['foobatch2', 'bar2']].map(mapEntry)) + t.same(await it.nextv(10), []) + + await it.close() + }) + + test(`${mode}().nextv() honors limit in reverse`, async function (t) { + const it = db[mode]({ limit: 2, reverse: true 
}) + + t.same(await it.nextv(10), [['foobatch3', 'bar3'], ['foobatch2', 'bar2']].map(mapEntry)) + t.same(await it.nextv(10), []) + + await it.close() + }) + + test(`${mode}().nextv() honors limit and size in reverse`, async function (t) { + const it = db[mode]({ limit: 2, reverse: true }) + + t.same(await it.nextv(1), [['foobatch3', 'bar3']].map(mapEntry)) + t.same(await it.nextv(10), [['foobatch2', 'bar2']].map(mapEntry)) + t.same(await it.nextv(10), []) + + await it.close() + }) + + test(`${mode}().all()`, async function (t) { + t.same(await db[mode]().all(), [ + ['foobatch1', 'bar1'], + ['foobatch2', 'bar2'], + ['foobatch3', 'bar3'] + ].map(mapEntry)) + + t.same(await db[mode]().all({}), [ + ['foobatch1', 'bar1'], + ['foobatch2', 'bar2'], + ['foobatch3', 'bar3'] + ].map(mapEntry)) + }) + + test(`${mode}().all() with keys: false`, async function (t) { + // keys option should be ignored on db.keys() and db.values() + t.same(await db[mode]({ keys: false }).all(), [ + [mode === 'iterator' ? undefined : 'foobatch1', 'bar1'], + [mode === 'iterator' ? undefined : 'foobatch2', 'bar2'], + [mode === 'iterator' ? undefined : 'foobatch3', 'bar3'] + ].map(mapEntry)) + }) + + test(`${mode}().all() with values: false`, async function (t) { + // values option should be ignored on db.keys() and db.values() + t.same(await db[mode]({ values: false }).all(), [ + ['foobatch1', mode === 'iterator' ? undefined : 'bar1'], + ['foobatch2', mode === 'iterator' ? undefined : 'bar2'], + ['foobatch3', mode === 'iterator' ? 
undefined : 'bar3'] + ].map(mapEntry)) + }) + + test(`${mode}().all() in reverse`, async function (t) { + t.same(await db[mode]({ reverse: true }).all(), [ + ['foobatch3', 'bar3'], + ['foobatch2', 'bar2'], + ['foobatch1', 'bar1'] + ].map(mapEntry)) + }) + + test(`${mode}().all() honors limit`, async function (t) { + t.same(await db[mode]({ limit: 2 }).all(), [ + ['foobatch1', 'bar1'], + ['foobatch2', 'bar2'] + ].map(mapEntry)) + + const it = db[mode]({ limit: 2 }) + + t.same(await it.next(), mapEntry(['foobatch1', 'bar1'])) + t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry)) + }) + + test(`${mode}().all() honors limit in reverse`, async function (t) { + t.same(await db[mode]({ limit: 2, reverse: true }).all(), [ + ['foobatch3', 'bar3'], + ['foobatch2', 'bar2'] + ].map(mapEntry)) + + const it = db[mode]({ limit: 2, reverse: true }) + + t.same(await it.next(), mapEntry(['foobatch3', 'bar3'])) + t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry)) + }) + } + + // NOTE: adapted from memdown + test('iterator() sorts lexicographically', async function (t) { + const db = testCommon.factory() + await db.open() + + // Write in unsorted order with multiple operations + await db.put('f', 'F') + await db.put('a', 'A') + await db.put('~', '~') + await db.put('e', 'E') + await db.put('🐄', '🐄') + await db.batch([ + { type: 'put', key: 'd', value: 'D' }, + { type: 'put', key: 'b', value: 'B' }, + { type: 'put', key: 'ff', value: 'FF' }, + { type: 'put', key: 'a🐄', value: 'A🐄' } + ]) + await db.batch([ + { type: 'put', key: '', value: 'empty' }, + { type: 'put', key: '2', value: '2' }, + { type: 'put', key: '12', value: '12' }, + { type: 'put', key: '\t', value: '\t' } + ]) + + t.same(await db.iterator().all(), [ + ['', 'empty'], + ['\t', '\t'], + ['12', '12'], + ['2', '2'], + ['a', 'A'], + ['a🐄', 'A🐄'], + ['b', 'B'], + ['d', 'D'], + ['e', 'E'], + ['f', 'F'], + ['ff', 'FF'], + ['~', '~'], + ['🐄', '🐄'] + ]) + + t.same(await db.iterator({ lte: '' }).all(), [ + 
['', 'empty'] + ]) + + return db.close() + }) + + for (const keyEncoding of ['buffer', 'view']) { + if (!testCommon.supports.encodings[keyEncoding]) continue + + test(`iterators have byte order (${keyEncoding} encoding)`, async function (t) { + const db = testCommon.factory({ keyEncoding }) + await db.open() + + const ctor = keyEncoding === 'buffer' ? Buffer : Uint8Array + const bytes = [2, 11, 1] + const keys = bytes.map(b => ctor.from([b])) + const values = bytes.map(b => String(b)) + + await db.batch(keys.map((key, i) => ({ type: 'put', key, value: values[i] }))) + + t.same((await db.keys().all()).map(k => k[0]), [1, 2, 11], 'order of keys() is ok') + t.same((await db.iterator().all()).map(e => e[0][0]), [1, 2, 11], 'order of iterator() is ok') + t.same(await db.values().all(), ['1', '2', '11'], 'order of values() is ok') + + return db.close() + }) + + // NOTE: adapted from memdown and level-js + test(`iterator() with byte range (${keyEncoding} encoding)`, async function (t) { + const db = testCommon.factory({ keyEncoding }) + await db.open() + + await db.put(Uint8Array.from([0x0]), '0') + await db.put(Uint8Array.from([128]), '128') + await db.put(Uint8Array.from([160]), '160') + await db.put(Uint8Array.from([192]), '192') + + const collect = async (range) => { + const entries = await db.iterator(range).all() + t.ok(entries.every(e => e[0] instanceof Uint8Array)) // True for both encodings + t.ok(entries.every(e => e[1] === String(e[0][0]))) + return entries.map(e => e[0][0]) + } + + t.same(await collect({ gt: Uint8Array.from([255]) }), []) + t.same(await collect({ gt: Uint8Array.from([192]) }), []) + t.same(await collect({ gt: Uint8Array.from([160]) }), [192]) + t.same(await collect({ gt: Uint8Array.from([128]) }), [160, 192]) + t.same(await collect({ gt: Uint8Array.from([0x0]) }), [128, 160, 192]) + t.same(await collect({ gt: Uint8Array.from([]) }), [0x0, 128, 160, 192]) + + t.same(await collect({ lt: Uint8Array.from([255]) }), [0x0, 128, 160, 192]) + 
t.same(await collect({ lt: Uint8Array.from([192]) }), [0x0, 128, 160]) + t.same(await collect({ lt: Uint8Array.from([160]) }), [0x0, 128]) + t.same(await collect({ lt: Uint8Array.from([128]) }), [0x0]) + t.same(await collect({ lt: Uint8Array.from([0x0]) }), []) + t.same(await collect({ lt: Uint8Array.from([]) }), []) + + t.same(await collect({ gte: Uint8Array.from([255]) }), []) + t.same(await collect({ gte: Uint8Array.from([192]) }), [192]) + t.same(await collect({ gte: Uint8Array.from([160]) }), [160, 192]) + t.same(await collect({ gte: Uint8Array.from([128]) }), [128, 160, 192]) + t.same(await collect({ gte: Uint8Array.from([0x0]) }), [0x0, 128, 160, 192]) + t.same(await collect({ gte: Uint8Array.from([]) }), [0x0, 128, 160, 192]) + + t.same(await collect({ lte: Uint8Array.from([255]) }), [0x0, 128, 160, 192]) + t.same(await collect({ lte: Uint8Array.from([192]) }), [0x0, 128, 160, 192]) + t.same(await collect({ lte: Uint8Array.from([160]) }), [0x0, 128, 160]) + t.same(await collect({ lte: Uint8Array.from([128]) }), [0x0, 128]) + t.same(await collect({ lte: Uint8Array.from([0x0]) }), [0x0]) + t.same(await collect({ lte: Uint8Array.from([]) }), []) + + return db.close() + }) + } +} + +exports.decode = function (test, testCommon) { + for (const deferred of [false, true]) { + for (const mode of ['iterator', 'keys', 'values']) { + for (const method of ['next', 'nextv', 'all']) { + const requiredArgs = method === 'nextv' ? [1] : [] + + for (const encodingOption of ['keyEncoding', 'valueEncoding']) { + if (mode === 'keys' && encodingOption === 'valueEncoding') continue + if (mode === 'values' && encodingOption === 'keyEncoding') continue + + // NOTE: adapted from encoding-down + test(`${mode}().${method}() catches decoding error from ${encodingOption} (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const encoding = { + format: 'utf8', + decode: function (x) { + t.is(x, encodingOption === 'keyEncoding' ? 
'testKey' : 'testValue') + throw new Error('from encoding') + }, + encode: identity + } + + const db = testCommon.factory() + await db.put('testKey', 'testValue') + + if (deferred) { + await db.close() + db.open().then(t.pass.bind(t)) + } else { + t.pass('non-deferred') + } + + const it = db[mode]({ [encodingOption]: encoding }) + + try { + await it[method](...requiredArgs) + } catch (err) { + t.is(err.code, 'LEVEL_DECODE_ERROR') + t.is(err.cause && err.cause.message, 'from encoding') + } + + return db.close() + }) + } + } + } + } +} + +exports.tearDown = function (test, testCommon) { + test('iterator teardown', async function (t) { + return db.close() + }) +} + +exports.dispose = function (test, testCommon) { + // Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management) + Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) { + const db = testCommon.factory() + await db.open() + + const iterator = db.iterator() + await iterator[Symbol.asyncDispose]() + + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.sequence(test, testCommon) + exports.iterator(test, testCommon) + exports.decode(test, testCommon) + exports.tearDown(test, testCommon) + exports.dispose(test, testCommon) +} diff --git a/test/manifest-test.js b/test/manifest-test.js new file mode 100644 index 0000000..d4899cb --- /dev/null +++ b/test/manifest-test.js @@ -0,0 +1,21 @@ +'use strict' + +const suite = require('level-supports/test') + +module.exports = function (test, testCommon) { + suite(test, testCommon) + + test('manifest has expected properties', async function (t) { + const db = testCommon.factory() + + t.is(db.supports.deferredOpen, true) + + testCommon.supports = db.supports + t.ok(testCommon.supports, 'can be accessed via testCommon') + + t.ok(db.supports.encodings.utf8, 'supports utf8') + t.ok(db.supports.encodings.json, 'supports json') + + 
return db.close() + }) +} diff --git a/test/open-create-if-missing-test.js b/test/open-create-if-missing-test.js new file mode 100644 index 0000000..29b443d --- /dev/null +++ b/test/open-create-if-missing-test.js @@ -0,0 +1,39 @@ +'use strict' + +exports.createIfMissing = function (test, testCommon) { + test('open() with createIfMissing: false', async function (t) { + t.plan(2) + + const db = testCommon.factory() + + try { + await db.open({ createIfMissing: false }) + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.ok(/does not exist/.test(err.cause.message), 'error is about dir not existing') + } + + // Should be a noop + return db.close() + }) + + test('open() with createIfMissing: false via constructor', async function (t) { + t.plan(2) + + const db = testCommon.factory({ createIfMissing: false }) + + try { + await db.open() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.ok(/does not exist/.test(err.cause.message), 'error is about dir not existing') + } + + // Should be a noop + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.createIfMissing(test, testCommon) +} diff --git a/test/open-error-if-exists-test.js b/test/open-error-if-exists-test.js new file mode 100644 index 0000000..d149d92 --- /dev/null +++ b/test/open-error-if-exists-test.js @@ -0,0 +1,25 @@ +'use strict' + +exports.errorIfExists = function (test, testCommon) { + test('open() with errorIfExists: true', async function (t) { + t.plan(2) + + const db = testCommon.factory() + await db.open() + await db.close() + + try { + await db.open({ createIfMissing: false, errorIfExists: true }) + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.ok(/exists/.test(err.cause.message), 'error is about already existing') + } + + // Should be a noop + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.errorIfExists(test, testCommon) +} diff --git a/test/open-test.js b/test/open-test.js new file mode 
100644 index 0000000..87fb66d --- /dev/null +++ b/test/open-test.js @@ -0,0 +1,263 @@ +'use strict' + +exports.open = function (test, testCommon) { + test('open() and close(), no options', async function (t) { + const db = testCommon.factory() + t.is(db.status, 'opening') + + const promise1 = db.open() + t.is(db.status, 'opening') + await promise1 + + t.is(db.status, 'open') + + const promise2 = db.close() + t.is(db.status, 'closing') + await promise2 + t.is(db.status, 'closed') + }) + + test('open() and close(), with empty options', async function (t) { + const db = testCommon.factory() + await db.open({}) + return db.close() + }) + + test('open(), close() and open()', async function (t) { + const db = testCommon.factory() + + await db.open() + t.is(db.status, 'open') + + await db.close() + t.is(db.status, 'closed') + + await db.open() + t.is(db.status, 'open') + + return db.close() + }) + + test('open() and close() in same tick', function (t) { + t.plan(5) + + const db = testCommon.factory() + const order = [] + + db.open().then(function () { + order.push('A') + t.is(db.status, 'open', 'is open') + }) + + t.is(db.status, 'opening', 'is opening') + + // This eventually wins from the open() call + db.close().then(function () { + order.push('B') + t.same(order, ['open event', 'A', 'closed event', 'B'], 'order is correct') + t.is(db.status, 'closed', 'is closed') + }) + + // But open() is still in progress + t.is(db.status, 'opening', 'is still opening') + + db.on('open', () => { order.push('open event') }) + db.on('closed', () => { order.push('closed event') }) + }) + + test('open(), close() and open() in same tick', function (t) { + t.plan(8) + + const db = testCommon.factory() + const order = [] + + db.open().then(function () { + order.push('A') + t.is(db.status, 'open', 'is open') + }) + + t.is(db.status, 'opening', 'is opening') + + // This wins from the open() call + db.close().then(function () { + order.push('B') + t.is(db.status, 'closed', 'is closed') + }) + 
+ t.is(db.status, 'opening', 'is still opening') + + // This wins from the close() call + db.open().then(function () { + order.push('C') + t.same(order, ['open event', 'A', 'closed event', 'B', 'open event', 'C'], 'callback order is the same as call order') + t.is(db.status, 'open', 'is open') + + db.close().then(() => t.pass('done')) + }) + + db.on('closed', () => { order.push('closed event') }) + db.on('open', () => { order.push('open event') }) + + t.is(db.status, 'opening', 'is still opening') + }) + + test('open() if already open (sequential)', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + await db.open() + t.is(db.status, 'open', 'is open') + + const promise = db.open() + t.is(db.status, 'open', 'not reopening') + db.on('open', t.fail.bind(t)) + + await promise + t.is(db.status, 'open', 'is open') + return db.close() + }) + + test('open() if already opening (parallel)', function (t) { + t.plan(4) + + const db = testCommon.factory() + let called = false + + db.open().then(function () { + called = true + t.is(db.status, 'open') + }) + + db.open().then(function () { + t.is(db.status, 'open') + t.ok(called) + db.close(() => t.pass('done')) + }) + + t.is(db.status, 'opening') + }) + + test('close() if already closed', async function (t) { + t.plan(3) + + const db = testCommon.factory() + + await db.open() + await db.close() + + t.is(db.status, 'closed', 'is closed') + const promise = db.close() + t.is(db.status, 'closed', 'is closed', 'not reclosing') + db.on('closed', t.fail.bind(t)) + await promise + t.is(db.status, 'closed', 'still closed') + }) + + test('close() if new', function (t) { + t.plan(4) + + const db = testCommon.factory() + t.is(db.status, 'opening', 'status ok') + + db.close().then(function () { + t.is(db.status, 'closed', 'status ok') + }) + + // This behaves differently in abstract-level v1: status remains 'opening' because + // the db let's opening finish (or start, really) and only then closes the db. 
+ t.is(db.status, 'closing', 'status ok') + + if (!db.supports.deferredOpen) { + t.pass('skip') + db.on('closed', t.fail.bind(t, 'should not emit closed')) + } else { + db.on('closed', t.pass.bind(t, 'got closed event')) + } + }) + + for (const event of ['open', 'opening']) { + test(`close() on ${event} event`, function (t) { + t.plan(3) + + const db = testCommon.factory() + const order = [] + + db.on(event, function () { + order.push(`${event} event`) + + // This eventually wins from the in-progress open() call + db.close().then(function () { + order.push('B') + t.same(order, [`${event} event`, 'A', 'closed event', 'B'], 'order is correct') + t.is(db.status, 'closed', 'is closed') + }, t.fail.bind(t)) + }) + + db.open().then(function () { + order.push('A') + t.is(db.status, 'open', 'is open') + }, t.fail.bind(t)) + + db.on('closed', () => { order.push('closed event') }) + }) + } + + for (const event of ['closed', 'closing']) { + test(`open() on ${event} event`, function (t) { + t.plan(3) + + const db = testCommon.factory() + const order = [] + + db.on(event, function () { + order.push(`${event} event`) + + // This eventually wins from the in-progress close() call + db.open().then(function () { + order.push('B') + t.same(order, [`${event} event`, 'A', 'open event', 'B'], 'order is correct') + t.is(db.status, 'open', 'is open') + }, t.fail.bind(t)) + }) + + db.close().then(function () { + order.push('A') + t.is(db.status, 'closed', 'is closed') + }, t.fail.bind(t)) + + db.on('open', () => { order.push('open event') }) + }) + } + + test('passive open()', async function (t) { + t.plan(1) + const db = testCommon.factory() + await db.open({ passive: true }) // OK, already opening + await db.close() + await db.open({ passive: true }).catch(err => { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + }) + await db.open() + await db.open({ passive: true }) // OK, already open + return db.close() + }) + + test('passive option is ignored if set in constructor options', async 
function (t) { + const db = testCommon.factory({ passive: true }) + await new Promise((resolve) => db.once('open', resolve)) + return db.close() + }) + + // Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management) + Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) { + const db = testCommon.factory() + await db.open() + await db[Symbol.asyncDispose]() + t.is(db.status, 'closed') + }) +} + +exports.all = function (test, testCommon) { + exports.open(test, testCommon) +} diff --git a/test/put-get-del-test.js b/test/put-get-del-test.js new file mode 100644 index 0000000..d274675 --- /dev/null +++ b/test/put-get-del-test.js @@ -0,0 +1,86 @@ +'use strict' + +const { Buffer } = require('buffer') + +let db + +function makeTest (test, type, key, value, expectedValue) { + const stringValue = arguments.length === 5 ? expectedValue : value.toString() + + test('put(), get(), del() with ' + type, async function (t) { + await db.put(key, value) + t.is((await db.get(key)).toString(), stringValue) + await db.del(key) + t.is(await db.get(key), undefined, 'not found') + }) +} + +exports.setUp = function (test, testCommon) { + test('put(), get(), del() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.nonErrorKeys = function (test, testCommon) { + // valid falsey keys + makeTest(test, '`0` key', 0, 'foo 0') + makeTest(test, 'empty string key', 0, 'foo') + + // standard String key + makeTest( + test + , 'long String key' + , 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!' 
+ , 'foo' + ) + + if (testCommon.supports.encodings.buffer) { + makeTest(test, 'Buffer key', Buffer.from('0080c0ff', 'hex'), 'foo') + makeTest(test, 'empty Buffer key', Buffer.alloc(0), 'foo') + } + + // non-empty Array as a value + makeTest(test, 'Array value', 'foo', [1, 2, 3, 4]) +} + +exports.nonErrorValues = function (test, testCommon) { + // valid falsey values + makeTest(test, '`false` value', 'foo false', false) + makeTest(test, '`0` value', 'foo 0', 0) + makeTest(test, '`NaN` value', 'foo NaN', NaN) + + // all of the following result in an empty-string value: + makeTest(test, 'empty String value', 'foo', '', '') + makeTest(test, 'empty Buffer value', 'foo', Buffer.alloc(0), '') + makeTest(test, 'empty Array value', 'foo', [], '') + + // String value + makeTest( + test + , 'long String value' + , 'foo' + , 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!' + ) + + // Buffer value + if (testCommon.supports.encodings.buffer) { + makeTest(test, 'Buffer value', 'foo', Buffer.from('0080c0ff', 'hex')) + } + + // non-empty Array as a key + makeTest(test, 'Array key', [1, 2, 3, 4], 'foo') +} + +exports.tearDown = function (test, testCommon) { + test('put(), get(), del() teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.nonErrorKeys(test, testCommon) + exports.nonErrorValues(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/put-test.js b/test/put-test.js new file mode 100644 index 0000000..cbf380a --- /dev/null +++ b/test/put-test.js @@ -0,0 +1,70 @@ +'use strict' + +const { illegalKeys, illegalValues, assertPromise } = require('./util') +const traits = require('./traits') + +let db + +exports.setUp = function (test, testCommon) { + test('put() setup', async function (t) { + db = testCommon.factory() + return db.open() + }) +} + +exports.args = function (test, testCommon) { + 
test('put() with illegal keys', function (t) { + t.plan(illegalKeys.length * 2) + + for (const { name, key } of illegalKeys) { + db.put(key, 'value').catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code') + }) + } + }) + + test('put() with illegal values', function (t) { + t.plan(illegalValues.length * 2) + + for (const { name, value } of illegalValues) { + db.put('key', value).catch(function (err) { + t.ok(err instanceof Error, name + ' - is Error') + t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code') + }) + } + }) +} + +exports.put = function (test, testCommon) { + test('simple put()', async function (t) { + t.is(await assertPromise(db.put('foo', 'bar')), undefined, 'void promise') + t.is(await db.get('foo'), 'bar') + await db.put('foo', 'new') + t.is(await db.get('foo'), 'new', 'value was overwritten') + await db.put('bar', 'foo', {}) // same but with {} + t.is(await db.get('bar'), 'foo') + }) + + traits.open('put()', testCommon, async function (t, db) { + t.is(await assertPromise(db.put('foo', 'bar')), undefined, 'void promise') + t.is(await db.get('foo'), 'bar', 'value is ok') + }) + + traits.closed('put()', testCommon, async function (t, db) { + return db.put('foo', 'bar') + }) +} + +exports.tearDown = function (test, testCommon) { + test('put() teardown', async function (t) { + return db.close() + }) +} + +exports.all = function (test, testCommon) { + exports.setUp(test, testCommon) + exports.args(test, testCommon) + exports.put(test, testCommon) + exports.tearDown(test, testCommon) +} diff --git a/test/self.js b/test/self.js new file mode 100644 index 0000000..e4396c5 --- /dev/null +++ b/test/self.js @@ -0,0 +1,905 @@ +'use strict' + +const test = require('tape') +const isBuffer = require('is-buffer') +const { Buffer } = require('buffer') +const { AbstractLevel, AbstractChainedBatch } = require('..') +const { MinimalLevel, createSpy } = 
require('./util') +const getRangeOptions = require('../lib/range-options') + +const testCommon = require('./common')({ + test, + factory () { + return new AbstractLevel({ encodings: { utf8: true } }) + } +}) + +const rangeOptions = ['gt', 'gte', 'lt', 'lte'] + +function implement (ctor, methods) { + class Test extends ctor {} + + for (const k in methods) { + Test.prototype[k] = methods[k] + } + + return Test +} + +/** + * Extensibility + */ + +test('test core extensibility', function (t) { + const Test = implement(AbstractLevel) + const test = new Test({ encodings: { utf8: true } }) + t.is(test.status, 'opening', 'status is opening') + t.end() +}) + +test('manifest is required', function (t) { + t.plan(3 * 2) + + const Test = implement(AbstractLevel) + + for (const args of [[], [null], [123]]) { + try { + // eslint-disable-next-line no-new + new Test(...args) + } catch (err) { + t.is(err.name, 'TypeError') + t.is(err.message, "The first argument 'manifest' must be an object") + } + } +}) + +test('test open() extensibility when new', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { createIfMissing: true, errorIfExists: false } + const Test = implement(AbstractLevel, { _open: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + + t.is(spy.callCount, 1, 'got _open() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _open() was correct') + t.is(spy.getCall(0).args.length, 1, 'got one argument') + t.same(spy.getCall(0).args[0], expectedOptions, 'got default options argument') + + const test2 = new Test({ encodings: { utf8: true } }) + await test2.open({ options: 1 }) + + expectedOptions.options = 1 + + t.is(spy.callCount, 2, 'got _open() call') + t.is(spy.getCall(1).thisValue, test2, '`this` on _open() was correct') + t.is(spy.getCall(1).args.length, 1, 'got one argument') + t.same(spy.getCall(1).args[0], expectedOptions, 'got expected options argument') +}) + +test('test open() 
extensibility when open', function (t) { + t.plan(2) + + const spy = createSpy(async function () {}) + const Test = implement(AbstractLevel, { _open: spy }) + const test = new Test({ encodings: { utf8: true } }) + + test.once('open', function () { + t.is(spy.callCount, 1, 'got _open() call') + + test.open().then(function () { + t.is(spy.callCount, 1, 'did not get second _open() call') + }) + }) +}) + +test('test opening explicitly gives a chance to capture an error', async function (t) { + t.plan(3) + + const spy = createSpy(async function (options) { throw new Error('_open error') }) + const Test = implement(AbstractLevel, { _open: spy }) + const test = new Test({ encodings: { utf8: true } }) + + try { + await test.open() + } catch (err) { + t.is(spy.callCount, 1, 'got _open() call') + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause.message, '_open error') + } +}) + +test('test constructor options are forwarded to open()', async function (t) { + const spy = createSpy(async function (options) { }) + const Test = implement(AbstractLevel, { _open: spy }) + const test = new Test({ encodings: { utf8: true } }, { + passive: true, + keyEncoding: 'json', + valueEncoding: 'json', + createIfMissing: false, + foo: 123 + }) + + await test.open() + + t.is(spy.callCount, 1, 'got _open() call') + t.same(spy.getCall(0).args[0], { + foo: 123, + createIfMissing: false, + errorIfExists: false + }, 'does not forward passive, keyEncoding and valueEncoding options') +}) + +test('test close() extensibility when open', async function (t) { + const spy = createSpy(async function () {}) + const Test = implement(AbstractLevel, { _close: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.close() + + t.is(spy.callCount, 1, 'got _close() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _close() was correct') + t.is(spy.getCall(0).args.length, 0, 'got 0 arguments') +}) + +test('test close() extensibility when new', async function 
(t) { + const spy = createSpy(async function () {}) + const Test = implement(AbstractLevel, { _close: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.close() + t.is(spy.callCount, 0, 'not called because _open was never called') +}) + +test('test open(), close(), open() with twice failed open', function (t) { + t.plan(7) + + const db = testCommon.factory() + const order = [] + + let opens = 0 + + db.on('open', t.fail.bind(t)) + db.on('closed', t.fail.bind(t)) + + db._open = async function (options) { + t.pass('called') + throw new Error('test' + (++opens)) + } + + db._close = async function () { + t.fail('should not be called') + } + + db.open().then(t.fail.bind(t), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause && err.cause.message, 'test1') + order.push('A') + }) + + db.close().then(function () { + order.push('B') + }) + + db.open().then(t.fail.bind(t), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause && err.cause.message, 'test2') + order.push('C') + t.same(order, ['A', 'B', 'C'], 'order is ok') + }) +}) + +test('test open(), close(), open() with first failed open', function (t) { + t.plan(6) + + const db = testCommon.factory() + const order = [] + + let opens = 0 + + db.on('open', () => { order.push('open event') }) + db.on('closed', t.fail.bind(t, 'should not emit closed')) + + db._open = async function (options) { + t.pass('called') + if (!opens++) throw new Error('test') + } + + db.open().then(t.fail.bind(t, 'should not open'), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(db.status, 'closed') + order.push('A') + }) + + db.close().then(function () { + // Status is actually 'opening' due to the parallel open() call, which starts + // its work after close() finished but before this then() handler. Can't be helped. 
+ // t.is(db.status, 'closed') + + order.push('B') + }) + + db.open().then(function () { + t.is(db.status, 'open') + order.push('C') + t.same(order, ['A', 'B', 'open event', 'C'], 'order is ok') + }) +}) + +test('test open(), close(), open() with second failed open', function (t) { + t.plan(8) + + const db = testCommon.factory() + const order = [] + + let opens = 0 + + db.on('open', () => order.push('open event')) + db.on('closed', () => order.push('closed event')) + + db._open = async function (options) { + t.pass('called') + if (opens++) throw new Error('test') + } + + db.open().then(function () { + t.is(db.status, 'open') + order.push('A') + }) + + db.close().then(function () { + t.is(db.status, 'closed') + order.push('B') + }) + + db.open().then(t.fail.bind(t, 'should not open'), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause.message, 'test') + t.is(db.status, 'closed') + order.push('C') + t.same(order, ['open event', 'A', 'closed event', 'B', 'C'], 'order is ok') + }) +}) + +test('open() error is combined with resource error', async function (t) { + t.plan(4) + + const db = testCommon.factory() + const resource = db.iterator() + + db._open = async function (options) { + throw new Error('error from open') + } + + resource.close = async function () { + throw new Error('error from resource') + } + + try { + await db.open() + } catch (err) { + t.is(db.status, 'closed') + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause.name, 'CombinedError') + t.is(err.cause.message, 'error from open; error from resource') + } +}) + +test('test get() extensibility', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { keyEncoding: 'utf8', valueEncoding: 'utf8' } + const expectedKey = 'a key' + const Test = implement(AbstractLevel, { _get: spy }) + const test = new Test({ encodings: { utf8: true } }, { keyEncoding: 'utf8' }) + + await test.open() + await test.get(expectedKey) + + t.is(spy.callCount, 
1, 'got _get() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _get() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument') + t.same(spy.getCall(0).args[1], expectedOptions, 'got default options argument') + + await test.get(expectedKey, { options: 1 }) + expectedOptions.options = 1 + + t.is(spy.callCount, 2, 'got _get() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _get() was correct') + t.is(spy.getCall(1).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument') + t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument') +}) + +test('test getMany() extensibility', async function (t) { + const spy = createSpy(async () => ['x']) + const expectedOptions = { keyEncoding: 'utf8', valueEncoding: 'utf8' } + const expectedKey = 'a key' + const Test = implement(AbstractLevel, { _getMany: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.getMany([expectedKey]) + + t.is(spy.callCount, 1, 'got _getMany() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _getMany() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.same(spy.getCall(0).args[0], [expectedKey], 'got expected keys argument') + t.same(spy.getCall(0).args[1], expectedOptions, 'got default options argument') + + await test.getMany([expectedKey], { options: 1 }) + expectedOptions.options = 1 + + t.is(spy.callCount, 2, 'got _getMany() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _getMany() was correct') + t.is(spy.getCall(1).args.length, 2, 'got 2 arguments') + t.same(spy.getCall(1).args[0], [expectedKey], 'got expected key argument') + t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument') +}) + +test('test del() extensibility', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { 
options: 1, keyEncoding: 'utf8' } + const expectedKey = 'a key' + const Test = implement(AbstractLevel, { _del: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.del(expectedKey) + + t.is(spy.callCount, 1, 'got _del() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _del() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument') + t.same(spy.getCall(0).args[1], { keyEncoding: 'utf8' }, 'got blank options argument') + + await test.del(expectedKey, expectedOptions) + + t.is(spy.callCount, 2, 'got _del() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _del() was correct') + t.is(spy.getCall(1).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument') + t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument') +}) + +test('test put() extensibility', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { options: 1, keyEncoding: 'utf8', valueEncoding: 'utf8' } + const expectedKey = 'a key' + const expectedValue = 'a value' + const Test = implement(AbstractLevel, { _put: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.put(expectedKey, expectedValue) + + t.is(spy.callCount, 1, 'got _put() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _put() was correct') + t.is(spy.getCall(0).args.length, 3, 'got 3 arguments') + t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument') + t.is(spy.getCall(0).args[1], expectedValue, 'got expected value argument') + t.same(spy.getCall(0).args[2], { keyEncoding: 'utf8', valueEncoding: 'utf8' }, 'got default options argument') + + await test.put(expectedKey, expectedValue, expectedOptions) + + t.is(spy.callCount, 2, 'got _put() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _put() was correct') + 
t.is(spy.getCall(1).args.length, 3, 'got 3 arguments') + t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument') + t.is(spy.getCall(1).args[1], expectedValue, 'got expected value argument') + t.same(spy.getCall(1).args[2], expectedOptions, 'got expected options argument') +}) + +test('batch([]) extensibility', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { options: 1 } + const expectedArray = [ + { type: 'put', key: '1', value: '1', keyEncoding: 'utf8', valueEncoding: 'utf8' }, + { type: 'del', key: '2', keyEncoding: 'utf8' } + ] + const Test = implement(AbstractLevel, { _batch: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.batch(expectedArray) + + t.is(spy.callCount, 1, 'got _batch() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _batch() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.same(spy.getCall(0).args[0], expectedArray, 'got expected array argument') + t.same(spy.getCall(0).args[1], {}, 'got expected options argument') + + await test.batch(expectedArray, expectedOptions) + + t.is(spy.callCount, 2, 'got _batch() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _batch() was correct') + t.is(spy.getCall(1).args.length, 2, 'got 2 arguments') + t.same(spy.getCall(1).args[0], expectedArray.map(o => ({ ...expectedOptions, ...o })), 'got expected array argument') + t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument') + + await test.batch(expectedArray, null) + + t.is(spy.callCount, 3, 'got _batch() call') + t.is(spy.getCall(2).thisValue, test, '`this` on _batch() was correct') + t.is(spy.getCall(2).args.length, 2, 'got 2 arguments') + t.same(spy.getCall(2).args[0], expectedArray, 'got expected array argument') + t.ok(spy.getCall(2).args[1], 'options should not be null') +}) + +test('batch([]) with empty array is a noop', function (t) { + t.plan(1) + + const spy = createSpy() + 
const Test = implement(AbstractLevel, { _batch: spy }) + const test = new Test({ encodings: { utf8: true } }) + + test.once('open', function () { + test.batch([]).then(function () { + t.is(spy.callCount, 0, '_batch() call was bypassed') + }) + }) +}) + +test('test chained batch() extensibility', async function (t) { + const spy = createSpy(async function () {}) + const expectedOptions = { options: 1 } + const Test = implement(AbstractLevel, { _batch: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + await test.batch().put('foo', 'bar').del('bang').write() + + t.is(spy.callCount, 1, 'got _batch() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _batch() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(0).args[0].length, 2, 'got expected array argument') + t.same(spy.getCall(0).args[0][0], { keyEncoding: 'utf8', valueEncoding: 'utf8', type: 'put', key: 'foo', value: 'bar' }, 'got expected array argument[0]') + t.same(spy.getCall(0).args[0][1], { keyEncoding: 'utf8', type: 'del', key: 'bang' }, 'got expected array argument[1]') + t.same(spy.getCall(0).args[1], {}, 'got expected options argument') + + await test.batch().put('foo', 'bar', expectedOptions).del('bang', expectedOptions).write(expectedOptions) + + t.is(spy.callCount, 2, 'got _batch() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _batch() was correct') + t.is(spy.getCall(1).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(1).args[0].length, 2, 'got expected array argument') + t.same(spy.getCall(1).args[0][0], { keyEncoding: 'utf8', valueEncoding: 'utf8', type: 'put', key: 'foo', value: 'bar', options: 1 }, 'got expected array argument[0]') + t.same(spy.getCall(1).args[0][1], { keyEncoding: 'utf8', type: 'del', key: 'bang', options: 1 }, 'got expected array argument[1]') + t.same(spy.getCall(1).args[1], { options: 1 }, 'got expected options argument') +}) + +test('test chained batch() with no operations is a 
noop', function (t) { + t.plan(1) + + const spy = createSpy(async function () {}) + const Test = implement(AbstractLevel, { _batch: spy }) + const test = new Test({ encodings: { utf8: true } }) + + test.once('open', function () { + test.batch().write().then(function () { + t.is(spy.callCount, 0, '_batch() call was bypassed') + }) + }) +}) + +test('test chained batch() (custom _chainedBatch) extensibility', async function (t) { + const spy = createSpy() + const Test = implement(AbstractLevel, { _chainedBatch: spy }) + const test = new Test({ encodings: { utf8: true } }) + + await test.open() + + test.batch() + + t.is(spy.callCount, 1, 'got _chainedBatch() call') + t.is(spy.getCall(0).thisValue, test, '`this` on _chainedBatch() was correct') + + test.batch() + + t.is(spy.callCount, 2, 'got _chainedBatch() call') + t.is(spy.getCall(1).thisValue, test, '`this` on _chainedBatch() was correct') +}) + +test('test AbstractChainedBatch extensibility', async function (t) { + const Batch = implement(AbstractChainedBatch) + const db = testCommon.factory() + await db.open() + const test = new Batch(db) + t.ok(test.db === db, 'instance has db reference') +}) + +test('test AbstractChainedBatch expects a db', function (t) { + t.plan(1) + + const Test = implement(AbstractChainedBatch) + + try { + // eslint-disable-next-line no-new + new Test() + } catch (err) { + t.is(err.message, 'The first argument must be an abstract-level database, received undefined') + } +}) + +test('test AbstractChainedBatch#write() extensibility', async function (t) { + t.plan(2) + + const Test = implement(AbstractChainedBatch, { + async _write (options) { + t.same(options, {}) + t.is(this, batch, 'thisArg on _write() is correct') + } + }) + + const db = testCommon.factory() + await db.open() + const batch = new Test(db) + + // Without any operations, _write isn't called + batch.put('foo', 'bar') + return batch.write() +}) + +test('test AbstractChainedBatch#write() extensibility with null options', async 
function (t) { + t.plan(2) + + const Test = implement(AbstractChainedBatch, { + async _write (options) { + t.same(options, {}) + t.is(this, batch, 'thisArg on _write() is correct') + } + }) + + const db = testCommon.factory() + await db.open() + const batch = new Test(db) + + // Without any operations, _write isn't called + batch.put('foo', 'bar') + return batch.write(null) +}) + +test('test AbstractChainedBatch#write() extensibility with options', async function (t) { + t.plan(2) + + const Test = implement(AbstractChainedBatch, { + async _write (options) { + t.same(options, { test: true }) + t.is(this, batch, 'thisArg on _write() is correct') + } + }) + + const db = testCommon.factory() + await db.open() + const batch = new Test(db) + + // Without any operations, _write isn't called + batch.put('foo', 'bar') + return batch.write({ test: true }) +}) + +test('test AbstractChainedBatch#put() extensibility', function (t) { + t.plan(8) + + const spy = createSpy() + const expectedKey = 'key' + const expectedValue = 'value' + const Test = implement(AbstractChainedBatch, { _put: spy }) + const db = testCommon.factory() + + db.once('open', function () { + const test = new Test(db) + const returnValue = test.put(expectedKey, expectedValue) + + t.is(spy.callCount, 1, 'got _put call') + t.is(spy.getCall(0).thisValue, test, '`this` on _put() was correct') + t.is(spy.getCall(0).args.length, 3, 'got 3 arguments') + t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument') + t.is(spy.getCall(0).args[1], expectedValue, 'got expected value argument') + + // May contain more options, just because it's cheaper to not remove them + t.is(spy.getCall(0).args[2].keyEncoding, 'utf8', 'got expected keyEncoding option') + t.is(spy.getCall(0).args[2].valueEncoding, 'utf8', 'got expected valueEncoding option') + + t.is(returnValue, test, 'get expected return value') + }) +}) + +test('test AbstractChainedBatch#del() extensibility', function (t) { + t.plan(6) + + const spy = 
createSpy() + const expectedKey = 'key' + const Test = implement(AbstractChainedBatch, { _del: spy }) + const db = testCommon.factory() + + db.once('open', function () { + const test = new Test(db) + const returnValue = test.del(expectedKey) + + t.is(spy.callCount, 1, 'got _del call') + t.is(spy.getCall(0).thisValue, test, '`this` on _del() was correct') + t.is(spy.getCall(0).args.length, 2, 'got 2 arguments') + t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument') + + // May contain more options, just because it's cheaper to not remove them + t.is(spy.getCall(0).args[1].keyEncoding, 'utf8', 'got expected keyEncoding option') + + t.is(returnValue, test, 'get expected return value') + }) +}) + +test('test AbstractChainedBatch#clear() extensibility', function (t) { + t.plan(4) + + const spy = createSpy() + const Test = implement(AbstractChainedBatch, { _clear: spy }) + const db = testCommon.factory() + + db.once('open', function () { + const test = new Test(db) + const returnValue = test.clear() + + t.is(spy.callCount, 1, 'got _clear call') + t.is(spy.getCall(0).thisValue, test, '`this` on _clear() was correct') + t.is(spy.getCall(0).args.length, 0, 'got zero arguments') + t.is(returnValue, test, 'get expected return value') + }) +}) + +test('test clear() extensibility', async function (t) { + t.plan((7 * 4) - 3) + + const spy = createSpy() + const Test = implement(AbstractLevel, { _clear: spy }) + const db = new Test({ encodings: { utf8: true } }) + + await db.open() + + call([], { keyEncoding: 'utf8', reverse: false, limit: -1 }) + call([null], { keyEncoding: 'utf8', reverse: false, limit: -1 }) + call([undefined], { keyEncoding: 'utf8', reverse: false, limit: -1 }) + call([{ custom: 1 }], { custom: 1, keyEncoding: 'utf8', reverse: false, limit: -1 }) + call([{ reverse: true, limit: 0 }], { keyEncoding: 'utf8', reverse: true, limit: 0 }, true) + call([{ reverse: 1 }], { keyEncoding: 'utf8', reverse: true, limit: -1 }) + call([{ reverse: null }], { 
keyEncoding: 'utf8', reverse: false, limit: -1 }) + + function call (args, expectedOptions, shouldSkipCall) { + db.clear.apply(db, args).catch(t.fail.bind(t)) + + t.is(spy.callCount, shouldSkipCall ? 0 : 1, 'got _clear() call') + + if (!shouldSkipCall) { + t.is(spy.getCall(0).thisValue, db, '`this` on _clear() was correct') + t.is(spy.getCall(0).args.length, 1, 'got 1 argument') + t.same(spy.getCall(0).args[0], expectedOptions, 'got expected options argument') + } + + spy.resetHistory() + } +}) + +// TODO: replace with encoding test +test.skip('test serialization extensibility (batch array is not mutated)', function (t) { + t.plan(7) + + const spy = createSpy() + const Test = implement(AbstractLevel, { + _batch: spy, + _serializeKey: function (key) { + t.is(key, 'no') + return 'foo' + }, + _serializeValue: function (value) { + t.is(value, 'nope') + return 'bar' + } + }) + + const test = new Test({ encodings: { utf8: true } }) + + test.once('open', function () { + const op = { type: 'put', key: 'no', value: 'nope' } + + test.batch([op], function () {}) + + t.is(spy.callCount, 1, 'got _batch() call') + t.is(spy.getCall(0).args[0][0].key, 'foo', 'got expected key') + t.is(spy.getCall(0).args[0][0].value, 'bar', 'got expected value') + + t.is(op.key, 'no', 'did not mutate input key') + t.is(op.value, 'nope', 'did not mutate input value') + }) +}) + +test('clear() does not delete empty or nullish range options', function (t) { + const rangeValues = [Uint8Array.from([]), '', null, undefined] + + t.plan(rangeOptions.length * rangeValues.length) + + rangeValues.forEach(function (value) { + const Test = implement(AbstractLevel, { + async _clear (options) { + rangeOptions.forEach(function (key) { + t.ok(key in options, key + ' option should not be deleted') + }) + } + }) + + const db = new Test({ encodings: { utf8: true } }) + const options = {} + + rangeOptions.forEach(function (key) { + options[key] = value + }) + + db.once('open', function () { + 
db.clear(options).catch(t.fail.bind(t)) + }) + }) +}) + +test('open error', function (t) { + t.plan(3) + + const Test = implement(AbstractLevel, { + async _open (options) { + throw new Error('_open error') + } + }) + + const test = new Test({ encodings: { utf8: true } }) + + test.open().then(t.fail.bind(t), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause && err.cause.message, '_open error') + t.is(test.status, 'closed') + }) +}) + +test('close error', function (t) { + t.plan(3) + + const Test = implement(AbstractLevel, { + async _close () { + throw new Error('_close error') + } + }) + + const test = new Test({ encodings: { utf8: true } }) + test.open().then(function () { + test.close().then(t.fail.bind(t), function (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_CLOSED') + t.is(err.cause && err.cause.message, '_close error') + t.is(test.status, 'open') + }) + }) +}) + +test('rangeOptions', function (t) { + const keys = rangeOptions.slice() + const db = new AbstractLevel({ + encodings: { + utf8: true, buffer: true, view: true + } + }) + + function setupOptions (create) { + const options = {} + for (const key of keys) { + options[key] = create() + } + return options + } + + function verifyOptions (t, options) { + for (const key of keys) { + t.ok(key in options, key + ' option should not be deleted') + } + t.end() + } + + t.plan(10) + t.test('setup', async (t) => db.open()) + + t.test('default options', function (t) { + t.same(getRangeOptions(undefined, db.keyEncoding('utf8')), { + reverse: false, + limit: -1 + }, 'correct defaults') + t.end() + }) + + t.test('set options', function (t) { + t.same(getRangeOptions({ reverse: false, limit: 20 }, db.keyEncoding('utf8')), { + reverse: false, + limit: 20 + }, 'options set correctly') + t.end() + }) + + t.test('ignores invalid limit', function (t) { + // Infinity is valid but is normalized to -1 for use in private API + for (const limit of [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, NaN, 
-2, 5.5]) { + t.same(getRangeOptions({ limit }, db.keyEncoding('utf8')).limit, -1) + } + t.end() + }) + + t.test('ignores not-own property', function (t) { + class Options {} + Options.prototype.limit = 20 + const options = new Options() + + t.is(options.limit, 20) + t.same(getRangeOptions(options, db.keyEncoding('utf8')), { + reverse: false, + limit: -1 + }) + t.end() + }) + + t.test('does not delete empty buffers', function (t) { + const options = setupOptions(() => Buffer.alloc(0)) + keys.forEach(function (key) { + t.is(isBuffer(options[key]), true, 'should be buffer') + t.is(options[key].byteLength, 0, 'should be empty') + }) + verifyOptions(t, getRangeOptions(options, db.keyEncoding('buffer'))) + }) + + t.test('does not delete empty views', function (t) { + const options = setupOptions(() => Uint8Array.from([])) + keys.forEach(function (key) { + t.is(options[key] instanceof Uint8Array, true, 'should be Uint8Array') + t.is(options[key].byteLength, 0, 'should be empty') + }) + verifyOptions(t, getRangeOptions(options, db.keyEncoding('view'))) + }) + + t.test('does not delete empty strings', function (t) { + const options = setupOptions(() => '') + keys.forEach(function (key) { + t.is(typeof options[key], 'string', 'should be string') + t.is(options[key].length, 0, 'should be empty') + }) + verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8'))) + }) + + t.test('does not delete null', function (t) { + const options = setupOptions(() => null) + keys.forEach(function (key) { + t.is(options[key], null) + }) + verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8'))) + }) + + t.test('does not delete undefined', function (t) { + const options = setupOptions(() => undefined) + keys.forEach(function (key) { + t.is(options[key], undefined) + }) + verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8'))) + }) +}) + +require('./self/deferred-queue-test') +require('./self/errors-test') +require('./self/defer-test') 
+require('./self/attach-resource-test') +require('./self/abstract-iterator-test') +require('./self/iterator-test') +require('./self/deferred-iterator-test') +require('./self/deferred-operations-test') +require('./self/async-iterator-test') +require('./self/encoding-test') +require('./self/sublevel-test') + +// Test the abstract test suite using a minimal implementation +require('./index')({ + test, + factory (options) { + return new MinimalLevel(options) + } +}) diff --git a/test/self/abstract-iterator-test.js b/test/self/abstract-iterator-test.js new file mode 100644 index 0000000..e032309 --- /dev/null +++ b/test/self/abstract-iterator-test.js @@ -0,0 +1,182 @@ +'use strict' + +const test = require('tape') +const { AbstractLevel, AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..') + +const testCommon = require('../common')({ + test, + factory: function () { + return new AbstractLevel({ encodings: { utf8: true } }) + } +}) + +for (const Ctor of [AbstractIterator, AbstractKeyIterator, AbstractValueIterator]) { + // Note, these tests don't create fully functional iterators, because they're not + // created via db.iterator() and therefore lack the options necessary to decode data. + // Not relevant for these tests. + + test(`test ${Ctor.name} extensibility`, function (t) { + const Test = class TestIterator extends Ctor {} + const db = testCommon.factory() + const test = new Test(db, {}) + t.ok(test.db === db, 'instance has db reference') + t.end() + }) + + test(`${Ctor.name} throws on invalid db argument`, function (t) { + t.plan(4 * 2) + + for (const args of [[], [null], [undefined], 'foo']) { + const hint = args[0] === null ? 
'null' : typeof args[0] + + try { + // eslint-disable-next-line no-new + new Ctor(...args) + } catch (err) { + t.is(err.name, 'TypeError') + t.is(err.message, 'The first argument must be an abstract-level database, received ' + hint) + } + } + }) + + test(`${Ctor.name} throws on invalid options argument`, function (t) { + t.plan(4 * 2) + + for (const args of [[], [null], [undefined], 'foo']) { + try { + // eslint-disable-next-line no-new + new Ctor({}, ...args) + } catch (err) { + t.is(err.name, 'TypeError') + t.is(err.message, 'The second argument must be an options object') + } + } + }) + + test(`${Ctor.name}.next() extensibility`, async function (t) { + t.plan(2) + + class TestIterator extends Ctor { + async _next () { + t.is(this, it, 'thisArg on _next() was correct') + t.is(arguments.length, 0, 'got 0 arguments') + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.next() + await db.close() + }) + + test(`${Ctor.name}.nextv() extensibility`, async function (t) { + t.plan(4) + + class TestIterator extends Ctor { + async _nextv (size, options) { + t.is(this, it, 'thisArg on _nextv() was correct') + t.is(arguments.length, 2, 'got 2 arguments') + t.is(size, 100) + t.same(options, {}) + return [] + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.nextv(100) + await db.close() + }) + + test(`${Ctor.name}.nextv() extensibility (options)`, async function (t) { + t.plan(2) + + class TestIterator extends Ctor { + async _nextv (size, options) { + t.is(size, 100) + t.same(options, { foo: 123 }, 'got userland options') + return [] + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.nextv(100, { foo: 123 }) + + return db.close() + }) + + test(`${Ctor.name}.all() extensibility`, async function (t) { + t.plan(2 * 3) + + for (const args of [[], [{}]]) { + class TestIterator extends Ctor { + async _all 
(options) { + t.is(this, it, 'thisArg on _all() was correct') + t.is(arguments.length, 1, 'got 1 argument') + t.same(options, {}, '') + return [] + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.all(...args) + await db.close() + } + }) + + test(`${Ctor.name}.all() extensibility (options)`, async function (t) { + t.plan(1) + + class TestIterator extends Ctor { + async _all (options) { + t.same(options, { foo: 123 }, 'got userland options') + return [] + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.all({ foo: 123 }) + await db.close() + }) + + test(`${Ctor.name}.seek() throws if not implemented`, async function (t) { + t.plan(1) + + const db = testCommon.factory() + await db.open() + const it = new Ctor(db, {}) + + try { + it.seek('123') + } catch (err) { + t.is(err.code, 'LEVEL_NOT_SUPPORTED') + } + + return db.close() + }) + + test(`${Ctor.name}.close() extensibility`, async function (t) { + t.plan(2) + + class TestIterator extends Ctor { + async _close () { + t.is(this, it, 'thisArg on _close() was correct') + t.is(arguments.length, 0, 'got 0 arguments') + } + } + + const db = testCommon.factory() + await db.open() + const it = new TestIterator(db, {}) + await it.close() + await db.close() + }) +} diff --git a/test/self/async-iterator-test.js b/test/self/async-iterator-test.js new file mode 100644 index 0000000..40498cb --- /dev/null +++ b/test/self/async-iterator-test.js @@ -0,0 +1,242 @@ +'use strict' + +const test = require('tape') +const { AbstractLevel, AbstractIterator } = require('../..') +const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator') + +function withIterator (methods) { + class TestIterator extends AbstractIterator { } + + for (const k in methods) { + TestIterator.prototype[k] = methods[k] + } + + class Test extends AbstractLevel { + _iterator (options) { + return new 
TestIterator(this, options) + } + } + + return new Test({ encodings: { utf8: true } }) +} + +for (const mode of ['iterator', 'keys', 'values']) { + for (const type of ['explicit', 'deferred']) { + const verify = function (t, db, it) { + t.is(db.status, type === 'explicit' ? 'open' : 'opening') + + if (type === 'explicit') { + t.is( + it.constructor.name, + mode === 'iterator' ? 'TestIterator' : mode === 'keys' ? 'DefaultKeyIterator' : 'DefaultValueIterator' + ) + } else { + t.is( + it.constructor, + mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator + ) + } + } + + test(`for await...of ${mode}() (${type} open)`, async function (t) { + t.plan(4) + + const input = [{ key: '1', value: '1' }, { key: '2', value: '2' }] + const output = [] + + const db = withIterator({ + async _next () { + const entry = input[n++] + return entry ? [entry.key, entry.value] : undefined + }, + + async _close () { + // Wait a tick + await undefined + closed = true + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' }) + verify(t, db, it) + + let n = 0 + let closed = false + + for await (const item of it) { + output.push(item) + } + + t.same(output, input.map(x => mode === 'iterator' ? [x.key, x.value] : mode === 'keys' ? 
x.key : x.value)) + t.ok(closed, 'closed') + }) + + test(`for await...of ${mode}() closes on user error (${type} open)`, async function (t) { + t.plan(4) + + const db = withIterator({ + async _next () { + if (n++ > 10) throw new Error('Infinite loop') + return [n.toString(), n.toString()] + }, + + async _close () { + // Wait a tick + await undefined + closed = true + throw new Error('close error') + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]() + verify(t, db, it) + + let n = 0 + let closed = false + + try { + // eslint-disable-next-line no-unused-vars, no-unreachable-loop + for await (const kv of it) { + throw new Error('user error') + } + } catch (err) { + t.is(err.message, 'user error') + t.ok(closed, 'closed') + } + }) + + test(`for await...of ${mode}() closes on iterator error (${type} open)`, async function (t) { + t.plan(5) + + const db = withIterator({ + async _next (callback) { + t.pass('nexted') + throw new Error('iterator error') + }, + + async _close (callback) { + // Wait a tick + await undefined + closed = true + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]() + verify(t, db, it) + + let closed = false + + try { + // eslint-disable-next-line no-unused-vars + for await (const kv of it) { + t.fail('should not yield items') + } + } catch (err) { + t.is(err.message, 'iterator error') + t.ok(closed, 'closed') + } + }) + + test(`for await...of ${mode}() combines errors (${type} open)`, async function (t) { + t.plan(6) + + const db = withIterator({ + async _next (callback) { + t.pass('nexted') + throw new Error('next error') + }, + + async _close (callback) { + closed = true + throw new Error('close error') + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]() + verify(t, db, it) + + let closed = false + + try { + // eslint-disable-next-line no-unused-vars + for await (const kv of it) { + t.fail('should not yield items') + } + } catch (err) { + t.is(err.name, 'CombinedError') + 
t.is(err.message, 'next error; close error') + t.ok(closed, 'closed') + } + }) + + test(`for await...of ${mode}() closes on user break (${type} open)`, async function (t) { + t.plan(4) + + const db = withIterator({ + async _next () { + if (n++ > 10) throw new Error('Infinite loop') + return [n.toString(), n.toString()] + }, + + async _close () { + // Wait a tick + await undefined + closed = true + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]() + verify(t, db, it) + + let n = 0 + let closed = false + + // eslint-disable-next-line no-unused-vars, no-unreachable-loop + for await (const kv of it) { + t.pass('got a chance to break') + break + } + + t.ok(closed, 'closed') + }) + + test(`for await...of ${mode}() closes on user return (${type} open)`, async function (t) { + t.plan(4) + + const db = withIterator({ + async _next () { + if (n++ > 10) throw new Error('Infinite loop') + return [n.toString(), n.toString()] + }, + + async _close (callback) { + // Wait a tick + await undefined + closed = true + } + }) + + if (type === 'explicit') await db.open() + const it = db[mode]() + verify(t, db, it) + + let n = 0 + let closed = false + + await (async () => { + // eslint-disable-next-line no-unused-vars, no-unreachable-loop + for await (const kv of it) { + t.pass('got a chance to return') + return + } + })() + + t.ok(closed, 'closed') + }) + } +} diff --git a/test/self/attach-resource-test.js b/test/self/attach-resource-test.js new file mode 100644 index 0000000..6f0b7ef --- /dev/null +++ b/test/self/attach-resource-test.js @@ -0,0 +1,74 @@ +'use strict' + +const test = require('tape') +const { mockLevel } = require('../util') + +test('resource must be an object with a close() method', async function (t) { + t.plan(4) + + const db = mockLevel() + + for (const invalid of [null, undefined, {}, { close: 123 }]) { + try { + db.attachResource(invalid) + } catch (err) { + t.is(err && err.message, 'The first argument must be a resource object') + } + } + 
+ return db.close() +}) + +test('resource is closed on failed open', function (t) { + t.plan(2) + + const db = mockLevel({ + async _open (options) { + t.pass('opened') + throw new Error('_open error') + } + }) + + const resource = { + async close () { + // Note: resource shouldn't care about db.status + t.is(arguments.length, 0) + } + } + + db.attachResource(resource) +}) + +for (const open of [true, false]) { + test(`resource is closed on db.close() (explicit open: ${open})`, async function (t) { + t.plan(1) + + const db = mockLevel() + + const resource = { + async close () { + // Note: resource shouldn't care about db.status + t.pass('closed') + } + } + + if (open) await db.open() + db.attachResource(resource) + return db.close() + }) + + test(`resource is not closed on db.close() if detached (explicit open: ${open})`, async function (t) { + const db = mockLevel() + + const resource = { + async close () { + t.fail('should not be called') + } + } + + if (open) await db.open() + db.attachResource(resource) + db.detachResource(resource) + return db.close() + }) +} diff --git a/test/self/defer-test.js b/test/self/defer-test.js new file mode 100644 index 0000000..15e2481 --- /dev/null +++ b/test/self/defer-test.js @@ -0,0 +1,140 @@ +'use strict' + +const test = require('tape') +const { mockLevel } = require('../util') + +test('defer() and deferAsync() require valid function argument', async function (t) { + t.plan(6 * 2) + + const db = mockLevel() + + for (const invalid of [123, true, false, null, undefined, {}]) { + try { + db.defer(invalid) + } catch (err) { + t.is(err.message, 'The first argument must be a function') + } + + try { + await db.deferAsync(invalid) + } catch (err) { + t.is(err.message, 'The first argument must be a function') + } + } + + return db.close() +}) + +test('defer() custom operation', async function (t) { + t.plan(3) + + const db = mockLevel({ + custom (arg) { + t.is(this.status, 'opening') + t.is(arg, 123) + + this.defer(() => { + 
t.is(this.status, 'open') + }) + } + }) + + db.custom(123) + await db.open() + + return db.close() +}) + +test('deferAsync() custom operation', async function (t) { + t.plan(4) + + const db = mockLevel({ + async custom (arg) { + if (this.status === 'opening') { + t.is(arg, 123) + return this.deferAsync(() => this.custom(456)) + } else { + t.is(db.status, 'open') + t.is(arg, 456) + return 987 + } + } + }) + + const result = await db.custom(123) + t.is(result, 987, 'result ok') + + return db.close() +}) + +test('deferAsync() custom operation with promise rejection', async function (t) { + t.plan(4) + + const db = mockLevel({ + async custom (arg) { + if (this.status === 'opening') { + t.is(arg, 123) + return this.deferAsync(() => this.custom(456)) + } else { + t.is(db.status, 'open') + t.is(arg, 456) + throw new Error('test') + } + } + }) + + try { + await db.custom(123) + } catch (err) { + t.is(err.message, 'test', 'got error') + } + + return db.close() +}) + +test('deferAsync() custom operation with failed open', async function (t) { + t.plan(3) + + const db = mockLevel({ + async _open (options) { + t.pass('opened') + throw new Error('_open error') + }, + async custom (arg) { + if (this.status === 'opening') { + return this.deferAsync(() => this.custom(arg)) + } else { + t.is(db.status, 'closed') + throw new Error('Database is not open (from custom)') + } + } + }) + + try { + await db.custom() + } catch (err) { + t.is(err.message, 'Database is not open (from custom)') + } +}) + +test('defer() can drop custom synchronous operation', function (t) { + t.plan(3) + + const db = mockLevel({ + async _open (options) { + t.pass('opened') + throw new Error('_open error') + }, + custom (arg) { + if (this.status === 'opening') { + this.defer(() => this.custom(arg * 2)) + } else { + // Handling other states is a userland responsibility + t.is(db.status, 'closed') + t.is(arg, 246) + } + } + }) + + db.custom(123) +}) diff --git a/test/self/deferred-iterator-test.js 
b/test/self/deferred-iterator-test.js new file mode 100644 index 0000000..2834e51 --- /dev/null +++ b/test/self/deferred-iterator-test.js @@ -0,0 +1,314 @@ +'use strict' + +const test = require('tape') +const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator') +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..') +const { mockLevel } = require('../util') +const noop = () => {} +const identity = (v) => v + +for (const mode of ['iterator', 'keys', 'values']) { + const RealCtor = mode === 'iterator' ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator + const DeferredCtor = mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator + const nextArg = mode === 'iterator' ? ['key', 'value'] : mode === 'keys' ? 'key' : 'value' + const privateMethod = '_' + mode + const publicMethod = mode + + // NOTE: adapted from deferred-leveldown + test(`deferred ${mode}().next()`, async function (t) { + t.plan(5) + + const keyEncoding = { + format: 'utf8', + encode (key) { + t.is(key, 'foo', 'encoding got key') + return key.toUpperCase() + }, + decode: identity + } + + class MockIterator extends RealCtor { + async _next () { + return nextArg + } + + async _close () {} + } + + const db = mockLevel({ + [privateMethod]: function (options) { + t.is(options.gt, 'FOO', 'got encoded range option') + return new MockIterator(this, options) + }, + async _open (options) { + t.pass('opened') + } + }, { encodings: { utf8: true } }, { + keyEncoding + }) + + const it = db[publicMethod]({ gt: 'foo' }) + t.ok(it instanceof DeferredCtor, 'is deferred') + + t.is(await it.next(), nextArg) + return it.close() + }) + + // NOTE: adapted from deferred-leveldown + test(`deferred ${mode}(): non-deferred operations`, async function (t) { + t.plan(3) + + class MockIterator extends RealCtor { + _seek (target) { + t.is(target, '123') + } + + async 
_next () { + return nextArg + } + } + + const db = mockLevel({ + [privateMethod]: function (options) { + return new MockIterator(this, options) + } + }) + + const it = db[publicMethod]({ gt: 'foo' }) + t.ok(it instanceof DeferredCtor) + + await db.open() + it.seek(123) + t.is(await it.next(), nextArg) + + return it.close() + }) + + // NOTE: adapted from deferred-leveldown + test(`deferred ${mode}(): iterators are created in order`, function (t) { + t.plan(4) + + const order1 = [] + const order2 = [] + + class MockIterator extends RealCtor {} + + function db (order) { + return mockLevel({ + [privateMethod]: function (options) { + order.push('iterator created') + return new MockIterator(this, options) + }, + async _put (key, value, options) { + order.push('put') + } + }) + } + + const db1 = db(order1) + const db2 = db(order2) + + db1.open().then(function () { + t.same(order1, ['iterator created', 'put']) + }) + + db2.open().then(function () { + t.same(order2, ['put', 'iterator created']) + }) + + t.ok(db1[publicMethod]() instanceof DeferredCtor) + db1.put('key', 'value', noop) + + db2.put('key', 'value', noop) + t.ok(db2[publicMethod]() instanceof DeferredCtor) + }) + + for (const method of ['next', 'nextv', 'all']) { + test(`deferred ${mode}(): closed upon failed open, verified by ${method}()`, async function (t) { + t.plan(5) + + const db = mockLevel({ + async _open (options) { + t.pass('opening') + throw new Error('_open error') + }, + _iterator () { + t.fail('should not be called') + }, + [privateMethod] () { + t.fail('should not be called') + } + }) + + const it = db[publicMethod]() + t.ok(it instanceof DeferredCtor) + + const original = it._close + it._close = async function (...args) { + t.pass('closed') + return original.call(this, ...args) + } + + return verifyClosed(t, it, method) + }) + + test(`deferred ${mode}(): deferred and real iterators are closed on db.close(), verified by ${method}()`, async function (t) { + t.plan(7) + + class MockIterator extends 
RealCtor { + async _close () { + t.pass('closed') + } + } + + const db = mockLevel({ + [privateMethod] (options) { + return new MockIterator(this, options) + } + }) + + const it = db[publicMethod]() + t.ok(it instanceof DeferredCtor) + + const original = it._close + it._close = async function (...args) { + t.pass('closed') + return original.call(this, ...args) + } + + await db.open() + await db.close() + + await verifyClosed(t, it, method) + await db.open() + + // Should still be closed + await verifyClosed(t, it, method) + + return db.close() + }) + } + + test(`deferred ${mode}(): deferred and real iterators are detached on db.close()`, async function (t) { + class MockIterator extends RealCtor {} + + let real + const db = mockLevel({ + [privateMethod] (options) { + real = new MockIterator(this, options) + return real + } + }) + + const it = db[publicMethod]() + t.ok(it instanceof DeferredCtor) + + const calls = [] + const wrap = (obj, name) => { + const original = obj.close + + obj.close = async function (...args) { + calls.push(name) + return original.apply(this, args) + } + } + + // First open(), to also create the real iterator. + await db.open() + + wrap(it, 'deferred') + wrap(real, 'real') + + await db.close() + + // There may have been 2 real.close() calls: one by the db closing resources, and + // another by the deferred iterator that wraps real. Not relevant for this test. + t.same(calls.splice(0, calls.length).slice(0, 2), ['deferred', 'real']) + + // Reopen. Resources should be detached at this point. + await db.open() + await db.close() + + // So close() should not have been called again. 
+ t.same(calls, [], 'no new calls') + }) + + test(`deferred ${mode}(): defers underlying close()`, function (t) { + t.plan(2) + + class MockIterator extends RealCtor { + async _close () { + order.push('_close') + } + } + + const order = [] + const db = mockLevel({ + async _open (options) { + order.push('_open') + }, + [privateMethod] (options) { + order.push(privateMethod) + return new MockIterator(this, options) + } + }) + + const it = db[publicMethod]() + t.ok(it instanceof DeferredCtor) + + it.close().then(function () { + t.same(order, ['_open', privateMethod, '_close']) + }) + }) + + globalThis.AbortController && test(`deferred ${mode}(): skips real iterator if aborted`, function (t) { + t.plan(3) + + const order = [] + const db = mockLevel({ + async _open (options) { + order.push('_open') + }, + [privateMethod] (options) { + t.fail('should not be called') + } + }) + + const ac = new globalThis.AbortController() + const it = db[publicMethod]({ signal: ac.signal }) + t.ok(it instanceof DeferredCtor) + + // Test synchronous call, which should be silently skipped on abort + it.seek('foo') + + // Test asynchronous call, which should be rejected + it.next().then(t.fail.bind(t, 'should not succeed'), function (err) { + t.is(err.code, 'LEVEL_ABORTED') + }) + + // Signal should prevent real iterator from being created. + ac.abort() + + it.close().then(function () { + t.same(order, ['_open']) + }) + }) + + const verifyClosed = async function (t, it, method) { + const requiredArgs = method === 'nextv' ? 
[10] : [] + + try { + await it[method](...requiredArgs) + t.fail('should not succeed') + } catch (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on first ${method}()`) + } + + try { + await it[method](...requiredArgs) + t.fail('should not succeed') + } catch (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on second ${method}()`) + } + } +} diff --git a/test/self/deferred-operations-test.js b/test/self/deferred-operations-test.js new file mode 100644 index 0000000..491c65f --- /dev/null +++ b/test/self/deferred-operations-test.js @@ -0,0 +1,86 @@ +'use strict' + +const test = require('tape') +const { mockLevel, mockIterator } = require('../util') + +// NOTE: copied from deferred-leveldown +test('deferred operations are called in order', function (t) { + t.plan(3) + + const calls = [] + const db = mockLevel({ + async _put (key, value, options) { + calls.push({ type: 'put', key, value, options }) + }, + async _get (key, options) { + calls.push({ type: 'get', key, options }) + }, + async _del (key, options) { + calls.push({ type: 'del', key, options }) + }, + async _batch (arr, options) { + calls.push({ type: 'batch', keys: arr.map(op => op.key).join(',') }) + }, + async _clear (options) { + calls.push({ ...options, type: 'clear' }) + }, + _iterator (options) { + calls.push({ type: 'iterator' }) + return mockIterator(this, options, { + async _next () { + calls.push({ type: 'iterator.next' }) + } + }) + }, + async _open (options) { + t.is(calls.length, 0, 'not yet called') + } + }, { + encodings: { + utf8: true, + buffer: true + } + }, { + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }) + + db.open().then(function () { + t.same(calls, [ + { type: 'put', key: '001', value: 'bar1', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } }, + { type: 'get', key: '002', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } }, + { type: 'clear', reverse: false, limit: -1, keyEncoding: 'utf8' }, + { type: 'put', key: '010', value: 
'bar2', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } }, + { type: 'get', key: Buffer.from('011'), options: { keyEncoding: 'buffer', valueEncoding: 'utf8' } }, + { type: 'del', key: '020', options: { customOption: 123, keyEncoding: 'utf8' } }, + { type: 'del', key: '021', options: { keyEncoding: 'utf8' } }, + { type: 'batch', keys: '040,041' }, + { type: 'iterator' }, + { type: 'batch', keys: '050,051' }, + { type: 'iterator.next' }, + { type: 'clear', gt: '060', reverse: false, limit: -1, keyEncoding: 'utf8' } + ], 'calls correctly behaved') + }) + + // We have dangling promises here, but it's a self test, so no worries. + db.put('001', 'bar1') + db.get('002') + db.clear() + db.put('010', 'bar2') + db.get('011', { keyEncoding: 'buffer' }) + db.del('020', { customOption: 123 }) + db.del('021') + db.batch([ + { type: 'put', key: '040', value: 'a' }, + { type: 'put', key: '041', value: 'b' } + ]) + const it = db.iterator() + db.batch([ + { type: 'put', key: '050', value: 'c' }, + { type: 'put', key: '051', value: 'd' } + ]) + it.next() + db.clear({ gt: '060' }) + + t.is(calls.length, 0, 'not yet called') +}) diff --git a/test/self/deferred-queue-test.js b/test/self/deferred-queue-test.js new file mode 100644 index 0000000..5b27646 --- /dev/null +++ b/test/self/deferred-queue-test.js @@ -0,0 +1,93 @@ +'use strict' + +const test = require('tape') +const { DeferredQueue } = require('../../lib/deferred-queue') +const supported = !!globalThis.AbortController + +test('DeferredQueue calls operations in FIFO order', async function (t) { + const queue = new DeferredQueue() + const calls = [] + + queue.add(() => { calls.push(1) }) + queue.add(() => { calls.push(2) }) + queue.add(() => { calls.push(3) }) + + queue.drain() + t.same(calls, [1, 2, 3]) +}) + +test('DeferredQueue only calls operation once', async function (t) { + const queue = new DeferredQueue() + + let calls = 0 + queue.add(() => { calls++ }) + + queue.drain() + t.same(calls, 1) + + queue.drain() + 
t.same(calls, 1, 'no new calls') +}) + +supported && test('DeferredQueue does not add operation if given an aborted signal', async function (t) { + const ac = new globalThis.AbortController() + const queue = new DeferredQueue() + const calls = [] + + ac.abort() + queue.add((abortError) => { calls.push(abortError) }, { signal: ac.signal }) + + t.is(calls.length, 1) + t.is(calls[0].code, 'LEVEL_ABORTED') + + queue.drain() + t.is(calls.length, 1, 'not called again') +}) + +supported && test('DeferredQueue aborts operation on signal abort', async function (t) { + const ac1 = new globalThis.AbortController() + const ac2 = new globalThis.AbortController() + const queue = new DeferredQueue() + const calls = [] + + queue.add((abortError) => { calls.push([1, abortError]) }, { signal: ac1.signal }) + queue.add((abortError) => { calls.push([2, abortError]) }, { signal: ac2.signal }) + t.is(calls.length, 0, 'not yet called') + + ac1.abort() + t.is(calls.length, 1, 'called') + t.is(calls[0][0], 1, 'signal1') + t.is(calls[0][1].code, 'LEVEL_ABORTED') + + ac2.abort() + t.is(calls.length, 2, 'called') + t.is(calls[1][0], 2, 'signal2') + t.is(calls[1][1].code, 'LEVEL_ABORTED') + + queue.drain() + ac2.abort() + t.is(calls.length, 2, 'not called again') +}) + +supported && test('DeferredQueue calls operation if signal is not aborted', async function (t) { + const ac1 = new globalThis.AbortController() + const ac2 = new globalThis.AbortController() + const queue = new DeferredQueue() + const calls = [] + + queue.add((abortError) => { calls.push([1, abortError]) }, { signal: ac1.signal }) + queue.add((abortError) => { calls.push([2, abortError]) }, { signal: ac2.signal }) + t.is(calls.length, 0, 'not yet called') + + queue.drain() + t.is(calls.length, 2, 'called') + t.is(calls[0][0], 1, 'signal1') + t.is(calls[0][1], undefined, 'no abort error') + t.is(calls[1][0], 2, 'signal2') + t.is(calls[1][1], undefined, 'no abort error') + + queue.drain() + ac1.abort() + ac2.abort() + 
t.is(calls.length, 2, 'not called again') +}) diff --git a/test/self/encoding-test.js b/test/self/encoding-test.js new file mode 100644 index 0000000..5a8c7ba --- /dev/null +++ b/test/self/encoding-test.js @@ -0,0 +1,391 @@ +'use strict' + +// TODO: move to per-method test files + +const test = require('tape') +const { Buffer } = require('buffer') +const { mockLevel, mockChainedBatch, nullishEncoding } = require('../util') +const identity = (v) => v + +const utf8Manifest = { encodings: { utf8: true } } +const dualManifest = { encodings: { utf8: true, buffer: true } } +const hasOwnProperty = Object.prototype.hasOwnProperty + +for (const deferred of [false, true]) { + // NOTE: adapted from encoding-down + test(`get() encodes utf8 key (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _get (key, options) { + t.is(key, '8') + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + return 'foo' + } + }, utf8Manifest) + + if (!deferred) await db.open() + t.same(await db.get(8), 'foo') + }) + + // NOTE: adapted from encoding-down + test(`get() takes encoding options (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _get (key, options) { + t.is(key, '[1,"2"]') + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + return '123' + } + }, utf8Manifest) + + if (!deferred) await db.open() + t.same(await db.get([1, '2'], { keyEncoding: 'json', valueEncoding: 'json' }), 123) + }) + + // NOTE: adapted from encoding-down + test(`get() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const db = mockLevel({ + async _get (key, options) { + t.same(key, 'key') + t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' }) + return Buffer.alloc(1) + } + }, dualManifest, { + keyEncoding: 'utf8', + valueEncoding: { encode: identity, decode: identity, format: 'buffer' } + }) + + if (!deferred) await 
db.open() + t.same(await db.get('key'), Buffer.alloc(1)) + }) + + // NOTE: adapted from encoding-down + test(`get() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const db = mockLevel({ + async _get (key, options) { + t.same(key, Buffer.from('key')) + t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' }) + return 'x' + } + }, dualManifest, { + keyEncoding: 'buffer', + valueEncoding: { encode: identity, decode: identity, format: 'utf8' } + }) + + if (!deferred) await db.open() + t.same(await db.get(Buffer.from('key')), 'x') + }) + + // NOTE: adapted from encoding-down + test(`put() encodes utf8 key and value (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _put (key, value, options) { + t.is(key, '8') + t.is(value, '4') + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.put(8, 4) + }) + + // NOTE: adapted from encoding-down + test(`put() takes encoding options (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _put (key, value, options) { + t.is(key, '[1,"2"]') + t.is(value, '{"x":3}') + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.put([1, '2'], { x: 3 }, { keyEncoding: 'json', valueEncoding: 'json' }) + }) + + // NOTE: adapted from encoding-down + test(`del() encodes utf8 key (deferred: ${deferred})`, async function (t) { + t.plan(2) + + const db = mockLevel({ + async _del (key, options) { + t.is(key, '2') + t.is(options.keyEncoding, 'utf8') + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.del(2) + }) + + // NOTE: adapted from encoding-down + test(`del() takes keyEncoding option (deferred: ${deferred})`, async function (t) { + t.plan(2) + + const db = mockLevel({ + async _del (key, options) { + 
t.is(key, '[1,"2"]') + t.is(options.keyEncoding, 'utf8') + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.del([1, '2'], { keyEncoding: 'json' }) + }) + + test(`getMany() encodes utf8 key (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _getMany (keys, options) { + t.same(keys, ['8', '29']) + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + return ['foo', 'bar'] + } + }, utf8Manifest) + + if (!deferred) await db.open() + t.same(await db.getMany([8, 29]), ['foo', 'bar']) + }) + + test(`getMany() takes encoding options (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _getMany (keys, options) { + t.same(keys, ['[1,"2"]', '"x"']) + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + return ['123', '"hi"'] + } + }, utf8Manifest) + + if (!deferred) await db.open() + t.same(await db.getMany([[1, '2'], 'x'], { keyEncoding: 'json', valueEncoding: 'json' }), [123, 'hi']) + }) + + test(`getMany() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const db = mockLevel({ + async _getMany (keys, options) { + t.same(keys, ['key']) + t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' }) + return [Buffer.alloc(1)] + } + }, dualManifest, { + keyEncoding: 'utf8', + valueEncoding: { encode: identity, decode: identity, format: 'buffer' } + }) + + if (!deferred) await db.open() + t.same(await db.getMany(['key']), [Buffer.alloc(1)]) + }) + + test(`getMany() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const db = mockLevel({ + async _getMany (keys, options) { + t.same(keys, [Buffer.from('key')]) + t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' }) + return ['x'] + } + }, dualManifest, { + keyEncoding: 'buffer', + valueEncoding: { encode: identity, decode: identity, format: 'utf8' } + }) 
+ + if (!deferred) await db.open() + t.same(await db.getMany([Buffer.from('key')]), ['x']) + }) + + // NOTE: adapted from encoding-down + deferred || test('chainedBatch.put() and del() encode utf8 key and value', async function (t) { + t.plan(5) + + const db = mockLevel({ + _chainedBatch () { + return mockChainedBatch(this, { + _put: function (key, value, options) { + t.same({ key, value }, { key: '1', value: '2' }) + + // May contain additional options just because it's cheaper to not remove them + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + }, + _del: function (key, options) { + t.is(key, '3') + t.is(options.keyEncoding, 'utf8') + } + }) + } + }, utf8Manifest) + + await db.open() + await db.batch().put(1, 2).del(3).write() + }) + + // NOTE: adapted from encoding-down + deferred || test('chainedBatch.put() and del() take encoding options', async function (t) { + t.plan(5) + + const putOptions = { keyEncoding: 'json', valueEncoding: 'json' } + const delOptions = { keyEncoding: 'json' } + + const db = mockLevel({ + _chainedBatch () { + return mockChainedBatch(this, { + _put: function (key, value, options) { + t.same({ key, value }, { key: '"1"', value: '{"x":[2]}' }) + + // May contain additional options just because it's cheaper to not remove them + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + }, + _del: function (key, options) { + t.is(key, '"3"') + t.is(options.keyEncoding, 'utf8') + } + }) + } + }, utf8Manifest) + + await db.open() + await db.batch().put('1', { x: [2] }, putOptions).del('3', delOptions).write() + }) + + // NOTE: adapted from encoding-down + test(`clear() receives keyEncoding option (deferred: ${deferred})`, async function (t) { + t.plan(1) + + const db = mockLevel({ + async _clear (options) { + t.same(options, { keyEncoding: 'utf8', reverse: false, limit: -1 }) + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.clear() + }) + + test(`clear() takes keyEncoding option 
(deferred: ${deferred})`, async function (t) { + t.plan(1) + + const db = mockLevel({ + async _clear (options) { + t.same(options, { keyEncoding: 'utf8', gt: '"a"', reverse: false, limit: -1 }) + } + }, utf8Manifest) + + if (!deferred) await db.open() + await db.clear({ keyEncoding: 'json', gt: 'a' }) + }) + + // NOTE: adapted from encoding-down + test(`clear() encodes range options (deferred: ${deferred})`, async function (t) { + t.plan(5) + + const keyEncoding = { + format: 'utf8', + encode: function (key) { + return 'encoded_' + key + }, + decode: identity + } + + const db = mockLevel({ + async _clear (options) { + t.is(options.gt, 'encoded_1') + t.is(options.gte, 'encoded_2') + t.is(options.lt, 'encoded_3') + t.is(options.lte, 'encoded_4') + t.is(options.foo, 5) + } + }, utf8Manifest, { keyEncoding }) + + if (!deferred) await db.open() + await db.clear({ gt: 1, gte: 2, lt: 3, lte: 4, foo: 5 }) + }) + + // NOTE: adapted from encoding-down + test(`clear() does not strip nullish range options (deferred: ${deferred})`, async function (t) { + t.plan(12) + + const db1 = mockLevel({ + async _clear (options) { + t.is(options.gt, '\x00', 'encoded null') + t.is(options.gte, '\x00', 'encoded null') + t.is(options.lt, '\x00', 'encoded null') + t.is(options.lte, '\x00', 'encoded null') + } + }, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding }) + + const db2 = mockLevel({ + async _clear (options) { + t.is(hasOwnProperty.call(options, 'gt'), true) + t.is(hasOwnProperty.call(options, 'gte'), true) + t.is(hasOwnProperty.call(options, 'lt'), true) + t.is(hasOwnProperty.call(options, 'lte'), true) + + t.is(options.gt, '\xff', 'encoded undefined') + t.is(options.gte, '\xff', 'encoded undefined') + t.is(options.lt, '\xff', 'encoded undefined') + t.is(options.lte, '\xff', 'encoded undefined') + } + }, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding }) + + if (!deferred) { + await Promise.all([db1.open(), db2.open()]) + } + + 
const promise1 = db1.clear({ + gt: null, + gte: null, + lt: null, + lte: null + }) + + const promise2 = db2.clear({ + gt: undefined, + gte: undefined, + lt: undefined, + lte: undefined + }) + + await Promise.all([promise1, promise2]) + }) + + // NOTE: adapted from encoding-down + test(`clear() does not add nullish range options (deferred: ${deferred})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + async _clear (options) { + t.is(hasOwnProperty.call(options, 'gt'), false) + t.is(hasOwnProperty.call(options, 'gte'), false) + t.is(hasOwnProperty.call(options, 'lt'), false) + t.is(hasOwnProperty.call(options, 'lte'), false) + } + }) + + if (!deferred) await db.open() + await db.clear({}) + }) +} diff --git a/test/self/errors-test.js b/test/self/errors-test.js new file mode 100644 index 0000000..d7b685d --- /dev/null +++ b/test/self/errors-test.js @@ -0,0 +1,11 @@ +'use strict' + +const test = require('tape') +const { AbortError } = require('../../lib/errors') + +test('AbortError', function (t) { + const err = new AbortError() + t.is(err.code, 'LEVEL_ABORTED') + t.is(err.name, 'AbortError') + t.end() +}) diff --git a/test/self/iterator-test.js b/test/self/iterator-test.js new file mode 100644 index 0000000..eacc3a6 --- /dev/null +++ b/test/self/iterator-test.js @@ -0,0 +1,1050 @@ +'use strict' + +const test = require('tape') +const { Buffer } = require('buffer') +const { AbstractLevel } = require('../..') +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..') +const { mockLevel, mockIterator, nullishEncoding } = require('../util') + +const identity = (v) => v +const utf8Manifest = { encodings: { utf8: true } } +const dualManifest = { encodings: { utf8: true, buffer: true } } +const tripleManifest = { encodings: { utf8: true, buffer: true, view: true } } + +for (const deferred of [false, true]) { + // Also test default fallback implementations of keys() and values() + for (const [mode, def] of [['iterator', false], 
['keys', false], ['values', false], ['keys', true], ['values', true]]) { + const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator + const privateMethod = def ? '_iterator' : '_' + mode + const publicMethod = mode + + test(`${mode}() (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(4) + + let called = false + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(this, db, 'thisArg is correct') + t.is(arguments.length, 1, 'got one argument') + + const kvOptions = mode === 'iterator' || def + ? { keys: mode !== 'values', values: mode !== 'keys' } + : {} + + t.same(options, { + reverse: false, + limit: -1, + keyEncoding: 'utf8', + valueEncoding: 'utf8', + ...kvOptions + }) + + called = true + return new Ctor(this, options) + } + } + + const db = new MockLevel(tripleManifest) + if (!deferred) await db.open() + + db[publicMethod]() + t.is(called, !deferred) + if (deferred) await db.open() + }) + + test(`${mode}() with custom options (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(3) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.foo, 123) + t.is(options.reverse, true) + t.is(options.limit, 1) + + return new Ctor(this, options) + } + } + + const db = new MockLevel(tripleManifest) + if (!deferred) await db.open() + db[publicMethod]({ foo: 123, reverse: true, limit: 1 }) + if (deferred) await db.open() + }) + + test(`${mode}().next() skips _next() if it previously signaled end (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let calls = 0 + + class MockIterator extends Ctor { + async _next () { + if (calls++) return undefined + + if (mode === 'iterator' || def) { + return ['a', 'a'] + } else { + return 'a' + } + } 
+ } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + const it = db[publicMethod]() + + t.same(await it.next(), mode === 'iterator' ? ['a', 'a'] : 'a') + t.is(calls, 1, 'got one _next() call') + + t.is(await it.next(), undefined) + t.is(calls, 2, 'got another _next() call') + + t.is(await it.next(), undefined) + t.is(calls, 2, 'not called again') + }) + + for (const limit of [2, 0]) { + test(`${mode}().next() skips _next() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let calls = 0 + let yielded = 0 + + class MockIterator extends Ctor { + async _next () { + calls++ + + if (mode === 'iterator' || def) { + return ['a', 'a'] + } else { + return 'a' + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const it = db[publicMethod]({ limit }) + + for (let i = 0; i < limit + 2; i++) { + const item = await it.next() + if (item === undefined) break + yielded++ + } + + t.is(it.count, limit, 'final count matches limit') + t.is(calls, limit) + t.is(yielded, limit) + }) + + test(`${mode}().nextv() skips _nextv() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let calls = 0 + let yielded = 0 + + class MockIterator extends Ctor { + async _nextv (size, options) { + calls++ + + if (mode === 'iterator' || def) { + return [['a', 'a']] + } else { + return ['a'] + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const it = db[publicMethod]({ limit }) + + for (let i = 0; i < limit + 2; i++) { + const items = await it.nextv(1) + yielded += items.length + if (items.length === 0) break + } + + t.is(it.count, limit, 
'final count matches limit') + t.is(calls, limit) + t.is(yielded, limit) + }) + + test(`${mode}().all() skips _all() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let nextCount = 0 + class MockIterator extends Ctor { + async _next () { + if (++nextCount > 10) { + throw new Error('Potential infinite loop') + } else if (mode === 'iterator' || def) { + return ['a', 'a'] + } else { + return 'a' + } + } + + _all (options, callback) { + t.fail('should not be called') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const it = db[publicMethod]({ limit }) + + // Use next() because all() auto-closes and thus can't be used twice + for (let i = 0; i < limit; i++) await it.next() + + t.same(await it.all(), []) + }) + } + + test(`${mode}().nextv() skips _nextv() if it previously signaled end (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let calls = 0 + + class MockIterator extends Ctor { + async _nextv () { + if (calls++) return [] + + if (mode === 'iterator' || def) { + return [['a', 'a']] + } else { + return ['a'] + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + const it = db[publicMethod]() + + t.same(await it.nextv(100), [mode === 'iterator' ? 
['a', 'a'] : 'a']) + t.is(calls, 1, 'got one _nextv() call') + + t.same(await it.nextv(100), []) + t.is(calls, 2, 'got another _nextv() call') + + t.same(await it.nextv(100), []) + t.is(calls, 2, 'not called again') + }) + + test(`${mode}().nextv() reduces size for _nextv() when near limit (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _nextv (size, options) { + if (mode === 'iterator' || def) { + return Array(size).fill(['a', 'a']) + } else { + return Array(size).fill('a') + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const it = db[publicMethod]({ limit: 3 }) + + t.is((await it.nextv(2)).length, 2) + t.is((await it.nextv(2)).length, 1) + t.is((await it.nextv(2)).length, 0) + }) + + test(`${mode}().count increments by next(), nextv() and all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return ['a', 'a'] + } else { + return 'a' + } + } + + async _nextv (size, options) { + if (mode === 'iterator' || def) { + return [['a', 'a'], ['b', 'b']] + } else { + return ['a', 'b'] + } + } + + async _all (options) { + if (mode === 'iterator' || def) { + return [['c', 'c'], ['d', 'd'], ['e', 'e']] + } else { + return ['c', 'd', 'e'] + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const it = db[publicMethod]() + + for (let i = 0; i < 2; i++) { + t.isNot(await it.next(), undefined) // 2 * 1 = 2 + t.is((await it.nextv(2)).length, 2) // 2 * 2 = 4 + } + + t.is(it.count, 2 + 4) + t.is((await it.all()).length, 3) + t.is(it.count, 2 + 4 + 3) + }) + + 
test(`${mode}() forwards encoding options (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(3) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'buffer') + + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + _next () { + if (mode === 'iterator' || def) { + return ['281', Buffer.from('a')] + } else if (mode === 'keys') { + return '281' + } else { + return Buffer.from('a') + } + } + } + + const db = new MockLevel(dualManifest) + if (!deferred) await db.open() + + const item = await db[publicMethod]({ keyEncoding: 'json', valueEncoding: 'hex' }).next() + t.same(item, mode === 'iterator' ? [281, '61'] : mode === 'keys' ? 281 : '61') + }) + + // NOTE: adapted from encoding-down + test(`${mode}() with custom encodings that want a buffer (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(5) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.keyEncoding, 'buffer') + t.is(options.valueEncoding, 'buffer') + + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return [Buffer.from('a'), Buffer.from('b')] + } else if (mode === 'keys') { + return Buffer.from('a') + } else { + return Buffer.from('b') + } + } + } + + const db = new MockLevel(dualManifest) + const encoding = { encode: spy(identity), decode: spy(identity), format: 'buffer' } + if (!deferred) await db.open() + + const it = db[publicMethod]({ keyEncoding: encoding, valueEncoding: encoding }) + const item = await it.next() + + t.is(encoding.encode.calls, 0, 'did not need to encode anything') + t.is(encoding.decode.calls, mode === 'iterator' ? 2 : 1) + t.same(item, mode === 'iterator' ? [Buffer.from('a'), Buffer.from('b')] : Buffer.from(mode === 'keys' ? 
'a' : 'b')) + }) + + // NOTE: adapted from encoding-down + test(`${mode}() with custom encodings that want a string (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(5) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.keyEncoding, 'utf8') + t.is(options.valueEncoding, 'utf8') + + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return ['a', 'b'] + } else if (mode === 'keys') { + return 'a' + } else { + return 'b' + } + } + } + + const db = new MockLevel(dualManifest) + const encoding = { encode: spy(identity), decode: spy(identity), format: 'utf8' } + if (!deferred) await db.open() + + const it = db[publicMethod]({ keyEncoding: encoding, valueEncoding: encoding }) + const item = await it.next() + + t.is(encoding.encode.calls, 0, 'did not need to encode anything') + t.is(encoding.decode.calls, mode === 'iterator' ? 2 : 1) + t.same(item, mode === 'iterator' ? ['a', 'b'] : mode === 'keys' ? 
'a' : 'b') + }) + + // NOTE: adapted from encoding-down + test(`${mode}() encodes range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(6) + + let calls = 0 + const keyEncoding = { + format: 'utf8', + encode (key) { + calls++ + return 'encoded_' + key + }, + decode: identity + } + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.gt, 'encoded_3') + t.is(options.gte, 'encoded_4') + t.is(options.lt, 'encoded_5') + t.is(options.lte, 'encoded_6') + t.is(options.foo, 7) + return new Ctor(this, options) + } + } + + const db = new MockLevel(utf8Manifest, { keyEncoding }) + if (!deferred) await db.open() + await db[publicMethod]({ gt: 3, gte: 4, lt: 5, lte: 6, foo: 7 }).next() + t.is(calls, 4) + }) + + // NOTE: adapted from encoding-down + test(`${mode}() does not strip nullish range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(12) + + const db1 = mockLevel({ + [privateMethod] (options) { + t.is(options.gt, '\x00', 'encoded null') + t.is(options.gte, '\x00', 'encoded null') + t.is(options.lt, '\x00', 'encoded null') + t.is(options.lte, '\x00', 'encoded null') + + return new Ctor(this, options) + } + }, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding }) + + const db2 = mockLevel({ + [privateMethod] (options) { + t.is(hasOwnProperty.call(options, 'gt'), true) + t.is(hasOwnProperty.call(options, 'gte'), true) + t.is(hasOwnProperty.call(options, 'lt'), true) + t.is(hasOwnProperty.call(options, 'lte'), true) + + t.is(options.gt, '\xff', 'encoded undefined') + t.is(options.gte, '\xff', 'encoded undefined') + t.is(options.lt, '\xff', 'encoded undefined') + t.is(options.lte, '\xff', 'encoded undefined') + + return new Ctor(this, options) + } + }, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding }) + + if (!deferred) { + await Promise.all([db1.open(), db2.open()]) + } + + const promise1 = 
db1[publicMethod]({ + gt: null, + gte: null, + lt: null, + lte: null + }).next() + + const promise2 = db2[publicMethod]({ + gt: undefined, + gte: undefined, + lt: undefined, + lte: undefined + }).next() + + return Promise.all([promise1, promise2]) + }) + + // NOTE: adapted from encoding-down + test(`${mode}() does not add nullish range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(4) + + const db = mockLevel({ + [privateMethod] (options) { + t.is(hasOwnProperty.call(options, 'gt'), false) + t.is(hasOwnProperty.call(options, 'gte'), false) + t.is(hasOwnProperty.call(options, 'lt'), false) + t.is(hasOwnProperty.call(options, 'lte'), false) + + return new Ctor(this, options) + } + }) + + if (!deferred) await db.open() + await db[publicMethod]({}).next() + }) + + // NOTE: adapted from encoding-down + test(`${mode}() encodes seek target (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(2) + + const db = mockLevel({ + [privateMethod] (options) { + return new MockIterator(this, options) + } + }, utf8Manifest, { keyEncoding: 'json' }) + + class MockIterator extends Ctor { + _seek (target, options) { + t.is(target, '"a"', 'encoded once') + t.same(options, { keyEncoding: 'utf8' }) + } + } + + if (!deferred) await db.open() + const it = db[publicMethod]() + it.seek('a') + await it.next() + }) + + // NOTE: adapted from encoding-down + test(`${mode}() encodes seek target with custom encoding (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(1) + + const targets = [] + const db = mockLevel({ + [privateMethod] (options) { + return new MockIterator(this, options) + } + }, utf8Manifest) + + class MockIterator extends Ctor { + _seek (target) { + targets.push(target) + } + } + + if (!deferred) await db.open() + + db[publicMethod]().seek('a') + db[publicMethod]({ keyEncoding: 'json' }).seek('a') + db[publicMethod]().seek('b', { keyEncoding: 'json' }) + + await 
db.open() + t.same(targets, ['a', '"a"', '"b"'], 'encoded targets') + }) + + // NOTE: adapted from encoding-down + test(`${mode}() encodes nullish seek target (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(1) + + const targets = [] + const db = mockLevel({ + [privateMethod] (options) { + return new MockIterator(this, options) + } + }, utf8Manifest, { keyEncoding: { encode: String, decode: identity, format: 'utf8' } }) + + class MockIterator extends Ctor { + _seek (target) { + targets.push(target) + } + } + + if (!deferred) await db.open() + + // Unlike keys, nullish targets should not be rejected; + // assume that the encoding gives these types meaning. + db[publicMethod]().seek(null) + db[publicMethod]().seek(undefined) + + await db.open() + t.same(targets, ['null', 'undefined'], 'encoded') + }) + + test(`${mode}() has default nextv() (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + const sizes = [[1, [0]], [1, [1]], [2, [2]], [3, [3]]] + t.plan(sizes.length * 2) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let pos = 0 + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return ['k' + pos, 'v' + (pos++)] + } else if (mode === 'keys') { + return 'k' + (pos++) + } else { + return 'v' + (pos++) + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + let expectedPos = 0 + const it = db[publicMethod]() + + for (const [size, args] of sizes) { + const actual = await it.nextv(...args) + const expected = [] + + for (let i = 0; i < size; i++) { + const pos = expectedPos++ + if (mode === 'iterator') expected.push(['k' + pos, 'v' + pos]) + else if (mode === 'keys') expected.push('k' + pos) + else expected.push('v' + pos) + } + + t.is(actual.length, size) + t.same(actual, expected) + } + }) + + test(`${mode}() default nextv() forwards next() error 
(deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(2) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + t.pass('called') + throw new Error('test') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + try { + await db[publicMethod]().nextv(10) + } catch (err) { + t.is(err.message, 'test') + } + }) + + test(`${mode}() default nextv() stops when natural end is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + let calls = 0 + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + if (calls++) return undefined + + if (mode === 'iterator' || def) { + return ['a', 'a'] + } else { + return 'a' + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + const it = await db[publicMethod]() + + t.same(await it.nextv(10), [mode === 'iterator' ? 
['a', 'a'] : 'a']) + t.is(calls, 2) + + t.same(await it.nextv(10), [], 'ended') + t.is(calls, 2, 'not called again') + }) + + test(`${mode}() has default all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(8) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + let pos = 0 + let closes = 0 + class MockIterator extends Ctor { + async _nextv (size, options) { + t.is(size, 1000) + t.same(options, {}) + + if (pos === 4) { + return [] + } else if (mode === 'iterator' || def) { + return [[String(pos++), 'a'], [String(pos++), 'b']] + } else if (mode === 'keys') { + return [String(pos++), String(pos++)] + } else { + pos += 2 + return ['a', 'b'] + } + } + + async _close () { + t.is(++closes, 1) + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + t.same(await db[publicMethod]().all(), [ + ['0', 'a'], + ['1', 'b'], + ['2', 'a'], + ['3', 'b'] + ].map(kv => mode === 'iterator' ? kv : kv[mode === 'keys' ? 
0 : 1])) + }) + + test(`${mode}() default all() forwards nextv() error (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(2) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _nextv (size, options) { + t.pass('called') + throw new Error('test') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + try { + await db[publicMethod]().all() + } catch (err) { + t.is(err.message, 'test') + } + }) + + test(`${mode}() default all() stops when limit is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(2) + let calls = 0 + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _nextv (size, options) { + calls++ + if (mode === 'iterator' || def) { + return [[String(calls), String(calls)]] + } else { + return [String(calls)] + } + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + const items = await db[publicMethod]({ limit: 2 }).all() + t.is(items.length, 2) + t.is(calls, 2) + }) + + test(`${mode}() custom all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(3) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _all (options) { + t.same(options, {}) + + if (mode === 'iterator' || def) { + return [['k0', 'v0'], ['k1', 'v1']] + } else if (mode === 'keys') { + return ['k0', 'k1'] + } else { + return ['v0', 'v1'] + } + } + + async _close () { + t.pass('closed') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + t.same(await db[publicMethod]().all(), [ + ['k0', 'v0'], + ['k1', 'v1'] + ].map(kv => mode === 
'iterator' ? kv : kv[mode === 'keys' ? 0 : 1])) + }) + + test(`${mode}() custom all() forwards error and closes (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(3) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _all (options) { + t.pass('_all called') + throw new Error('test') + } + + async _close () { + t.pass('closed') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + try { + await db[publicMethod]().all() + } catch (err) { + t.is(err.message, 'test') + } + }) + + test(`${mode}() all() combines errors (deferred: ${deferred}, default implementation: ${def})`, async function (t) { + t.plan(4) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _all (options) { + t.pass('_all called') + throw new Error('all error') + } + + async _close () { + t.pass('closed') + throw new Error('close error') + } + } + + const db = new MockLevel(utf8Manifest) + if (!deferred) await db.open() + + try { + await db[publicMethod]().all() + } catch (err) { + t.is(err.name, 'CombinedError') + t.is(err.message, 'all error; close error') + } + }) + } +} + +for (const deferred of [false, true]) { + // NOTE: adapted from encoding-down + test(`iterator().next() skips decoding keys if options.keys is false (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const keyEncoding = { + format: 'utf8', + decode (key) { + t.fail('should not be called') + }, + encode: identity + } + + const db = mockLevel({ + _iterator (options) { + t.is(options.keys, false) + + return mockIterator(this, options, { + async _next () { + return ['', 'value'] + } + }) + } + }, utf8Manifest, { keyEncoding }) + + if (!deferred) await db.open() + const [key, value] = await db.iterator({ keys: false 
}).next() + + t.is(key, undefined, 'normalized key to undefined') + t.is(value, 'value', 'got value') + }) + + // NOTE: adapted from encoding-down + test(`iterator().next() skips decoding values if options.values is false (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const valueEncoding = { + format: 'utf8', + decode (value) { + t.fail('should not be called') + }, + encode: identity + } + + const db = mockLevel({ + _iterator (options) { + t.is(options.values, false) + + return mockIterator(this, options, { + async _next () { + return ['key', ''] + } + }) + } + }, utf8Manifest, { valueEncoding }) + + if (!deferred) await db.open() + const [key, value] = await db.iterator({ values: false }).next() + + t.is(key, 'key', 'got key') + t.is(value, undefined, 'normalized value to undefined') + }) + + test(`keys().all() default skips decoding undefined keys (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const keyEncoding = { + format: 'utf8', + decode (key) { + t.isNot(key, undefined) + return key + }, + encode: identity + } + + class MockIterator extends AbstractKeyIterator { + async _all () { + // Note, this is technically invalid + return ['1', undefined, '3'] + } + } + + const db = mockLevel({ + _keys (options) { + return new MockIterator(this, options) + } + }, utf8Manifest, { keyEncoding }) + + if (!deferred) await db.open() + + t.same(await db.keys().all(), ['1', undefined, '3']) + }) + + test(`values().all() default skips decoding undefined values (deferred: ${deferred})`, async function (t) { + t.plan(3) + + const valueEncoding = { + format: 'utf8', + decode (value) { + t.isNot(value, undefined) + return value + }, + encode: identity + } + + class MockIterator extends AbstractValueIterator { + async _all () { + // Note, this is technically invalid + return ['1', undefined, '3'] + } + } + + const db = mockLevel({ + _values (options) { + return new MockIterator(this, options) + } + }, utf8Manifest, { valueEncoding }) + + if 
(!deferred) await db.open() + + t.same(await db.values().all(), ['1', undefined, '3']) + }) +} + +function spy (fn) { + const wrapped = function (...args) { + wrapped.calls++ + return fn(...args) + } + wrapped.calls = 0 + return wrapped +} diff --git a/test/self/sublevel-test.js b/test/self/sublevel-test.js new file mode 100644 index 0000000..9ec4a4a --- /dev/null +++ b/test/self/sublevel-test.js @@ -0,0 +1,1039 @@ +'use strict' + +const test = require('tape') +const { Buffer } = require('buffer') +const { AbstractLevel, AbstractSublevel } = require('../..') +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..') + +class NoopLevel extends AbstractLevel { + constructor (...args) { + super( + { encodings: { utf8: true, buffer: true, view: true } }, + ...args + ) + } +} + +test('sublevel is extensible', function (t) { + t.plan(6) + + class MockLevel extends AbstractLevel { + _sublevel (name, options) { + t.is(name, 'test') + t.same(options, { separator: '!', customOption: 123 }) + + return new MockSublevel(this, name, { + ...options, + manifest: { + encodings: { ignored: true }, + additionalMethods: { test: true }, + events: { foo: true } + } + }) + } + } + + class MockSublevel extends AbstractSublevel { + test () { + this.emit('foo') + } + } + + const db = new MockLevel({ + encodings: { utf8: true }, + additionalMethods: { ignored: true }, + events: { ignored: true } + }) + + const sub = db.sublevel('test', { customOption: 123 }) + + t.is(sub.supports.encodings.ignored, undefined) + t.same(sub.supports.additionalMethods, { test: true }) + t.same(sub.supports.events, { + foo: true, + + // Added by AbstractLevel + opening: true, + open: true, + closing: true, + closed: true, + write: true, + clear: true + }) + + sub.on('foo', () => t.pass('emitted')) + sub.test() +}) + +// NOTE: adapted from subleveldown +test('sublevel name and options', function (t) { + t.test('empty name', function (t) { + const sub = new NoopLevel().sublevel('') 
+ t.is(sub.prefix, '!!') + t.same(sub.path(), ['']) + t.end() + }) + + t.test('name without options', function (t) { + const sub = new NoopLevel().sublevel('name') + t.is(sub.prefix, '!name!') + t.same(sub.path(), ['name']) + t.end() + }) + + t.test('name and separator option', function (t) { + const sub = new NoopLevel().sublevel('name', { separator: '%' }) + t.is(sub.prefix, '%name%') + t.same(sub.path(), ['name']) + t.end() + }) + + t.test('array name', function (t) { + const sub = new NoopLevel().sublevel(['a', 'b']) + const alt = new NoopLevel().sublevel('a').sublevel('b') + + t.is(sub.prefix, '!a!!b!') + t.same(sub.path(), ['a', 'b']) + t.same(sub.path(true), ['a', 'b']) + + t.is(alt.prefix, sub.prefix) + t.same(alt.path(), ['a', 'b']) + t.same(alt.path(true), ['b']) + + t.end() + }) + + t.test('empty array name', function (t) { + const sub = new NoopLevel().sublevel(['', '']) + t.is(sub.prefix, '!!!!') + const alt = new NoopLevel().sublevel('').sublevel('') + t.is(alt.prefix, sub.prefix) + t.end() + }) + + t.test('array name with single element', function (t) { + const sub = new NoopLevel().sublevel(['a']) + t.is(sub.prefix, '!a!') + t.same(sub.path(), ['a']) + + const alt = new NoopLevel().sublevel('a') + t.is(alt.prefix, sub.prefix) + t.same(sub.path(), alt.path()) + + t.end() + }) + + t.test('array name and separator option', function (t) { + const sub = new NoopLevel().sublevel(['a', 'b'], { separator: '%' }) + t.is(sub.prefix, '%a%%b%') + t.same(sub.path(), ['a', 'b']) + + const alt = new NoopLevel().sublevel('a', { separator: '%' }).sublevel('b', { separator: '%' }) + t.is(alt.prefix, sub.prefix) + t.same(alt.path(), ['a', 'b']) + + t.end() + }) + + t.test('separator is trimmed from name', function (t) { + const sub1 = new NoopLevel().sublevel('!name') + t.is(sub1.prefix, '!name!') + t.same(sub1.path(), ['name']) + + const sub2 = new NoopLevel().sublevel('name!') + t.is(sub2.prefix, '!name!') + t.same(sub2.path(), ['name']) + + const sub3 = new 
NoopLevel().sublevel('!!name!!') + t.is(sub3.prefix, '!name!') + t.same(sub3.path(), ['name']) + + const sub4 = new NoopLevel().sublevel('@name@', { separator: '@' }) + t.is(sub4.prefix, '@name@') + t.same(sub4.path(), ['name']) + + const sub5 = new NoopLevel().sublevel(['!!!a', 'b!!!']) + t.is(sub5.prefix, '!a!!b!') + t.same(sub5.path(), ['a', 'b']) + + const sub6 = new NoopLevel().sublevel(['a@@@', '@@@b'], { separator: '@' }) + t.is(sub6.prefix, '@a@@b@') + t.same(sub6.path(), ['a', 'b']) + + t.end() + }) + + t.test('repeated separator can not result in empty prefix', function (t) { + const sub1 = new NoopLevel().sublevel('!!!!') + t.is(sub1.prefix, '!!') + t.same(sub1.path(), ['']) + + const sub2 = new NoopLevel().sublevel(['!!!!', '!!!!']) + t.is(sub2.prefix, '!!!!') + t.same(sub2.path(), ['', '']) + + t.end() + }) + + t.test('invalid sublevel prefix', function (t) { + t.throws(() => new NoopLevel().sublevel('foo\x05'), (err) => err.code === 'LEVEL_INVALID_PREFIX') + t.throws(() => new NoopLevel().sublevel('foo\xff'), (err) => err.code === 'LEVEL_INVALID_PREFIX') + t.throws(() => new NoopLevel().sublevel(['ok', 'foo\xff']), (err) => err.code === 'LEVEL_INVALID_PREFIX') + t.throws(() => new NoopLevel().sublevel('foo!', { separator: '@' }), (err) => err.code === 'LEVEL_INVALID_PREFIX') + t.throws(() => new NoopLevel().sublevel(['ok', 'foo!'], { separator: '@' }), (err) => err.code === 'LEVEL_INVALID_PREFIX') + t.end() + }) + + // See https://github.com/Level/subleveldown/issues/78 + t.test('doubly nested sublevel has correct prefix', async function (t) { + t.plan(1) + + const keys = [] + class MockLevel extends AbstractLevel { + async _put (key, value, options) { + keys.push(key) + } + } + + const db = new MockLevel({ encodings: { utf8: true } }) + const sub1 = db.sublevel('1') + const sub2 = sub1.sublevel('2') + const sub3 = sub2.sublevel('3') + + await sub1.put('a', 'value') + await sub2.put('b', 'value') + await sub3.put('c', 'value') + + t.same(keys.sort(), 
[ + '!1!!2!!3!c', + '!1!!2!b', + '!1!a' + ]) + }) + + t.end() +}) + +test('sublevel.prefixKey()', function (t) { + const db = new AbstractLevel({ encodings: { utf8: true, buffer: true, view: true } }) + const sub = db.sublevel('test') + const textEncoder = new TextEncoder() + + t.same(sub.prefixKey('', 'utf8'), '!test!') + t.same(sub.prefixKey('a', 'utf8'), '!test!a') + t.same(sub.prefixKey('', 'utf8', false), '!test!', 'explicitly global') + t.same(sub.prefixKey('a', 'utf8', false), '!test!a', 'explicitly global') + t.same(sub.prefixKey('', 'utf8', true), '!test!', 'local') + t.same(sub.prefixKey('a', 'utf8', true), '!test!a', 'local') + + t.same(sub.prefixKey(Buffer.from(''), 'buffer'), Buffer.from('!test!')) + t.same(sub.prefixKey(Buffer.from('a'), 'buffer'), Buffer.from('!test!a')) + + t.same(sub.prefixKey(textEncoder.encode(''), 'view'), textEncoder.encode('!test!')) + t.same(sub.prefixKey(textEncoder.encode('a'), 'view'), textEncoder.encode('!test!a')) + + const nested = sub.sublevel('nested') + t.same(nested.prefixKey('', 'utf8'), '!test!!nested!') + t.same(nested.prefixKey('a', 'utf8'), '!test!!nested!a') + t.same(nested.prefixKey('', 'utf8', false), '!test!!nested!', 'explicitly global') + t.same(nested.prefixKey('a', 'utf8', false), '!test!!nested!a', 'explicitly global') + t.same(nested.prefixKey('', 'utf8', true), '!nested!', 'local') + t.same(nested.prefixKey('a', 'utf8', true), '!nested!a', 'local') + + t.end() +}) + +// NOTE: adapted from subleveldown +test('sublevel manifest and parent db', function (t) { + t.test('sublevel inherits manifest from parent db', function (t) { + const parent = new AbstractLevel({ + encodings: { utf8: true }, + explicitSnapshots: true, + foo: true + }) + const sub = parent.sublevel('') + t.is(sub.supports.foo, true, 'AbstractSublevel inherits from parent') + t.is(sub.supports.explicitSnapshots, true, 'AbstractSublevel inherits from parent') + t.end() + }) + + t.test('sublevel does not support additionalMethods', function 
(t) { + const parent = new AbstractLevel({ + encodings: { utf8: true }, + additionalMethods: { foo: true } + }) + + // We're expecting that AbstractSublevel removes the additionalMethod + // because it can't automatically prefix any key(-like) arguments + const sub = parent.sublevel('') + t.same(sub.supports.additionalMethods, {}) + t.same(parent.supports.additionalMethods, { foo: true }) + t.is(typeof sub.foo, 'undefined', 'AbstractSublevel does not expose method') + t.end() + }) + + t.test('sublevel.db is set to root db', function (t) { + const db = new NoopLevel() + const sub = db.sublevel('test') + const nested = sub.sublevel('nested') + t.ok(sub.db === db) + t.ok(nested.db === db) + t.end() + }) + + t.test('sublevel.parent is set to parent db', function (t) { + const db = new NoopLevel() + const sub = db.sublevel('test') + const nested = sub.sublevel('nested') + t.ok(sub.parent === db) + t.ok(nested.parent === sub) + t.end() + }) + + t.test('root db has a null parent', function (t) { + const db = new NoopLevel() + t.is(db.parent, null) + t.end() + }) + + t.end() +}) + +// NOTE: adapted from subleveldown +test('opening & closing sublevel', function (t) { + t.test('error from open() does not bubble up to sublevel', function (t) { + t.plan(5) + + class MockLevel extends AbstractLevel { + async _open (opts) { + throw new Error('test') + } + } + + const db = new MockLevel({ encodings: { buffer: true } }) + const sub = db.sublevel('test') + + db.open().catch((err) => { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause && err.cause.message, 'test') + }) + + sub.open().catch((err) => { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(err.cause && err.cause.code, 'LEVEL_DATABASE_NOT_OPEN') // from db + t.is(err.cause && err.cause.cause, undefined) // but does not have underlying error + }) + }) + + t.test('cannot create a sublevel on a closed db', async function (t) { + t.plan(2) + + const db = new NoopLevel() + await db.open() + await db.close() + + const 
sub = db.sublevel('test') + + try { + await sub.open() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN', 'sublevel not opened') + t.is(err.message, 'Database failed to open') + } + + await db.open() + await sub.open() + await db.sublevel('test2').open() + }) + + t.test('can close db and sublevel once opened', async function (t) { + const db = new NoopLevel() + await db.open() + const sub = db.sublevel('test') + await sub.open() + await db.close() + await sub.close() + }) + + t.test('sublevel is closed by parent', async function (t) { + t.plan(4) + + const db = new NoopLevel() + await db.open() + const sub = db.sublevel('test') + + await db.open() + await sub.open() + + const promise = db.close() + + t.is(db.status, 'closing') + t.is(sub.status, 'closing') + + await promise + + t.is(db.status, 'closed') + t.is(sub.status, 'closed') + }) + + t.test('sublevel rejects operations if parent db is closed', async function (t) { + t.plan(6) + + const db = new NoopLevel() + await db.open() + + const sub = db.sublevel('test') + const it = sub.iterator() + + await sub.open() + await db.close() + + const promises = [ + sub.put('foo', 'bar').catch(verify), + sub.get('foo').catch(verify), + sub.del('foo').catch(verify), + sub.clear().catch(verify), + sub.batch([{ type: 'del', key: 'foo' }]).catch(verify), + it.next().catch(function (err) { + t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN') + return it.close() + }) + ] + + await Promise.all(promises) + + function verify (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + } + }) + + t.test('close db while sublevel is opening', async function (t) { + t.plan(7) + + const db = new NoopLevel() + await db.open() + const sub = db.sublevel('test') + + t.is(db.status, 'open') + t.is(sub.status, 'opening') + + const promises = [ + db.close().then(async function () { + // Ideally it'd be 'closed' but it's still 'opening' at this point. 
+ // TODO: use a signal to abort the open() to transition to 'closed' faster + // t.is(sub.status, 'closed') + + t.is(db.status, 'closed') + + return sub.get('foo').then(t.fail.bind(t, 'should not succeed'), (err) => { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(sub.status, 'closed') + }) + }), + sub.get('foo').then(t.fail.bind(t, 'should not succeed'), (err) => { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(sub.status, 'closed') + }) + ] + + await Promise.all(promises) + }) + + t.test('cannot create sublevel while db is closing', async function (t) { + t.plan(2) + + const db = new NoopLevel() + await db.open() + const promise = db.close() + const sub = db.sublevel('test') + + try { + await sub.open() + } catch (err) { + t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN') + t.is(sub.status, 'closed') + } + + return promise + }) + + t.test('can wrap a sublevel and reopen the wrapped sublevel', async function (t) { + const db = new NoopLevel() + const sub1 = db.sublevel('test1') + const sub2 = sub1.sublevel('test2') + + await sub2.open() + verify() + + // Prefixes should be the same after closing & reopening + // See https://github.com/Level/subleveldown/issues/78 + await sub2.close() + await sub2.open() + verify() + + function verify () { + t.is(sub1.prefix, '!test1!', 'sub1 prefix ok') + t.is(sub2.prefix, '!test1!!test2!', 'sub2 prefix ok') + t.ok(sub1.db === db, 'root is ok') + t.ok(sub2.db === db, 'root is ok') + t.ok(sub1.parent === db, 'parent is ok') + t.ok(sub2.parent === sub1, 'parent is ok') + } + }) + + // Also test default fallback implementations of keys() and values() + for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) { + const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator + const privateMethod = def ? 
'_iterator' : '_' + mode + const publicMethod = mode + + t.test(`error from sublevel.${mode}() bubbles up (default implementation: ${def})`, async function (t) { + t.plan(1) + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + class MockIterator extends Ctor { + async _next () { + throw new Error('next() error from parent database') + } + } + + const db = new MockLevel({ encodings: { buffer: true } }) + const sub = db.sublevel('test') + const it = sub[publicMethod]() + + try { + await it.next() + } catch (err) { + t.is(err.message, 'next() error from parent database') + } finally { + await it.close() + } + }) + } + + t.end() +}) + +test('sublevel operations are prefixed', function (t) { + t.test('sublevel.getMany() is prefixed', async function (t) { + t.plan(2) + + class MockLevel extends AbstractLevel { + async _getMany (keys, options) { + t.same(keys, ['!test!a', '!test!b']) + t.same(options, { keyEncoding: 'utf8', valueEncoding: 'utf8' }) + return ['1', '2'] + } + } + + const db = new MockLevel({ encodings: { utf8: true } }) + const sub = db.sublevel('test') + + await sub.open() + await sub.getMany(['a', 'b']) + }) + + // Also test default fallback implementations of keys() and values() + for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) { + const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator + const privateMethod = def ? 
'_iterator' : '_' + mode + const publicMethod = mode + + for (const deferred of [false, true]) { + t.test(`sublevel ${mode}.seek() target is prefixed (default implementation: ${def}, deferred: ${deferred})`, async function (t) { + t.plan(2) + + class MockIterator extends Ctor { + _seek (target, options) { + t.is(target, '!sub!123') + t.is(options.keyEncoding, 'utf8') + } + } + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + const db = new MockLevel({ encodings: { utf8: true } }) + const sub = db.sublevel('sub', { keyEncoding: 'json' }) + + if (!deferred) await sub.open() + + const it = sub[publicMethod]() + it.seek(123) + + if (deferred) await sub.open() + }) + } + } + + t.test('sublevel.clear() is prefixed', async function (t) { + t.plan(4) + + const calls = [] + class MockLevel extends AbstractLevel { + async _clear (options) { + calls.push(options) + } + } + + const db = new MockLevel({ encodings: { utf8: true } }) + const sub = db.sublevel('sub') + + const test = async (options, expected) => { + await sub.clear(options) + t.same(calls.shift(), expected) + } + + await sub.open() + + await test(undefined, { + gte: '!sub!', + lte: '!sub"', + keyEncoding: 'utf8', + reverse: false, + limit: -1 + }) + + await test({ gt: 'a' }, { + gt: '!sub!a', + lte: '!sub"', + keyEncoding: 'utf8', + reverse: false, + limit: -1 + }) + + await test({ gte: 'a', lt: 'x' }, { + gte: '!sub!a', + lt: '!sub!x', + keyEncoding: 'utf8', + reverse: false, + limit: -1 + }) + + await test({ lte: 'x' }, { + gte: '!sub!', + lte: '!sub!x', + keyEncoding: 'utf8', + reverse: false, + limit: -1 + }) + }) + + t.end() +}) + +test('sublevel encodings', function (t) { + // NOTE: adapted from subleveldown + t.test('different sublevels can have different encodings', async function (t) { + t.plan(6) + + const puts = [] + const gets = [] + + class MockLevel extends AbstractLevel { + async _put (key, value, { keyEncoding, 
valueEncoding }) { + puts.push({ key, value, keyEncoding, valueEncoding }) + } + + async _get (key, { keyEncoding, valueEncoding }) { + gets.push({ key, keyEncoding, valueEncoding }) + return puts.shift().value + } + } + + const db = new MockLevel({ encodings: { buffer: true, utf8: true } }) + const sub1 = db.sublevel('test1', { valueEncoding: 'json' }) + const sub2 = db.sublevel('test2', { keyEncoding: 'buffer', valueEncoding: 'buffer' }) + + await sub1.put('foo', { some: 'json' }) + + t.same(puts, [{ + key: '!test1!foo', + value: '{"some":"json"}', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }]) + + t.same(await sub1.get('foo'), { some: 'json' }) + t.same(gets.shift(), { + key: '!test1!foo', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }) + + await sub2.put(Buffer.from([1, 2]), Buffer.from([3])) + + t.same(puts, [{ + key: Buffer.from('!test2!\x01\x02'), + value: Buffer.from([3]), + keyEncoding: 'buffer', + valueEncoding: 'buffer' + }]) + + t.same(await sub2.get(Buffer.from([1, 2])), Buffer.from([3])) + + t.same(gets.shift(), { + key: Buffer.from('!test2!\x01\x02'), + keyEncoding: 'buffer', + valueEncoding: 'buffer' + }) + }) + + t.test('sublevel indirectly supports transcoded encoding', async function (t) { + t.plan(3) + + class MockLevel extends AbstractLevel { + async _put (key, value, { keyEncoding, valueEncoding }) { + t.same({ key, value, keyEncoding, valueEncoding }, { + key: Buffer.from('!test!foo'), + value: Buffer.from('{"some":"json"}'), + keyEncoding: 'buffer', + valueEncoding: 'buffer' + }) + } + + async _get (key, { keyEncoding, valueEncoding }) { + t.same({ key, keyEncoding, valueEncoding }, { + key: Buffer.from('!test!foo'), + keyEncoding: 'buffer', + valueEncoding: 'buffer' + }) + return Buffer.from('{"some":"json"}') + } + } + + const db = new MockLevel({ encodings: { buffer: true } }) + const sub = db.sublevel('test', { valueEncoding: 'json' }) + + await sub.put('foo', { some: 'json' }) + t.same(await sub.get('foo'), { some: 'json' }) + 
}) + + t.test('concatenating sublevel Buffer keys', async function (t) { + t.plan(8) + + const key = Buffer.from('00ff', 'hex') + const prefixedKey = Buffer.concat([Buffer.from('!test!'), key]) + + class MockLevel extends AbstractLevel { + async _put (key, value, options) { + t.is(options.keyEncoding, 'buffer') + t.is(options.valueEncoding, 'buffer') + t.same(key, prefixedKey) + t.same(value, Buffer.from('bar')) + } + + async _get (key, options) { + t.is(options.keyEncoding, 'buffer') + t.is(options.valueEncoding, 'buffer') + t.same(key, prefixedKey) + return Buffer.from('bar') + } + } + + const db = new MockLevel({ encodings: { buffer: true } }) + const sub = db.sublevel('test', { keyEncoding: 'buffer' }) + + await sub.put(key, 'bar') + t.same(await sub.get(key), 'bar') + }) + + t.test('concatenating sublevel Uint8Array keys', async function (t) { + t.plan(8) + + const key = new Uint8Array([0, 255]) + const textEncoder = new TextEncoder() + const prefix = textEncoder.encode('!test!') + const prefixedKey = new Uint8Array(prefix.byteLength + key.byteLength) + + prefixedKey.set(prefix, 0) + prefixedKey.set(key, prefix.byteLength) + + class MockLevel extends AbstractLevel { + async _put (key, value, options) { + t.is(options.keyEncoding, 'view') + t.is(options.valueEncoding, 'view') + t.same(key, prefixedKey) + t.same(value, textEncoder.encode('bar')) + } + + async _get (key, options) { + t.is(options.keyEncoding, 'view') + t.is(options.valueEncoding, 'view') + t.same(key, prefixedKey) + return textEncoder.encode('bar') + } + } + + const db = new MockLevel({ encodings: { view: true } }) + const sub = db.sublevel('test', { keyEncoding: 'view' }) + + await sub.put(key, 'bar') + t.same(await sub.get(key), 'bar') + }) + + // Also test default fallback implementations of keys() and values() + for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) { + const Ctor = mode === 'iterator' || def ? 
AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator + const privateMethod = def ? '_iterator' : '_' + mode + const publicMethod = mode + + t.test(`unfixing sublevel.${mode}() Buffer keys (default implementation: ${def})`, async function (t) { + t.plan(3) + + const testKey = Buffer.from('00ff', 'hex') + const prefixedKey = Buffer.concat([Buffer.from('!test!'), testKey]) + + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return [prefixedKey, 'bar'] + } else if (mode === 'keys') { + return prefixedKey + } else { + return 'bar' + } + } + } + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.keyEncoding, 'buffer') + t.is(options.valueEncoding, 'utf8') + return new MockIterator(this, options) + } + } + + const db = new MockLevel({ encodings: { buffer: true, view: true, utf8: true } }) + const sub = db.sublevel('test', { keyEncoding: 'buffer' }) + const item = await sub[publicMethod]().next() + + if (mode === 'iterator') { + t.same(item, [testKey, 'bar']) + } else { + t.same(item, mode === 'values' ? 
'bar' : testKey) + } + }) + + t.test(`unfixing sublevel.${mode}() Uint8Array keys (default implementation: ${def})`, async function (t) { + t.plan(3) + + const testKey = new Uint8Array([0, 255]) + const textEncoder = new TextEncoder() + const prefix = textEncoder.encode('!test!') + const prefixedKey = new Uint8Array(prefix.byteLength + testKey.byteLength) + + prefixedKey.set(prefix, 0) + prefixedKey.set(testKey, prefix.byteLength) + + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return [prefixedKey, 'bar'] + } else if (mode === 'keys') { + return prefixedKey + } else { + return 'bar' + } + } + } + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + t.is(options.keyEncoding, 'view') + t.is(options.valueEncoding, 'utf8') + return new MockIterator(this, options) + } + } + + const db = new MockLevel({ encodings: { buffer: true, view: true, utf8: true } }) + const sub = db.sublevel('test', { keyEncoding: 'view' }) + const item = await sub[publicMethod]().next() + + if (mode === 'iterator') { + t.same(item, [testKey, 'bar']) + } else { + t.same(item, mode === 'values' ? 
'bar' : testKey) + } + }) + + mode === 'values' || t.test(`sublevel.${mode}() skips unfixing undefined keys (default implementation: ${def})`, async function (t) { + // Note, this iterator technically returns invalid data + class MockIterator extends Ctor { + async _next () { + if (mode === 'iterator' || def) { + return [undefined, 'foo'] + } else { + return undefined + } + } + + async _nextv () { + if (mode === 'iterator' || def) { + return [[undefined, 'foo']] + } else { + return [undefined] + } + } + + async _all () { + if (mode === 'iterator' || def) { + return [[undefined, 'foo']] + } else { + return [undefined] + } + } + } + + class MockLevel extends AbstractLevel { + [privateMethod] (options) { + return new MockIterator(this, options) + } + } + + const db = new MockLevel({ encodings: { utf8: true } }) + const sub = db.sublevel('test') + + t.same(await sub[publicMethod]().next(), mode === 'iterator' ? [undefined, 'foo'] : undefined) + t.same(await sub[publicMethod]().nextv(1), mode === 'iterator' ? [[undefined, 'foo']] : [undefined]) + t.same(await sub[publicMethod]().all(), mode === 'iterator' ? [[undefined, 'foo']] : [undefined]) + }) + } + + t.end() +}) + +for (const chained of [false, true]) { + // Chained batch does not support deferred open + for (const deferred of (chained ? 
[false] : [false, true])) { + test(`batch() with sublevel per operation (chained: ${chained}, deferred: ${deferred})`, async function (t) { + t.plan(6) + + class MockLevel extends AbstractLevel { + async _batch (operations, options) { + t.same(operations, [ + { + type: 'put', + sublevel: null, + key: '!1!a', + value: '{"foo":123}', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }, + { + type: 'put', + sublevel: null, + key: '!2!a-y', + value: '[object Object]', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }, + { + type: 'put', + sublevel: null, + key: '!1!b', + value: '[object Object]', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }, + { + type: 'put', + sublevel: null, + key: '!2!b', + value: 'b', + keyEncoding: 'utf8', + valueEncoding: 'utf8' + }, + { + type: 'del', + sublevel: null, + key: '!2!c1', + keyEncoding: 'utf8' + }, + { + type: 'del', + sublevel: null, + key: '!2!c2-y', + keyEncoding: 'utf8' + }, + { + type: 'del', + key: 'd-x', + keyEncoding: 'utf8' + } + ]) + t.same(options, {}) + } + } + + const db = new MockLevel({ encodings: { utf8: true } }, { + keyEncoding: { + encode: (key) => key + '-x', + decode: (key) => key.slice(0, -2), + name: 'x', + format: 'utf8' + } + }) + + const sub1 = db.sublevel('1', { valueEncoding: 'json' }) + const sub2 = db.sublevel('2', { + keyEncoding: { + encode: (key) => key + '-y', + decode: (key) => key.slice(0, -2), + name: 'y', + format: 'utf8' + } + }) + + if (!deferred) await sub1.open() + + t.is(sub1.keyEncoding().name, 'utf8') + t.is(sub1.valueEncoding().name, 'json') + t.is(sub2.keyEncoding().name, 'y') + t.is(sub2.valueEncoding().name, 'utf8') + + if (chained) { + await db.batch() + // keyEncoding: utf8 (sublevel), valueEncoding: json (sublevel) + .put('a', { foo: 123 }, { sublevel: sub1 }) + + // keyEncoding: y (sublevel), valueEncoding: utf8 (sublevel) + .put('a', { foo: 123 }, { sublevel: sub2 }) + + // keyEncoding: utf8 (sublevel), valueEncoding: utf8 (operation) + .put('b', { foo: 123 }, { sublevel: 
sub1, valueEncoding: 'utf8' }) + + // keyEncoding: utf8 (operation), valueEncoding: utf8 (sublevel) + .put('b', 'b', { sublevel: sub2, keyEncoding: 'utf8' }) + + // keyEncoding: utf8 (operation) + .del('c1', { sublevel: sub2, keyEncoding: 'utf8' }) + + // keyEncoding: y (sublevel) + .del('c2', { sublevel: sub2 }) + + // keyEncoding: x (db). Should not affect sublevels. + .del('d') + .write() + } else { + await db.batch([ + { type: 'put', sublevel: sub1, key: 'a', value: { foo: 123 } }, + { type: 'put', sublevel: sub2, key: 'a', value: { foo: 123 } }, + { type: 'put', sublevel: sub1, key: 'b', value: { foo: 123 }, valueEncoding: 'utf8' }, + { type: 'put', sublevel: sub2, key: 'b', value: 'b', keyEncoding: 'utf8' }, + { type: 'del', key: 'c1', sublevel: sub2, keyEncoding: 'utf8' }, + { type: 'del', key: 'c2', sublevel: sub2 }, + { type: 'del', key: 'd' } + ]) + } + }) + } +} diff --git a/test/sublevel-test.js b/test/sublevel-test.js new file mode 100644 index 0000000..7fc7336 --- /dev/null +++ b/test/sublevel-test.js @@ -0,0 +1,209 @@ +'use strict' + +const { Buffer } = require('buffer') + +exports.all = function (test, testCommon) { + for (const deferred of [false, true]) { + // NOTE: adapted from subleveldown + test(`sublevel.clear() (deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + const sub1 = db.sublevel('1') + const sub2 = db.sublevel('2') + + if (!deferred) await sub1.open() + if (!deferred) await sub2.open() + + await populate([sub1, sub2], ['a', 'b']) + await verify(['!1!a', '!1!b', '!2!a', '!2!b']) + + await clear([sub1], {}) + await verify(['!2!a', '!2!b']) + + await populate([sub1], ['a', 'b']) + await clear([sub2], { lt: 'b' }) + await verify(['!1!a', '!1!b', '!2!b']) + await db.close() + + async function populate (subs, items) { + return Promise.all(subs.map(sub => { + return sub.batch(items.map(function (item) { + return { type: 'put', key: item, value: item } + })) + })) + } + + async function clear (subs, opts) { + 
return Promise.all(subs.map(sub => { + return sub.clear(opts) + })) + } + + async function verify (expected) { + const keys = await db.keys().all() + t.same(keys, expected) + } + }) + } + + for (const method of ['batch', 'chained batch']) { + test(`${method} with descendant sublevel option`, async function (t) { + t.plan(25) + + const db = testCommon.factory() + await db.open() + + const a = db.sublevel('a') + const b = a.sublevel('b') + const c = b.sublevel('c') + + await Promise.all([a.open(), b.open(), c.open()]) + + // Note: may return a transcoder encoding + const utf8 = db.keyEncoding('utf8') + + const put = method === 'batch' + ? (db, key, opts) => db.batch([{ type: 'put', key, value: 'x', ...opts }]) + : (db, key, opts) => db.batch().put(key, key, opts).write() + + const del = method === 'batch' + ? (db, key, opts) => db.batch([{ type: 'del', key, ...opts }]) + : (db, key, opts) => db.batch().del(key, opts).write() + + // Note: not entirely a noop. Use of sublevel option triggers data to be encoded early + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('1'), 'got put 1')) + await put(db, '1', { sublevel: db }) + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!2'), 'got put 2')) + await put(db, '2', { sublevel: a }) + await put(a, '2', { sublevel: a }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!3'), 'got put 3')) + await put(db, '3', { sublevel: b }) + await put(a, '3', { sublevel: b }) // Same + await put(b, '3', { sublevel: b }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!!c!4'), 'got put 4')) + await put(db, '4', { sublevel: c }) + await put(a, '4', { sublevel: c }) // Same + await put(b, '4', { sublevel: c }) // Same + await put(c, '4', { sublevel: c }) // Same + + t.same(await db.keys().all(), ['!a!!b!!c!4', '!a!!b!3', '!a!2', '1'], 'db has entries') + t.same(await 
a.keys().all(), ['!b!!c!4', '!b!3', '2'], 'sublevel a has entries') + t.same(await b.keys().all(), ['!c!4', '3'], 'sublevel b has entries') + t.same(await c.keys().all(), ['4'], 'sublevel c has entries') + + // Test deletes + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('1'), 'got del 1')) + await del(db, '1', { sublevel: db }) + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!2'), 'got del 2')) + await del(db, '2', { sublevel: a }) + await del(a, '2', { sublevel: a }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!3'), 'got del 3')) + await del(db, '3', { sublevel: b }) + await del(a, '3', { sublevel: b }) // Same + await del(b, '3', { sublevel: b }) // Same + + db.removeAllListeners('write') + db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!!c!4'), 'got del 4')) + await del(db, '4', { sublevel: c }) + await del(a, '4', { sublevel: c }) // Same + await del(b, '4', { sublevel: c }) // Same + await del(c, '4', { sublevel: c }) // Same + + t.same(await db.keys().all(), [], 'db has no entries') + return db.close() + }) + + // See https://github.com/Level/abstract-level/issues/80 + test(`${method} with nondescendant sublevel option`, async function (t) { + const db = testCommon.factory() + await db.open() + + const a = db.sublevel('a') + const b = db.sublevel('b') + + await Promise.all([a.open(), b.open()]) + + // The b sublevel is not a descendant of a, so the sublevel option + // has to be forwarded to db so that the key gets the correct prefix. 
+ if (method === 'batch') { + await a.batch([{ type: 'put', key: 'k', value: 'v', sublevel: b }]) + } else { + await a.batch().put('k', 'v', { sublevel: b }).write() + } + + t.same(await db.keys().all(), ['!b!k'], 'written to sublevel b') + }) + } + + for (const deferred of [false, true]) { + for (const keyEncoding of ['buffer', 'view']) { + if (!testCommon.supports.encodings[keyEncoding]) continue + + // NOTE: adapted from subleveldown. See https://github.com/Level/subleveldown/issues/87 + test(`iterate sublevel keys with bytes above 196 (${keyEncoding}, deferred: ${deferred})`, async function (t) { + const db = testCommon.factory() + const sub1 = db.sublevel('a', { keyEncoding }) + const sub2 = db.sublevel('b', { keyEncoding }) + const length = (db) => db.keys().all().then(x => x.length) + + if (!deferred) await sub1.open() + if (!deferred) await sub2.open() + + const batch1 = [] + const batch2 = [] + const keys = [] + + // TODO: write before creating the sublevels, to make the deferred test more meaningful + for (let i = 0; i < 256; i++) { + const key = keyEncoding === 'buffer' ? 
Buffer.from([i]) : new Uint8Array([i]) + keys.push(key) + batch1.push({ type: 'put', key, value: 'aa' }) + batch2.push({ type: 'put', key, value: 'bb' }) + } + + await Promise.all([sub1.batch(batch1), sub2.batch(batch2)]) + + const entries1 = await sub1.iterator().all() + const entries2 = await sub2.iterator().all() + + t.is(entries1.length, 256, 'sub1 yielded all entries') + t.is(entries2.length, 256, 'sub2 yielded all entries') + t.ok(entries1.every(x => x[1] === 'aa')) + t.ok(entries2.every(x => x[1] === 'bb')) + + const many1 = await sub1.getMany(keys) + const many2 = await sub2.getMany(keys) + + t.is(many1.length, 256, 'sub1 yielded all values') + t.is(many2.length, 256, 'sub2 yielded all values') + t.ok(many1.every(x => x === 'aa')) + t.ok(many2.every(x => x === 'bb')) + + const singles1 = await Promise.all(keys.map(k => sub1.get(k))) + const singles2 = await Promise.all(keys.map(k => sub2.get(k))) + + t.is(singles1.length, 256, 'sub1 yielded all values') + t.is(singles2.length, 256, 'sub2 yielded all values') + t.ok(singles1.every(x => x === 'aa')) + t.ok(singles2.every(x => x === 'bb')) + + await sub1.clear() + + t.same(await length(sub1), 0, 'cleared sub1') + t.same(await length(sub2), 256, 'did not clear sub2') + + await db.close() + }) + } + } +} diff --git a/test/traits/closed.js b/test/traits/closed.js new file mode 100644 index 0000000..a2fb7db --- /dev/null +++ b/test/traits/closed.js @@ -0,0 +1,42 @@ +'use strict' + +module.exports = function (name, testCommon, run) { + const test = testCommon.test + + for (const deferred of [false, true]) { + test(`${name} on closed db fails (deferred open: ${deferred})`, async function (t) { + let error + + const db = testCommon.factory() + if (!deferred) await db.open() + + await db.close() + + try { + await run(t, db) + } catch (err) { + error = err + } + + t.is(error && error.code, 'LEVEL_DATABASE_NOT_OPEN') + }) + + test(`${name} on closing db fails (deferred open: ${deferred})`, async function (t) { + let 
error + + const db = testCommon.factory() + if (!deferred) await db.open() + + const promise = db.close() + + try { + await run(t, db) + } catch (err) { + error = err + } + + await promise + t.is(error && error.code, 'LEVEL_DATABASE_NOT_OPEN') + }) + } +} diff --git a/test/traits/index.js b/test/traits/index.js new file mode 100644 index 0000000..29227f3 --- /dev/null +++ b/test/traits/index.js @@ -0,0 +1,4 @@ +'use strict' + +exports.open = require('./open') +exports.closed = require('./closed') diff --git a/test/traits/open.js b/test/traits/open.js new file mode 100644 index 0000000..1534c8c --- /dev/null +++ b/test/traits/open.js @@ -0,0 +1,62 @@ +'use strict' + +module.exports = function (name, testCommon, options, run) { + if (typeof options === 'function') { + run = options + options = {} + } + + const test = testCommon.test + const deferred = options.deferred !== false + + test(`${name} on open db`, async function (t) { + const db = testCommon.factory() + + await db.open() + t.is(db.status, 'open') + + await run(t, db) + t.is(db.status, 'open') + + return db.close() + }) + + deferred && test(`${name} on opening db`, async function (t) { + const db = testCommon.factory() + t.is(db.status, 'opening') + await run(t, db) + t.is(db.status, 'open') + return db.close() + }) + + test(`${name} on reopened db`, async function (t) { + const db = testCommon.factory() + + await db.close() + t.is(db.status, 'closed') + + await db.open() + t.is(db.status, 'open') + + await run(t, db) + t.is(db.status, 'open') + + return db.close() + }) + + deferred && test(`${name} on reopening db`, async function (t) { + const db = testCommon.factory() + + await db.close() + t.is(db.status, 'closed') + + const promise = db.open() + t.is(db.status, 'opening') + + await run(t, db) + t.is(db.status, 'open') + + await promise + return db.close() + }) +} diff --git a/test/util.js b/test/util.js new file mode 100644 index 0000000..a78d774 --- /dev/null +++ b/test/util.js @@ -0,0 +1,269 @@ +'use 
strict' + +const { AbstractLevel, AbstractChainedBatch, AbstractSnapshot } = require('..') +const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('..') +const noop = function () {} + +exports.illegalKeys = [ + { name: 'null key', key: null }, + { name: 'undefined key', key: undefined } +] + +exports.illegalValues = [ + { name: 'null key', value: null }, + { name: 'undefined value', value: undefined } +] + +// Utility to ensure we're not fooled by `await 123`. Instead do `await assertPromise(123)` +exports.assertPromise = function (p) { + if (typeof p !== 'object' || p === null || typeof p.then !== 'function') { + throw new TypeError('Expected a promise') + } + + return p +} + +exports.mockLevel = function (methods, ...args) { + class TestLevel extends AbstractLevel {} + for (const k in methods) TestLevel.prototype[k] = methods[k] + if (!args.length) args = [{ encodings: { utf8: true } }] + return new TestLevel(...args) +} + +exports.mockIterator = function (db, options, methods, ...args) { + class TestIterator extends AbstractIterator {} + for (const k in methods) TestIterator.prototype[k] = methods[k] + return new TestIterator(db, options, ...args) +} + +exports.mockChainedBatch = function (db, methods, ...args) { + class TestBatch extends AbstractChainedBatch {} + for (const k in methods) TestBatch.prototype[k] = methods[k] + return new TestBatch(db, ...args) +} + +// Mock encoding where null and undefined are significant types +exports.nullishEncoding = { + name: 'nullish', + format: 'utf8', + encode (v) { + return v === null ? '\x00' : v === undefined ? '\xff' : String(v) + }, + decode (v) { + return v === '\x00' ? null : v === '\xff' ? 
undefined : v + } +} + +// Replacement for sinon package (which breaks too often, on features we don't use) +exports.createSpy = function (fn = noop) { + let calls = [] + + const spy = function (...args) { + const returnValue = fn(...args) + calls.push({ thisValue: this, args, returnValue }) + spy.callCount++ + return returnValue + } + + spy.callCount = 0 + spy.getCall = function (n) { + return calls[n] + } + + spy.resetHistory = function () { + calls = [] + spy.callCount = 0 + } + + return spy +} + +const kEntries = Symbol('entries') +const kPosition = Symbol('position') +const kOptions = Symbol('options') + +/** + * A minimal and non-optimized implementation for use in tests. Only supports utf8. + * Don't use this as a reference implementation. + */ +class MinimalLevel extends AbstractLevel { + constructor (options) { + super({ + encodings: { utf8: true }, + seek: true, + has: true, + explicitSnapshots: true + }, options) + + this[kEntries] = new Map() + } + + async _put (key, value, options) { + this[kEntries].set(key, value) + } + + async _get (key, options) { + const entries = (options.snapshot || this)[kEntries] + + // Is undefined if not found + return entries.get(key) + } + + async _getMany (keys, options) { + const entries = (options.snapshot || this)[kEntries] + return keys.map(k => entries.get(k)) + } + + async _has (key, options) { + const entries = (options.snapshot || this)[kEntries] + return entries.has(key) + } + + async _hasMany (keys, options) { + const entries = (options.snapshot || this)[kEntries] + return keys.map(k => entries.has(k)) + } + + async _del (key, options) { + this[kEntries].delete(key) + } + + async _clear (options) { + const entries = (options.snapshot || this)[kEntries] + + for (const [k] of sliceEntries(entries, options, true)) { + this[kEntries].delete(k) + } + } + + async _batch (operations, options) { + const entries = new Map(this[kEntries]) + + for (const op of operations) { + if (op.type === 'put') entries.set(op.key, 
op.value) + else entries.delete(op.key) + } + + this[kEntries] = entries + } + + _iterator (options) { + return new MinimalIterator(this, options) + } + + _keys (options) { + return new MinimalKeyIterator(this, options) + } + + _values (options) { + return new MinimalValueIterator(this, options) + } + + _snapshot (options) { + return new MinimalSnapshot(this, options) + } +} + +class MinimalSnapshot extends AbstractSnapshot { + constructor (db, options) { + super(options) + this[kEntries] = new Map(db[kEntries]) + } +} + +class MinimalIterator extends AbstractIterator { + constructor (db, options) { + super(db, options) + const entries = (options.snapshot || db)[kEntries] + this[kEntries] = sliceEntries(entries, options, false) + this[kOptions] = options + this[kPosition] = 0 + } +} + +class MinimalKeyIterator extends AbstractKeyIterator { + constructor (db, options) { + super(db, options) + const entries = (options.snapshot || db)[kEntries] + this[kEntries] = sliceEntries(entries, options, false) + this[kOptions] = options + this[kPosition] = 0 + } +} + +class MinimalValueIterator extends AbstractValueIterator { + constructor (db, options) { + super(db, options) + const entries = (options.snapshot || db)[kEntries] + this[kEntries] = sliceEntries(entries, options, false) + this[kOptions] = options + this[kPosition] = 0 + } +} + +for (const Ctor of [MinimalIterator, MinimalKeyIterator, MinimalValueIterator]) { + const mapEntry = Ctor === MinimalIterator ? e => e.slice() : Ctor === MinimalKeyIterator ? 
e => e[0] : e => e[1] + + Ctor.prototype._next = async function () { + const entry = this[kEntries][this[kPosition]++] + if (entry === undefined) return undefined + return mapEntry(entry) + } + + Ctor.prototype._nextv = async function (size, options) { + const entries = this[kEntries].slice(this[kPosition], this[kPosition] + size) + this[kPosition] += entries.length + return entries.map(mapEntry) + } + + Ctor.prototype._all = async function (options) { + const end = this.limit - this.count + this[kPosition] + const entries = this[kEntries].slice(this[kPosition], end) + this[kPosition] = this[kEntries].length + return entries.map(mapEntry) + } + + Ctor.prototype._seek = function (target, options) { + this[kPosition] = this[kEntries].length + + if (!outOfRange(target, this[kOptions])) { + // Don't care about performance here + for (let i = 0; i < this[kPosition]; i++) { + const key = this[kEntries][i][0] + + if (this[kOptions].reverse ? key <= target : key >= target) { + this[kPosition] = i + } + } + } + } +} + +const outOfRange = function (target, options) { + if ('gte' in options) { + if (target < options.gte) return true + } else if ('gt' in options) { + if (target <= options.gt) return true + } + + if ('lte' in options) { + if (target > options.lte) return true + } else if ('lt' in options) { + if (target >= options.lt) return true + } + + return false +} + +const sliceEntries = function (entries, options, applyLimit) { + entries = Array.from(entries) + .filter((e) => !outOfRange(e[0], options)) + .sort((a, b) => a[0] > b[0] ? 1 : a[0] < b[0] ? 
-1 : 0) + + if (options.reverse) entries.reverse() + if (applyLimit && options.limit !== -1) entries = entries.slice(0, options.limit) + + return entries +} + +exports.MinimalLevel = MinimalLevel diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..6c3b40a --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@voxpelli/tsconfig/node16.json", + "compilerOptions": { + "checkJs": false + }, + "include": ["*.ts", "types/*.ts"] +} diff --git a/types/abstract-chained-batch.d.ts b/types/abstract-chained-batch.d.ts new file mode 100644 index 0000000..613b9f5 --- /dev/null +++ b/types/abstract-chained-batch.d.ts @@ -0,0 +1,126 @@ +import * as Transcoder from 'level-transcoder' +import { AbstractSublevel } from './abstract-sublevel' +import { AbstractResource } from './interfaces' + +export class AbstractChainedBatch + implements AbstractResource { + constructor (db: TDatabase) + + /** + * A reference to the database that created this chained batch. + */ + db: TDatabase + + /** + * The number of queued operations on the current batch. + */ + get length (): number + + /** + * Queue a _put_ operation on this batch, not committed until {@link write} is + * called. + */ + put (key: KDefault, value: VDefault): this + + put ( + key: K, + value: V, + options: AbstractChainedBatchPutOptions + ): this + + /** + * Queue a _del_ operation on this batch, not committed until {@link write} is + * called. + */ + del (key: KDefault): this + del (key: K, options: AbstractChainedBatchDelOptions): this + + /** + * Clear all queued operations on this batch. + */ + clear (): this + + /** + * Commit the queued operations for this batch. All operations will be written + * atomically, that is, they will either all succeed or fail with no partial + * commits. + */ + write (): Promise + write (options: AbstractChainedBatchWriteOptions): Promise + + /** + * Free up underlying resources. This should be done even if the chained batch has + * zero queued operations. 
Automatically called by {@link write} so normally not + * necessary to call, unless the intent is to discard a chained batch without + * committing it. + */ + close (): Promise + + /** + * Close the batch. + */ + [Symbol.asyncDispose](): Promise +} + +/** + * Options for the {@link AbstractChainedBatch.put} method. + */ +export interface AbstractChainedBatchPutOptions { + /** + * Custom key encoding for this _put_ operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Custom value encoding for this _put_ operation, used to encode the `value`. + */ + valueEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Act as though the _put_ operation is performed on the given sublevel, to similar + * effect as: + * + * ```js + * await sublevel.batch().put(key, value).write() + * ``` + * + * This allows atomically committing data to multiple sublevels. The `key` will be + * prefixed with the `prefix` of the sublevel, and the `key` and `value` will be + * encoded by the sublevel (using the default encodings of the sublevel unless + * {@link keyEncoding} and / or {@link valueEncoding} are provided). + */ + sublevel?: AbstractSublevel | undefined +} + +/** + * Options for the {@link AbstractChainedBatch.del} method. + */ +export interface AbstractChainedBatchDelOptions { + /** + * Custom key encoding for this _del_ operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Act as though the _del_ operation is performed on the given sublevel, to similar + * effect as: + * + * ```js + * await sublevel.batch().del(key).write() + * ``` + * + * This allows atomically committing data to multiple sublevels. The `key` will be + * prefixed with the `prefix` of the sublevel, and the `key` will be encoded by the + * sublevel (using the default key encoding of the sublevel unless {@link keyEncoding} + * is provided). 
+ */ + sublevel?: AbstractSublevel | undefined +} + +/** + * Options for the {@link AbstractChainedBatch.write} method. + */ +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface AbstractChainedBatchWriteOptions { + // There are no abstract options but implementations may add theirs. +} diff --git a/types/abstract-iterator.d.ts b/types/abstract-iterator.d.ts new file mode 100644 index 0000000..ac81b42 --- /dev/null +++ b/types/abstract-iterator.d.ts @@ -0,0 +1,241 @@ +import * as Transcoder from 'level-transcoder' +import { AbstractReadOptions, AbstractResource, RangeOptions } from './interfaces' + +declare interface CommonIteratorOptions extends AbstractReadOptions { + /** + * An [`AbortSignal`][1] to abort read operations on the iterator. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ + signal?: AbortSignal | undefined +} + +export interface AbstractIteratorOptions extends RangeOptions, CommonIteratorOptions { + /** + * Whether to return the key of each entry. Defaults to `true`. If set to `false`, + * the iterator will yield keys that are `undefined`. + */ + keys?: boolean | undefined + + /** + * Whether to return the value of each entry. Defaults to `true`. If set to + * `false`, the iterator will yield values that are `undefined`. + */ + values?: boolean | undefined + + /** + * Custom key encoding for this iterator, used to encode range options, to encode + * {@link AbstractIterator.seek} targets and to decode keys. + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Custom value encoding for this iterator, used to decode values. + */ + valueEncoding?: string | Transcoder.PartialDecoder | undefined +} + +export interface AbstractKeyIteratorOptions extends RangeOptions, CommonIteratorOptions { + /** + * Custom key encoding for this iterator, used to encode range options, to encode + * {@link AbstractKeyIterator.seek} targets and to decode keys. 
+ */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined +} + +export interface AbstractValueIteratorOptions extends RangeOptions, CommonIteratorOptions { + /** + * Custom key encoding for this iterator, used to encode range options and + * {@link AbstractValueIterator.seek} targets. + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Custom value encoding for this iterator, used to decode values. + */ + valueEncoding?: string | Transcoder.PartialDecoder | undefined +} + +/** + * @template TDatabase Type of the database that created this iterator. + * @template T Type of items yielded. Items can be entries, keys or values. + */ +declare class CommonIterator implements AbstractResource { + /** + * A reference to the database that created this iterator. + */ + db: TDatabase + + /** + * Read-only getter that indicates how many items have been yielded so far (by any + * method) excluding calls that errored or yielded `undefined`. + */ + get count (): number + + /** + * Read-only getter that reflects the `limit` that was set in options. Greater than or + * equal to zero. Equals {@link Infinity} if no limit. + */ + get limit (): number + + [Symbol.asyncIterator] (): AsyncGenerator + + /** + * Free up underlying resources. Not necessary to call if [`for await...of`][1] or + * `all()` is used. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + */ + close (): Promise + + /** + * Close the iterator. + */ + [Symbol.asyncDispose](): Promise +} + +export class AbstractIterator extends CommonIterator { + constructor (db: TDatabase, options: AbstractIteratorOptions) + + /** + * Advance to the next entry and yield that entry. When possible, prefer to use + * [`for await...of`][1] instead. 
+ * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + */ + next (): Promise<[K, V] | undefined> + + /** + * Advance repeatedly and get at most {@link size} amount of entries in a single call. + * Can be faster than repeated {@link next()} calls. The natural end of the iterator + * will be signaled by yielding an empty array. + * + * @param size Get at most this many entries. Has a soft minimum of 1. + * @param options Options (none at the moment, reserved for future use). + */ + nextv (size: number, options: {}): Promise> + nextv (size: number): Promise> + + /** + * Advance repeatedly and get all (remaining) entries as an array, automatically + * closing the iterator. Assumes that those entries fit in memory. If that's not the + * case, instead use {@link next()}, {@link nextv()} or [`for await...of`][1]. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + * + * @param options Options (none at the moment, reserved for future use). + */ + all (options: {}): Promise> + all (): Promise> + + /** + * Seek to the key closest to {@link target}. Subsequent calls to {@link next()}, + * {@link nextv()} or {@link all()} (including implicit calls in a `for await...of` + * loop) will yield entries with keys equal to or larger than {@link target}, or equal + * to or smaller than {@link target} if the {@link AbstractIteratorOptions.reverse} + * option was true. + */ + seek (target: K): void + seek (target: TTarget, options: AbstractSeekOptions): void +} + +export class AbstractKeyIterator extends CommonIterator { + constructor (db: TDatabase, options: AbstractKeyIteratorOptions) + + /** + * Advance to the next key and yield that key. When possible, prefer to use + * [`for await...of`][1] instead. 
+ * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + */ + next (): Promise + + /** + * Advance repeatedly and get at most {@link size} amount of keys in a single call. Can + * be faster than repeated {@link next()} calls. The natural end of the iterator will + * be signaled by yielding an empty array. + * + * @param size Get at most this many keys. Has a soft minimum of 1. + * @param options Options (none at the moment, reserved for future use). + */ + nextv (size: number, options: {}): Promise + nextv (size: number): Promise + + /** + * Advance repeatedly and get all (remaining) keys as an array, automatically closing + * the iterator. Assumes that those keys fit in memory. If that's not the case, instead + * use {@link next()}, {@link nextv()} or [`for await...of`][1]. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + * + * @param options Options (none at the moment, reserved for future use). + */ + all (options: {}): Promise + all (): Promise + + /** + * Seek to the key closest to {@link target}. Subsequent calls to {@link next()}, + * {@link nextv()} or {@link all()} (including implicit calls in a `for await...of` + * loop) will yield keys equal to or larger than {@link target}, or equal to or smaller + * than {@link target} if the {@link AbstractKeyIteratorOptions.reverse} option was + * true. + */ + seek (target: K): void + seek (target: TTarget, options: AbstractSeekOptions): void +} + +export class AbstractValueIterator extends CommonIterator { + constructor (db: TDatabase, options: AbstractValueIteratorOptions) + + /** + * Advance to the next value and yield that value. When possible, prefer + * to use [`for await...of`][1] instead. 
+ * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + */ + next (): Promise + + /** + * Advance repeatedly and get at most {@link size} amount of values in a single call. + * Can be faster than repeated {@link next()} calls. The natural end of the iterator + * will be signaled by yielding an empty array. + * + * @param size Get at most this many values. Has a soft minimum of 1. + * @param options Options (none at the moment, reserved for future use). + */ + nextv (size: number, options: {}): Promise + nextv (size: number): Promise + + /** + * Advance repeatedly and get all (remaining) values as an array, automatically closing + * the iterator. Assumes that those values fit in memory. If that's not the case, + * instead use {@link next()}, {@link nextv()} or [`for await...of`][1]. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of + * + * @param options Options (none at the moment, reserved for future use). + */ + all (options: {}): Promise + all (): Promise + + /** + * Seek to the key closest to {@link target}. Subsequent calls to {@link next()}, + * {@link nextv()} or {@link all()} (including implicit calls in a `for await...of` + * loop) will yield the values of keys equal to or larger than {@link target}, or equal + * to or smaller than {@link target} if the {@link AbstractValueIteratorOptions.reverse} + * option was true. + */ + seek (target: K): void + seek (target: TTarget, options: AbstractSeekOptions): void +} + +/** + * Options for the {@link AbstractIterator.seek} method. + */ +export interface AbstractSeekOptions { + /** + * Custom key encoding, used to encode the `target`. By default the keyEncoding option + * of the iterator is used, or (if that wasn't set) the keyEncoding of the database. 
+ */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined +} diff --git a/types/abstract-level.d.ts b/types/abstract-level.d.ts new file mode 100644 index 0000000..625d13b --- /dev/null +++ b/types/abstract-level.d.ts @@ -0,0 +1,634 @@ +import { IManifest } from 'level-supports' +import * as Transcoder from 'level-transcoder' +import { EventEmitter } from 'events' +import { AbstractChainedBatch } from './abstract-chained-batch' +import { AbstractSublevel, AbstractSublevelOptions } from './abstract-sublevel' +import { AbstractSnapshot } from './abstract-snapshot' + +import { + AbstractIterator, + AbstractIteratorOptions, + AbstractKeyIterator, + AbstractKeyIteratorOptions, + AbstractValueIterator, + AbstractValueIteratorOptions +} from './abstract-iterator' + +import { AbstractReadOptions, AbstractResource, RangeOptions } from './interfaces' + +/** + * Abstract class for a lexicographically sorted key-value database. + * + * @template TFormat The type used internally by the database to store data. + * @template KDefault The default type of keys if not overridden on operations. + * @template VDefault The default type of values if not overridden on operations. + */ +declare class AbstractLevel + extends EventEmitter implements AbstractResource { + /** + * Private database constructor. + * + * @param manifest A [manifest](https://github.com/Level/supports) describing the + * features supported by (the private API of) this database. + * @param options Options, of which some will be forwarded to {@link open}. + */ + constructor ( + manifest: Partial, + options?: AbstractDatabaseOptions | undefined + ) + + /** + * A [manifest](https://github.com/Level/supports) describing the features + * supported by this database. + */ + supports: IManifest + + /** + * Allows userland _hook functions_ to customize behavior of the database. 
+ */ + hooks: AbstractDatabaseHooks + + /** + * Read-only getter that returns a string reflecting the current state of the database: + * + * - `'opening'` - waiting for the database to be opened + * - `'open'` - successfully opened the database + * - `'closing'` - waiting for the database to be closed + * - `'closed'` - database is closed. + */ + get status (): 'opening' | 'open' | 'closing' | 'closed' + + /** + * Open the database. + */ + open (): Promise + open (options: AbstractOpenOptions): Promise + + /** + * Close the database. + */ + close (): Promise + + /** + * Close the database. + */ + [Symbol.asyncDispose](): Promise + + /** + * Get a value from the database by {@link key}. + */ + get (key: KDefault): Promise + + get ( + key: K, + options: AbstractGetOptions + ): Promise + + /** + * Get multiple values from the database by an array of {@link keys}. + */ + getMany (keys: KDefault[]): Promise<(VDefault | undefined)[]> + + getMany ( + keys: K[], + options: AbstractGetManyOptions + ): Promise<(V | undefined)[]> + + /** + * Check if the database has an entry with the given {@link key}. + * + * @returns A promise for a boolean that will be true if the entry exists. + * + * @example + * ```js + * if (await db.has('fruit')) { + * console.log('We have fruit') + * } + * ``` + */ + has (key: KDefault): Promise + has (key: K, options: AbstractHasOptions): Promise + + /** + * Check if the database has entries with the given {@link keys}. + * + * @returns A promise for an array of booleans with the same order as {@link keys}. + * + * @example + * ```js + * await db.put('a', '123') + * await db.hasMany(['a', 'b']) // [true, false] + * ``` + */ + hasMany (keys: KDefault[]): Promise + hasMany (keys: K[], options: AbstractHasManyOptions): Promise + + /** + * Add a new entry or overwrite an existing entry. 
+ */ + put (key: KDefault, value: VDefault): Promise + + put ( + key: K, + value: V, + options: AbstractPutOptions + ): Promise + + /** + * Delete an entry by {@link key}. + */ + del (key: KDefault): Promise + + del ( + key: K, + options: AbstractDelOptions + ): Promise + + /** + * Perform multiple _put_ and/or _del_ operations in bulk. + */ + batch ( + operations: Array> + ): Promise + + batch ( + operations: Array>, + options: AbstractBatchOptions + ): Promise + + batch (): AbstractChainedBatch + + /** + * Create an iterator. For example: + * + * ```js + * for await (const [key, value] of db.iterator({ gte: 'a' })) { + * console.log([key, value]) + * } + * ``` + */ + iterator (): AbstractIterator + iterator ( + options: AbstractIteratorOptions + ): AbstractIterator + + /** + * Create a key iterator. For example: + * + * ```js + * for await (const key of db.keys({ gte: 'a' })) { + * console.log(key) + * } + * ``` + */ + keys (): AbstractKeyIterator + keys ( + options: AbstractKeyIteratorOptions + ): AbstractKeyIterator + + /** + * Create a value iterator. For example: + * + * ```js + * for await (const value of db.values({ gte: 'a' })) { + * console.log(value) + * } + * ``` + */ + values (): AbstractValueIterator + values ( + options: AbstractValueIteratorOptions + ): AbstractValueIterator + + /** + * Delete all entries or a range. + */ + clear (): Promise + clear (options: AbstractClearOptions): Promise + + /** + * Create a sublevel. + * @param name Name of the sublevel, used to prefix keys. + */ + sublevel (name: string | string[]): AbstractSublevel + sublevel ( + name: string | string[], + options: AbstractSublevelOptions + ): AbstractSublevel + + /** + * Add sublevel prefix to the given {@link key}, which must be already-encoded. If this + * database is not a sublevel, the given {@link key} is returned as-is. + * + * @param key Key to add prefix to. + * @param keyFormat Format of {@link key}. One of `'utf8'`, `'buffer'`, `'view'`. 
+ * If `'utf8'` then {@link key} must be a string and the return value will be a string. + * If `'buffer'` then Buffer, if `'view'` then Uint8Array. + * @param local If true, add prefix for parent database, else for root database (default). + */ + prefixKey (key: string, keyFormat: 'utf8', local?: boolean | undefined): string + prefixKey (key: Buffer, keyFormat: 'buffer', local?: boolean | undefined): Buffer + prefixKey (key: Uint8Array, keyFormat: 'view', local?: boolean | undefined): Uint8Array + + /** + * Returns the given {@link encoding} argument as a normalized encoding object + * that follows the [`level-transcoder`](https://github.com/Level/transcoder) + * encoding interface. + */ + keyEncoding ( + encoding: N + ): Transcoder.KnownEncoding + + keyEncoding ( + encoding: Transcoder.MixedEncoding + ): Transcoder.Encoding + + /** + * Returns the default key encoding of the database as a normalized encoding + * object that follows the [`level-transcoder`](https://github.com/Level/transcoder) + * encoding interface. + */ + keyEncoding (): Transcoder.Encoding + + /** + * Returns the given {@link encoding} argument as a normalized encoding object + * that follows the [`level-transcoder`](https://github.com/Level/transcoder) + * encoding interface. + */ + valueEncoding ( + encoding: N + ): Transcoder.KnownEncoding + + valueEncoding ( + encoding: Transcoder.MixedEncoding + ): Transcoder.Encoding + + /** + * Returns the default value encoding of the database as a normalized encoding + * object that follows the [`level-transcoder`](https://github.com/Level/transcoder) + * encoding interface. + */ + valueEncoding (): Transcoder.Encoding + + /** + * Create an explicit snapshot. Throws a `LEVEL_NOT_SUPPORTED` error if + * `db.supports.explicitSnapshots` is false ([Level/community#118][1]). + * + * @param options There are currently no options but specific implementations + * may add their own. 
+ * + * @example + * ```ts + * await db.put('example', 'before') + * await using snapshot = db.snapshot() + * await db.put('example', 'after') + * await db.get('example', { snapshot })) // Returns 'before' + * ``` + * + * [1]: https://github.com/Level/community/issues/118 + */ + snapshot (options?: any | undefined): AbstractSnapshot + + /** + * Call the function {@link fn} at a later time when {@link status} changes to + * `'open'` or `'closed'`. Known as a _deferred operation_. + * + * @param fn Synchronous function to (eventually) call. + * @param options Options for the deferred operation. + */ + defer (fn: Function, options?: AbstractDeferOptions | undefined): void + + /** + * Call the function {@link fn} at a later time when {@link status} changes to + * `'open'` or `'closed'`. Known as a _deferred operation_. + * + * @param fn Asynchronous function to (eventually) call. + * @param options Options for the deferred operation. + * @returns A promise for the result of {@link fn}. + */ + deferAsync (fn: () => Promise, options?: AbstractDeferOptions | undefined): Promise + + /** + * Keep track of the given {@link resource} in order to call its `close()` method when + * the database is closed. Once successfully closed, the resource will no longer be + * tracked, to the same effect as manually calling {@link detachResource}. When given + * multiple resources, the database will close them in parallel. Resources are kept in + * a {@link Set} so that the same object will not be attached (and closed) twice. + * + * Intended for objects that rely on an open database. Used internally for built-in + * resources like iterators and sublevels, and is publicly exposed for custom + * resources. + */ + attachResource(resource: AbstractResource): void + + /** + * Stop tracking the given {@link resource}. + */ + detachResource(resource: AbstractResource): void +} + +export { AbstractLevel } + +/** + * Options for the database constructor. 
+ */ +export interface AbstractDatabaseOptions + extends Omit { + /** + * Encoding to use for keys. + * @defaultValue `'utf8'` + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Encoding to use for values. + * @defaultValue `'utf8'` + */ + valueEncoding?: string | Transcoder.PartialEncoding | undefined +} + +/** + * Options for the {@link AbstractLevel.open} method. + */ +export interface AbstractOpenOptions { + /** + * If `true`, create an empty database if one doesn't already exist. If `false` + * and the database doesn't exist, opening will fail. + * + * @defaultValue `true` + */ + createIfMissing?: boolean | undefined + + /** + * If `true` and the database already exists, opening will fail. + * + * @defaultValue `false` + */ + errorIfExists?: boolean | undefined + + /** + * Wait for, but do not initiate, opening of the database. + * + * @defaultValue `false` + */ + passive?: boolean | undefined +} + +/** + * Options for the {@link AbstractLevel.get} method. + */ +export interface AbstractGetOptions extends AbstractReadOptions { + /** + * Custom key encoding for this operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Custom value encoding for this operation, used to decode the value. + */ + valueEncoding?: string | Transcoder.PartialDecoder | undefined +} + +/** + * Options for the {@link AbstractLevel.getMany} method. + */ +export interface AbstractGetManyOptions extends AbstractReadOptions { + /** + * Custom key encoding for this operation, used to encode the `keys`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Custom value encoding for this operation, used to decode values. + */ + valueEncoding?: string | Transcoder.PartialDecoder | undefined +} + +/** + * Options for the {@link AbstractLevel.has} method. 
+ */ +export interface AbstractHasOptions extends AbstractReadOptions { + /** + * Custom key encoding for this operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined +} + +/** + * Options for the {@link AbstractLevel.hasMany} method. + */ +export interface AbstractHasManyOptions extends AbstractReadOptions { + /** + * Custom key encoding for this operation, used to encode the `keys`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined +} + +/** + * Options for the {@link AbstractLevel.put} method. + */ +export interface AbstractPutOptions { + /** + * Custom key encoding for this operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Custom value encoding for this operation, used to encode the `value`. + */ + valueEncoding?: string | Transcoder.PartialEncoder | undefined +} + +/** + * Options for the {@link AbstractLevel.del} method. + */ +export interface AbstractDelOptions { + /** + * Custom key encoding for this operation, used to encode the `key`. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined +} + +/** + * Options for the {@link AbstractLevel.batch} method. + */ +export interface AbstractBatchOptions { + /** + * Custom key encoding for this batch, used to encode keys. + */ + keyEncoding?: string | Transcoder.PartialEncoder | undefined + + /** + * Custom value encoding for this batch, used to encode values. + */ + valueEncoding?: string | Transcoder.PartialEncoder | undefined +} + +/** + * A _put_ or _del_ operation to be committed with the {@link AbstractLevel.batch} + * method. + */ +export type AbstractBatchOperation = + AbstractBatchPutOperation | AbstractBatchDelOperation + +/** + * A _put_ operation to be committed with the {@link AbstractLevel.batch} method. 
+ */ +export interface AbstractBatchPutOperation { + type: 'put' + key: K + value: V + + /** + * Custom key encoding for this _put_ operation, used to encode the {@link key}. + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Custom key encoding for this _put_ operation, used to encode the {@link value}. + */ + valueEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Act as though the _put_ operation is performed on the given sublevel, to similar + * effect as: + * + * ```js + * await sublevel.batch([{ type: 'put', key, value }]) + * ``` + * + * This allows atomically committing data to multiple sublevels. The {@link key} will + * be prefixed with the `prefix` of the sublevel, and the {@link key} and {@link value} + * will be encoded by the sublevel (using the default encodings of the sublevel unless + * {@link keyEncoding} and / or {@link valueEncoding} are provided). + */ + sublevel?: AbstractSublevel | undefined +} + +/** + * A _del_ operation to be committed with the {@link AbstractLevel.batch} method. + */ +export interface AbstractBatchDelOperation { + type: 'del' + key: K + + /** + * Custom key encoding for this _del_ operation, used to encode the {@link key}. + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Act as though the _del_ operation is performed on the given sublevel, to similar + * effect as: + * + * ```js + * await sublevel.batch([{ type: 'del', key }]) + * ``` + * + * This allows atomically committing data to multiple sublevels. The {@link key} will + * be prefixed with the `prefix` of the sublevel, and the {@link key} will be encoded + * by the sublevel (using the default key encoding of the sublevel unless + * {@link keyEncoding} is provided). + */ + sublevel?: AbstractSublevel | undefined +} + +/** + * Options for the {@link AbstractLevel.clear} method. 
+ */ +export interface AbstractClearOptions extends RangeOptions { + /** + * Custom key encoding for this operation, used to encode range options. + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Explicit snapshot to read from, such that entries not present in the snapshot will + * not be deleted. + */ + snapshot?: AbstractSnapshot | undefined +} + +/** + * Allows userland _hook functions_ to customize behavior of the database. + * + * @template TDatabase Type of database. + */ +export interface AbstractDatabaseHooks< + TDatabase, + TOpenOptions = AbstractOpenOptions, + TBatchOperation = AbstractBatchOperation> { + /** + * An asynchronous hook that runs after the database has succesfully opened, but before + * deferred operations are executed and before events are emitted. Example: + * + * ```js + * db.hooks.postopen.add(async function () { + * // Initialize data + * }) + * ``` + */ + postopen: AbstractHook<(options: TOpenOptions) => Promise> + + /** + * A synchronous hook for modifying or adding operations. Example: + * + * ```js + * db.hooks.prewrite.add(function (op, batch) { + * op.key = op.key.toUpperCase() + * }) + * ``` + * + * @todo Define type of `op`. + */ + prewrite: AbstractHook<(op: any, batch: AbstractPrewriteBatch) => void> + + /** + * A synchronous hook that runs when an {@link AbstractSublevel} instance has been + * created by {@link AbstractLevel.sublevel()}. + */ + newsub: AbstractHook<( + sublevel: AbstractSublevel, + options: AbstractSublevelOptions + ) => void> +} + +/** + * An interface for prewrite hook functions to add operations, to be committed in the + * same batch as the input operation(s). + */ +export interface AbstractPrewriteBatch { + /** + * Add a batch operation. + */ + add: (op: TBatchOperation) => this +} + +/** + * @template TFn The hook-specific function signature. + */ +export interface AbstractHook { + /** + * Add the given {@link fn} function to this hook, if it wasn't already added. 
+ * @param fn Hook function. + */ + add: (fn: TFn) => void + + /** + * Remove the given {@link fn} function from this hook. + * @param fn Hook function. + */ + delete: (fn: TFn) => void +} + +/** + * Options for {@link AbstractLevel.defer()} and {@link AbstractLevel.deferAsync()}. + */ +export interface AbstractDeferOptions { + /** + * An [`AbortSignal`][1] to abort the deferred operation. + * + * [1]: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ + signal?: AbortSignal | undefined +} diff --git a/types/abstract-snapshot.d.ts b/types/abstract-snapshot.d.ts new file mode 100644 index 0000000..a7df9dd --- /dev/null +++ b/types/abstract-snapshot.d.ts @@ -0,0 +1,30 @@ +import { AbstractResource } from './interfaces' + +/** + * A lightweight token that represents a version of a database at a particular point in + * time. + */ +export class AbstractSnapshot implements AbstractResource { + /** + * Increment reference count, to register work that should delay closing until + * {@link unref} is called an equal amount of times. The promise that will be returned + * by {@link close} will not resolve until the reference count returns to 0. This + * prevents prematurely closing underlying resources while the snapshot is in use. + */ + ref (): void + + /** + * Decrement reference count, to indicate that the work has finished. + */ + unref (): void + + /** + * Close the snapshot. + */ + close (): Promise + + /** + * Close the snapshot. + */ + [Symbol.asyncDispose](): Promise +} diff --git a/types/abstract-sublevel.d.ts b/types/abstract-sublevel.d.ts new file mode 100644 index 0000000..014fd6e --- /dev/null +++ b/types/abstract-sublevel.d.ts @@ -0,0 +1,72 @@ +import * as Transcoder from 'level-transcoder' +import { AbstractLevel } from './abstract-level' + +/** + * @template TDatabase Type of parent database. + * @template TFormat The type used internally by the parent database to store data. 
+ * @template KDefault The default type of keys if not overridden on operations. + * @template VDefault The default type of values if not overridden on operations. + */ +declare class AbstractSublevel + extends AbstractLevel { + /** + * Sublevel constructor. + * + * @param db Parent database. + * @param name Name of the sublevel, used to prefix keys. + */ + constructor ( + db: TDatabase, + name: string, + options?: AbstractSublevelOptions | undefined + ) + + /** + * Prefix of the sublevel. A read-only string property. + */ + get prefix (): string + + /** + * Get the path of the sublevel, which is its prefix without separators. + * + * @param local If true, exclude path of parent database. If false (the default) then + * recurse to form a fully-qualified path that travels from the root database to this + * sublevel. + */ + path (local?: boolean | undefined): string[] + + /** + * Parent database. A read-only property. + */ + get parent (): TDatabase + + /** + * Root database. A read-only property. + */ + get db (): AbstractLevel +} + +/** + * Options for the {@link AbstractLevel.sublevel} method. + */ +export interface AbstractSublevelOptions { + /** + * Character for separating sublevel names from user keys and each other. Must sort + * before characters used in `name`. An error will be thrown if that's not the case. + * + * @defaultValue `'!'` + */ + separator?: string | undefined + + /** + * Encoding to use for keys. + * @defaultValue `'utf8'` + */ + keyEncoding?: string | Transcoder.PartialEncoding | undefined + + /** + * Encoding to use for values. 
+ * @defaultValue `'utf8'` + */ + valueEncoding?: string | Transcoder.PartialEncoding | undefined +} diff --git a/types/interfaces.d.ts b/types/interfaces.d.ts new file mode 100644 index 0000000..702037b --- /dev/null +++ b/types/interfaces.d.ts @@ -0,0 +1,42 @@ +import { AbstractLevel } from './abstract-level' +import { AbstractSnapshot } from './abstract-snapshot' + +export interface RangeOptions { + gt?: K + gte?: K + lt?: K + lte?: K + reverse?: boolean | undefined + limit?: number | undefined +} + +/** + * Common options for read methods like {@link AbstractLevel.get} and + * {@link AbstractLevel.iterator}. + */ +export interface AbstractReadOptions { + /** + * Explicit snapshot to read from. + */ + snapshot?: AbstractSnapshot | undefined +} + +/** + * Represents a stateful resource that can be closed. + */ +export interface AbstractResource extends AsyncDisposable { + /** + * Close the resource. + * + * Note for implementors: if the resource is exposed to the user and can also be closed + * in an automated fashion - through `db.attachResource()` or other - then the + * `close()` method should be idempotent such that calling it twice will make no + * difference. + */ + close (): Promise + + /** + * Close the resource. Identical in functionality to {@link close}. + */ + [Symbol.asyncDispose](): Promise +}