Initial commit

Your Name 2025-09-19 13:59:34 +08:00
commit 6e34e1d6a2
90 changed files with 18290 additions and 0 deletions

19
.airtap.yml Normal file

@@ -0,0 +1,19 @@
providers:
- airtap-playwright
browsers:
- name: chromium
- name: firefox
- name: webkit
presets:
electron:
providers:
- airtap-electron
browsers:
- name: electron
# Until airtap switches to rollup
browserify:
- transform: babelify
presets: ["@babel/preset-env"]

6
.github/codecov.yml vendored Normal file

@@ -0,0 +1,6 @@
coverage:
status:
project:
default:
threshold: 5%
patch: off

23
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,23 @@
version: 2
updates:
- package-ecosystem: npm
directory: /
schedule:
interval: monthly
ignore:
- dependency-name: standard
- dependency-name: ts-standard
- dependency-name: "@types/node"
- dependency-name: voxpelli/tsconfig
- dependency-name: typescript
- dependency-name: hallmark
- dependency-name: "@babel/preset-env"
- dependency-name: babelify
# Stay on the 3rd or 4th oldest stable release, per
# https://www.electronjs.org/docs/latest/tutorial/electron-timelines#version-support-policy
- dependency-name: electron
- package-ecosystem: github-actions
directory: /
schedule:
interval: monthly

17
.github/workflows/release.yml vendored Normal file

@@ -0,0 +1,17 @@
name: Release
on:
push:
tags: ['*']
permissions:
contents: write
jobs:
release:
name: Release
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Create GitHub release
uses: docker://antonyurchenko/git-release:v4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

65
.github/workflows/test.yml vendored Normal file

@@ -0,0 +1,65 @@
name: Test
on: [push, pull_request]
jobs:
node:
runs-on: ubuntu-latest
strategy:
matrix:
node: [18, 20, 22]
name: Node ${{ matrix.node }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Use node ${{ matrix.node }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
- name: Install
run: npm install --ignore-scripts
- name: Test
run: npm test
- name: Coverage
run: npm run coverage
- name: Codecov
uses: codecov/codecov-action@v3
with:
file: coverage/lcov.info
browsers:
name: Browsers
if: ${{ github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up node
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install
run: npm install --ignore-scripts
- name: Install Playwright dependencies
run: npx --no-install playwright install-deps
- name: Install Playwright
run: npx --no-install playwright install
- name: Test
run: npm run test-browsers
electron:
name: Electron
if: ${{ github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up node
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install
run: npm install --ignore-scripts
- name: Install Electron
run: npm run postinstall
working-directory: node_modules/electron
- name: Test
uses: GabrielBB/xvfb-action@v1
with:
run: npm run test-electron

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
node_modules
coverage
.nyc_output

133
CHANGELOG.md Normal file

@@ -0,0 +1,133 @@
# Changelog
## [3.0.1] - 2025-01-26
### Added
- Test seeking outside of range options ([#113](https://github.com/Level/abstract-level/issues/113)) ([`90ee9b5`](https://github.com/Level/abstract-level/commit/90ee9b5)) (Vincent Weevers)
## [3.0.0] - 2025-01-05
_Would you mind voting in this [community poll](https://github.com/orgs/Level/discussions/143)? Thank you! If you are upgrading, please see [`UPGRADING.md`](UPGRADING.md)._
### Changed
- **Breaking:** use new language features ([#94](https://github.com/Level/abstract-level/issues/94)) ([`1fdb362`](https://github.com/Level/abstract-level/commit/1fdb362)) (Vincent Weevers)
- **Breaking:** make `iterator.seek()` a mandatory feature ([#105](https://github.com/Level/abstract-level/issues/105)) ([`daf2a88`](https://github.com/Level/abstract-level/commit/daf2a88)) (Vincent Weevers)
- **Breaking:** change `_checkKey` and `_checkValue` to assertions ([#108](https://github.com/Level/abstract-level/issues/108)) ([`ca3c368`](https://github.com/Level/abstract-level/commit/ca3c368)) (Vincent Weevers)
### Added
- Implement explicit snapshots ([#93](https://github.com/Level/abstract-level/issues/93)) ([`a8485a2`](https://github.com/Level/abstract-level/commit/a8485a2), [`f81d348`](https://github.com/Level/abstract-level/commit/f81d348), [`b5b583c`](https://github.com/Level/abstract-level/commit/b5b583c)) (Vincent Weevers)
- Implement `has()` and `hasMany()` ([#96](https://github.com/Level/abstract-level/issues/96)) ([`6684039`](https://github.com/Level/abstract-level/commit/6684039)) (Vincent Weevers)
- Implement `Symbol.asyncDispose` ([#95](https://github.com/Level/abstract-level/issues/95)) ([`eedeed9`](https://github.com/Level/abstract-level/commit/eedeed9)) (Vincent Weevers)
- Add docs and types for `attachResource()` & `detachResource()` ([#110](https://github.com/Level/abstract-level/issues/110)) ([`5f621d4`](https://github.com/Level/abstract-level/commit/5f621d4)) (Vincent Weevers)
### Removed
- **Breaking:** remove deprecated `put`, `del` & `batch` events ([#104](https://github.com/Level/abstract-level/issues/104)) ([`86bd271`](https://github.com/Level/abstract-level/commit/86bd271), [`7c32d39`](https://github.com/Level/abstract-level/commit/7c32d39)) (Vincent Weevers)
- **Breaking:** drop support of Node.js 16 ([#103](https://github.com/Level/abstract-level/issues/103)) ([`a05a8ea`](https://github.com/Level/abstract-level/commit/a05a8ea)) (Vincent Weevers)
### Fixed
- Close sublevels upon closing parent db ([#102](https://github.com/Level/abstract-level/issues/102)) ([`9eeb291`](https://github.com/Level/abstract-level/commit/9eeb291)) (Vincent Weevers)
- Avoid cloning option objects in more places ([#109](https://github.com/Level/abstract-level/issues/109)) ([`efd4175`](https://github.com/Level/abstract-level/commit/efd4175)) (Vincent Weevers)
- Refactor: use async/await in `closeResources()` ([#107](https://github.com/Level/abstract-level/issues/107)) ([`fdb7864`](https://github.com/Level/abstract-level/commit/fdb7864)) (Vincent Weevers)
- Refactor: restore use of spread operator ([#106](https://github.com/Level/abstract-level/issues/106)) ([`a5c2e52`](https://github.com/Level/abstract-level/commit/a5c2e52)) (Vincent Weevers)
- Fix skipped sublevel tests ([`f195d99`](https://github.com/Level/abstract-level/commit/f195d99)) (Vincent Weevers)
## [2.0.2] - 2024-12-09
### Fixed
- Fix TypeScript types of `get`, `getMany`, `nextv` and `all` ([#91](https://github.com/Level/abstract-level/issues/91)) ([`bbcfb04`](https://github.com/Level/abstract-level/commit/bbcfb04)) (Junxiao Shi)
## [2.0.1] - 2024-10-21
### Fixed
- Generalize prewrite test for memory-level ([#90](https://github.com/Level/abstract-level/issues/90)) ([`9ea8770`](https://github.com/Level/abstract-level/commit/9ea8770)) (Vincent Weevers)
## [2.0.0] - 2024-02-03
_If you are upgrading, please see [`UPGRADING.md`](UPGRADING.md)._
### Changed
- **Breaking:** remove callbacks in favor of promises ([#50](https://github.com/Level/abstract-level/issues/50)) ([`f97dbae`](https://github.com/Level/abstract-level/commit/f97dbae)) (Vincent Weevers)
- **Breaking:** use `undefined` instead of error for non-existing entries ([#49](https://github.com/Level/abstract-level/issues/49)) ([`1e08b30`](https://github.com/Level/abstract-level/commit/1e08b30)) (Vincent Weevers)
- **Breaking:** add hooks and deprecate `batch`, `put` & `del` events ([#45](https://github.com/Level/abstract-level/issues/45), [#53](https://github.com/Level/abstract-level/issues/53), [#81](https://github.com/Level/abstract-level/issues/81)) ([`bcb4192`](https://github.com/Level/abstract-level/commit/bcb4192), [`bee1085`](https://github.com/Level/abstract-level/commit/bee1085), [`dbcf7d7`](https://github.com/Level/abstract-level/commit/dbcf7d7)) (Vincent Weevers)
- **Breaking:** require snapshots to be created synchronously ([#54](https://github.com/Level/abstract-level/issues/54)) ([`d89e68e`](https://github.com/Level/abstract-level/commit/d89e68e)) (Vincent Weevers).
### Added
- Add experimental support of `AbortSignal` ([#55](https://github.com/Level/abstract-level/issues/55), [#59](https://github.com/Level/abstract-level/issues/59)) ([`b075a25`](https://github.com/Level/abstract-level/commit/b075a25), [`e3fba20`](https://github.com/Level/abstract-level/commit/e3fba20)) (Vincent Weevers)
- Expose path of sublevel ([#78](https://github.com/Level/abstract-level/issues/78)) ([`20974f6`](https://github.com/Level/abstract-level/commit/20974f6)) (Vincent Weevers).
### Removed
- **Breaking:** drop Node.js < 16 ([`9e8f561`](https://github.com/Level/abstract-level/commit/9e8f561)) (Vincent Weevers)
- **Breaking:** remove deferred chained batch ([#51](https://github.com/Level/abstract-level/issues/51), [#58](https://github.com/Level/abstract-level/issues/58)) ([`fc7be7b`](https://github.com/Level/abstract-level/commit/fc7be7b), [`e119cad`](https://github.com/Level/abstract-level/commit/e119cad)) (Vincent Weevers)
- **Breaking:** remove `ready` alias of `open` event ([#48](https://github.com/Level/abstract-level/issues/48)) ([`5f7b923`](https://github.com/Level/abstract-level/commit/5f7b923)) (Vincent Weevers)
- Remove compatibility checks for `levelup` & friends ([#52](https://github.com/Level/abstract-level/issues/52)) ([`def791f`](https://github.com/Level/abstract-level/commit/def791f)) (Vincent Weevers).
### Fixed
- Keep track of iterator end ([#56](https://github.com/Level/abstract-level/issues/56)) ([`9b78443`](https://github.com/Level/abstract-level/commit/9b78443)) (Vincent Weevers).
## [1.0.4] - 2024-01-20
### Fixed
- Fix TypeScript definitions of `all()` and `nextv()` ([#67](https://github.com/Level/abstract-level/issues/67)) ([`8e85993`](https://github.com/Level/abstract-level/commit/8e85993), [`9f17757`](https://github.com/Level/abstract-level/commit/9f17757)) (Bryan)
## [1.0.3] - 2022-03-20
### Added
- Document error codes of `classic-level` and `many-level` ([#20](https://github.com/Level/abstract-level/issues/20)) ([`4b3464c`](https://github.com/Level/abstract-level/commit/4b3464c)) (Vincent Weevers)
### Fixed
- Add hidden `abortOnClose` option to iterators ([`2935180`](https://github.com/Level/abstract-level/commit/2935180)) (Vincent Weevers)
- Make internal iterator decoding options enumerable ([`eb08363`](https://github.com/Level/abstract-level/commit/eb08363)) (Vincent Weevers)
- Restore Sauce Labs browser tests ([`90b8816`](https://github.com/Level/abstract-level/commit/90b8816)) (Vincent Weevers)
## [1.0.2] - 2022-03-06
### Fixed
- Fix TypeScript declaration of chained batch `write()` options ([`392b7f7`](https://github.com/Level/abstract-level/commit/392b7f7)) (Vincent Weevers)
- Document the return type of `db.batch()` and add example ([`9739bba`](https://github.com/Level/abstract-level/commit/9739bba)) (Vincent Weevers)
## [1.0.1] - 2022-02-06
### Fixed
- Add `highWaterMarkBytes` option to tests where it matters ([`6b25a91`](https://github.com/Level/abstract-level/commit/6b25a91)) (Vincent Weevers)
- Clarify the meaning of `db.status` ([`2e90b05`](https://github.com/Level/abstract-level/commit/2e90b05)) (Vincent Weevers)
- Use `new` in README examples ([`379503e`](https://github.com/Level/abstract-level/commit/379503e)) (Vincent Weevers).
## [1.0.0] - 2022-01-30
_:seedling: Initial release. If you are upgrading from `abstract-leveldown` please see [`UPGRADING.md`](UPGRADING.md)_
[3.0.1]: https://github.com/Level/abstract-level/releases/tag/v3.0.1
[3.0.0]: https://github.com/Level/abstract-level/releases/tag/v3.0.0
[2.0.2]: https://github.com/Level/abstract-level/releases/tag/v2.0.2
[2.0.1]: https://github.com/Level/abstract-level/releases/tag/v2.0.1
[2.0.0]: https://github.com/Level/abstract-level/releases/tag/v2.0.0
[1.0.4]: https://github.com/Level/abstract-level/releases/tag/v1.0.4
[1.0.3]: https://github.com/Level/abstract-level/releases/tag/v1.0.3
[1.0.2]: https://github.com/Level/abstract-level/releases/tag/v1.0.2
[1.0.1]: https://github.com/Level/abstract-level/releases/tag/v1.0.1
[1.0.0]: https://github.com/Level/abstract-level/releases/tag/v1.0.0

21
LICENSE Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright © 2013 Rod Vagg and the contributors to abstract-level.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

1924
README.md Normal file

File diff suppressed because it is too large

669
UPGRADING.md Normal file

@@ -0,0 +1,669 @@
# Upgrade Guide
This document describes breaking changes and how to upgrade. For a complete list of changes including minor and patch releases, please refer to the [changelog](CHANGELOG.md).
## 3.0.0
This release drops support of Node.js 16. It also started using new JavaScript language features ([`1fdb362`](https://github.com/Level/abstract-level/commit/1fdb362)) which are supported by all target environments of `abstract-level` but may require additional configuration of JavaScript bundlers, for example if `browserify` is used. Third, the `put`, `del` & `batch` events (which were deprecated in `abstract-level` 2.0.0) have been removed in favor of the `write` event.
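If you relied on those events, the `write` event carries the same information for all writes. A minimal sketch of a listener (see the README for the exact shape of each operation):
```js
// Replaces the removed 'put', 'del' and 'batch' events
db.on('write', (operations) => {
  for (const op of operations) {
    console.log(op.type, op.key) // E.g. 'put example'
  }
})
```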
On to the good news. We have some exciting new features! To start we have "explicit snapshots" which allow you to read previous versions of a database. This will be supported in at least `classic-level` and `memory-level` (see [Level/community#118](https://github.com/Level/community/issues/118)). Here's an example:
```js
await db.put('example', 'before')
const snapshot = db.snapshot()
await db.put('example', 'after')
await db.get('example', { snapshot }) // Returns 'before'
await snapshot.close()
```
In TypeScript (5.2) that last `close()` call can be skipped because we added support of [`Symbol.asyncDispose`](https://github.com/tc39/proposal-explicit-resource-management) on databases, iterators and snapshots:
```ts
await db.put('example', 'before')
await using snapshot = db.snapshot()
await db.put('example', 'after')
await db.get('example', { snapshot }) // Returns 'before'
```
Lastly, we added `has()` and `hasMany()` methods to check if keys exist without the cost of fetching values:
```js
if (await db.has('fruit')) {
console.log('We have fruit')
}
```
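Likewise, `hasMany()` takes an array of keys and resolves to an array of booleans in the same order. The keys below are illustrative:
```js
const [hasFruit, hasVegetables] = await db.hasMany(['fruit', 'vegetables'])
```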
Support of this feature is tracked in [Level/community#142](https://github.com/Level/community/issues/142).
## 2.0.0
**This release adds [hooks](./README.md#hooks) and drops callbacks, not-found errors and support of Node.js < 16. The guide for this release consists of two sections. One for the public API, relevant to all consumers of `abstract-level` and implementations thereof (`level`, `classic-level`, `memory-level` et cetera) and another for the private API that only implementors should have to read.**
If you're upgrading from `levelup`, `abstract-leveldown` or other old modules, it's recommended to first upgrade to `abstract-level` 1.x because that version includes compatibility checks that have since been removed.
### 1. Public API
#### 1.1. Callbacks have been removed
All methods that previously (also) accepted a callback now only support promises. If you were already using promises then nothing changed, except for subtle timing differences and improved performance. If you were not yet using promises, migrating should be relatively straightforward because nearly all callbacks had just two arguments (an error and a result) thus making promise function signatures predictable. The only method that had a callback with more than two arguments was `iterator.next()`. If you previously did:
```js
iterator.next(function (err, key, value) {
// ..
})
```
You must now do:
```js
const [ key, value ] = await iterator.next()
```
Or switch to async iterators:
```js
for await (const [key, value] of iterator) {
// ..
}
```
The deprecated `iterator.end()` alias of `iterator.close()` has been removed.
#### 1.2. Not found
The `db.get()` method now yields `undefined` instead of an error for non-existing entries. If you previously did:
```js
try {
await db.get('example')
} catch (err) {
if (err.code === 'LEVEL_NOT_FOUND') {
console.log('Not found')
}
}
```
You must now do:
```js
const value = await db.get('example')
if (value === undefined) {
console.log('Not found')
}
```
The same applies to equivalent and older `if (err.notFound)` code in the style of `levelup`.
#### 1.3. Not ready
The `ready` alias of the `open` event has been removed. If you previously did:
```js
db.once('ready', function () {
// ..
})
```
You must now do:
```js
db.once('open', function () {
// ..
})
```
That said, old code that uses these events would likely be better off using `db.open()`, because synchronous events don't mix well with `async/await`. You could instead do:
```js
await db.open({ passive: true })
await db.get('example')
```
Or simply:
```js
await db.get('example')
```
#### 1.4. Slower nested sublevels
The internals of nested sublevels have been refactored for the benefit of [hooks](./README.md#hooks). Nested sublevels, no matter their depth, were previously all connected to the same parent database rather than forming a tree. In the following example, the `colorIndex` sublevel would previously forward its operations directly to `db`:
```js
const indexes = db.sublevel('idx')
const colorIndex = indexes.sublevel('colors')
```
It will now forward its operations to `indexes`, which in turn forwards them to `db`. At each step, hooks and events are available to transform and react to data from a different perspective. This comes at a (typically small) performance cost that increases with further nested sublevels.
To optionally negate that cost, a new feature has been added to `db.sublevel(name)`: it now also accepts a `name` that is an array. If the `indexes` sublevel is only used to organize keys and not directly interfaced with, operations on `colorIndex` can be made faster by skipping `indexes`:
```js
const colorIndex = db.sublevel(['idx', 'colors'])
```
#### 1.5. Open before creating a chained batch
It is no longer possible to create a chained batch while the database is opening. If you previously did:
```js
const db = new ExampleLevel()
const batch = db.batch().del('example')
await batch.write()
```
You must now do:
```js
const db = new ExampleLevel()
await db.open()
const batch = db.batch().del('example')
await batch.write()
```
Alternatively:
```js
const db = new ExampleLevel()
await db.batch([{ type: 'del', key: 'example' }])
```
As for why that last example works yet the same is not supported on a chained batch: the `put()`, `del()` and `clear()` methods of a chained batch are synchronous. This meant `abstract-level` (and `levelup` before it) had to jump through several hoops to make it work while the database is opening. Having such logic internally is fine, but the problem extended to the new [hooks](./README.md#hooks) feature and more specifically, the `prewrite` hook that runs on `put()` and `del()`.
### 2. Private API
#### 2.1. Promises all the way
All private methods that previously took a callback now use a promise. For example, the function signature `_get(key, options, callback)` has changed to `async _get(key, options)`. Same as in the public API, the new function signatures are predictable and the only method that requires special attention is `iterator._next()`. For details, please see the updated [README](./README.md#private-api-for-implementors).
#### 2.2. Ticks
Internal use of `process.nextTick` has been replaced with [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask) (which was already used in browsers) and the [polyfill](https://github.com/feross/queue-microtask) for `queueMicrotask` (for older browsers) has been removed. The `db.nextTick` utility has been removed as well. These utilities are typically not even needed anymore, thanks to the use of promises. If you previously did:
```js
class ExampleLevel extends AbstractLevel {
_get (key, options, callback) {
process.nextTick(callback, null, 'abc')
}
customMethod () {
this.nextTick(() => {
// ..
})
}
}
```
You must now do:
```js
class ExampleLevel extends AbstractLevel {
async _get (key, options) {
return 'abc'
}
customMethod () {
queueMicrotask(() => {
// ..
})
}
}
```
#### 2.3. A new way to abort iterator work
Iterators now take an experimental `signal` option that is an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal). You can use the `signal` to abort an in-progress `_next()`, `_nextv()` or `_all()` call. Doing so is optional until a future semver-major release.
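From the consumer side, aborting looks as follows. This is only a sketch: whether in-progress work is actually interrupted depends on the implementation, and we assume the rejection surfaces as an error with code `LEVEL_ABORTED`.
```js
const controller = new AbortController()
const iterator = db.iterator({ signal: controller.signal })

// Sometime later, e.g. on shutdown
controller.abort()

try {
  await iterator.all()
} catch (err) {
  if (err.code === 'LEVEL_ABORTED') {
    // Iteration was aborted before it could complete
  }
}
```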
#### 2.4. Snapshots must be synchronous
If an implementation indicates support of snapshots via `db.supports.snapshots` then the `db._get()` and `db._getMany()` methods are now required to synchronously create their snapshot, rather than asynchronously. For details, please see the [README](./README.md#db_getkey-options). This is a documentation-only change because the abstract test suite cannot verify it.
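For illustration only, here is a sketch of a conforming `_get()` that uses a hypothetical storage `engine` (not a real API). The snapshot is taken before the first `await`, i.e. synchronously:
```js
class ExampleLevel extends AbstractLevel {
  async _get (key, options) {
    // Taken synchronously, so the read is unaffected by writes
    // that happen while this promise is pending
    const snapshot = this.engine.snapshot()
    try {
      return await this.engine.get(key, { snapshot })
    } finally {
      snapshot.release()
    }
  }
}
```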
## 1.0.0
**Introducing `abstract-level`: a fork of [`abstract-leveldown`](https://github.com/Level/abstract-leveldown) that removes the need for [`levelup`](https://github.com/Level/levelup), [`encoding-down`](https://github.com/Level/encoding-down) and more. An `abstract-level` database is a complete solution that doesn't need to be wrapped. It has the same API as `level(up)` including encodings, promises and events. In addition, implementations can now choose to use Uint8Array instead of Buffer. Consumers of an implementation can use both. Sublevels are builtin.**
We've put together several upgrade guides for different modules. See the [FAQ](https://github.com/Level/community#faq) to find the best upgrade guide for you. This upgrade guide describes how to replace `abstract-leveldown` with `abstract-level`. Implementations that do so can no longer be wrapped with `levelup`.
The npm package name is `abstract-level` and the main export is called `AbstractLevel` rather than `AbstractLevelDOWN`. It started using classes. Support of Node.js 10 has been dropped.
For most folks, a database that upgraded from `abstract-leveldown` to `abstract-level` can be a drop-in replacement for a `level(up)` database (with the exception of stream methods). Let's start this guide there: all methods have been enhanced to reach API parity with `levelup` and `level`.
### 1. API parity with `levelup`
#### 1.1. New: promises
Methods that take a callback now also support promises. They return a promise if no callback is provided, the same as `levelup`. Implementations that override public (non-underscored) methods _must_ do the same and any implementation _should_ do the same for additional methods if any.
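For example, both of the following work on the same database (key is illustrative):
```js
// Callback style, as before
db.get('example', function (err, value) {
  // ..
})

// Promise style: omitting the callback returns a promise
const value = await db.get('example')
```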
#### 1.2. New: events
An `abstract-level` database emits the same events as `levelup` would.
#### 1.3. New: idempotent open
Opening and closing a database is idempotent and safe, similar to `levelup` but more precise. If `open()` and `close()` are called repeatedly, the last call dictates the final status. Callbacks are not called (or promises not resolved) until any pending state changes are done. Same for events. Unlike on `levelup` it is safe to call `open()` while status is `'closing'`: the database will wait for closing to complete and then reopen. None of these changes are likely to constitute a breaking change; they increase state consistency in edge cases.
The `open()` method has a new option called `passive`. If set to `true` the call will wait for, but not initiate, opening of the database. This has a similar effect to `db.once('open', callback)`, with the added benefit that it also works if the database is already open. Implementations that wrap another database can use the `passive` option to open themselves without taking full control of the database that they wrap.
#### 1.4. New: deferred open
Deferred open is built-in. This means a database opens itself a tick after its constructor returns (unless `open()` was called manually). Any operations made until opening has completed are queued up in memory. When opening completes the operations are replayed. If opening failed (and this is a new behavior compared to `levelup`) the operations will yield errors. The `AbstractLevel` class has a new `defer()` method for an implementation to defer custom operations.
The initial `status` of a database is `'opening'` rather than `'new'`, which no longer exists. Wrapping a database with [`deferred-leveldown`](https://github.com/Level/deferred-leveldown) is not supported and will exhibit undefined behavior.
Implementations must also accept options for `open()` in their constructor, which was previously done by `levelup`. For example, usage of the [`classic-level`](https://github.com/Level/classic-level) implementation is as follows:
```js
const db = new ClassicLevel('./db', {
createIfMissing: false,
compression: false
})
```
This works by first forwarding options to the `AbstractLevel` constructor, which in turn forwards them to `open(options)`. If `open(options)` is called manually those options will be shallowly merged with options from the constructor:
```js
// Results in { createIfMissing: false, compression: true }
await db.open({ compression: true })
```
A database is not "patch-safe". If some form of plugin monkey-patches a database like in the following example, it must now also take the responsibility of deferring the operation (as well as handling promises and callbacks) using `db.defer()`. I.e. this example is incomplete:
```js
function plugin (db) {
const original = db.get
db.get = function (...args) {
original.call(this, ...args)
}
}
```
#### 1.5. No constructor callback
The database constructor does not take a callback argument, unlike `levelup`. This goes for `abstract-level` as well as implementations - which is to say, implementors don't have to (and should not) support this old pattern.
Instead call `db.open()` if you wish to wait for opening (which is not necessary to use the database) or to capture an error. If that's your reason for using the callback and you previously initialized a database like so (simplified):
```js
levelup(function (err, db) {
// ..
})
```
You must now do:
```js
db.open(function (err) {
// ..
})
```
Or using promises:
```js
await db.open()
```
#### 1.6. New: state checks
On any operation, an `abstract-level` database checks if it's open. If not, it will either throw an error (if the relevant API is synchronous) or asynchronously yield an error. For example:
```js
await db.close()
try {
db.iterator()
} catch (err) {
console.log(err.code) // LEVEL_DATABASE_NOT_OPEN
}
```
_Errors now have a `code` property. More on that below\._
This may be a breaking change downstream because it changes error messages for implementations that had their own safety checks (which will now be ineffective because `abstract-level` checks are performed first) or implicitly relied on `levelup` checks. By safety we mean mainly that yielding a JavaScript error is preferred over segmentation faults, though non-native implementations also benefit from detecting incorrect usage.
Implementations that have additional methods should add or align their own safety checks for consistency. Like so:
<details>
<summary>Click to expand</summary>
```js
const ModuleError = require('module-error')
class ExampleLevel extends AbstractLevel {
// For brevity this example does not implement promises or encodings
approximateSize (start, end, callback) {
if (this.status === 'opening') {
this.defer(() => this.approximateSize(start, end, callback))
} else if (this.status !== 'open') {
this.nextTick(callback, new ModuleError('Database is not open', {
code: 'LEVEL_DATABASE_NOT_OPEN'
}))
} else {
// ..
}
}
}
```
</details>
#### 1.7. New: chained batch length
The `AbstractChainedBatch` prototype has a new `length` property that, like a chained batch in `levelup`, returns the number of queued operations in the batch. Implementations should not have to make changes for this unless they monkey-patched public methods of `AbstractChainedBatch`.
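For example, a sketch with illustrative keys:
```js
const batch = db.batch().put('a', '1').put('b', '2').del('c')
console.log(batch.length) // 3
await batch.write()
```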
### 2. API parity with `level`
It was previously necessary to use [`level`](https://github.com/Level/level) to get the "full experience", or similar modules like [`level-mem`](https://github.com/Level/mem), [`level-rocksdb`](https://github.com/Level/level-rocksdb) and more. These modules combined an `abstract-leveldown` implementation with [`encoding-down`](https://github.com/Level/encoding-down) and [`levelup`](https://github.com/Level/levelup). Encodings are now built into `abstract-level`, using [`level-transcoder`](https://github.com/Level/transcoder) rather than [`level-codec`](https://github.com/Level/codec). The main change is that logic from the existing public API has been expanded down into the storage layer.
The `level` module still has a place, for its support of both Node.js and browsers and for being the main entrypoint into the Level ecosystem. The next major version of `level`, that's v8.0.0, will likely simply export [`classic-level`](https://github.com/Level/classic-level) in Node.js and [`browser-level`](https://github.com/Level/browser-level) in browsers. To differentiate, the text below will refer to the old version as `level@7`.
#### 2.1. For consumers
All relevant methods including the database constructor now accept `keyEncoding` and `valueEncoding` options, the same as `level@7`. Read operations now yield strings rather than buffers by default, having the same default `'utf8'` encoding as `level@7` and friends.
There are a few differences from `level@7` and `encoding-down`. Some breaking:
- The lesser-used `'ascii'`, `'ucs2'` and `'utf16le'` encodings are not supported
- The `'id'` encoding, which was not supported by any active `abstract-leveldown` implementation and aliased as `'none'`, has been removed
- The undocumented `encoding` option (as an alias for `valueEncoding`) is not supported.
And some non-breaking:
- The `'binary'` encoding has been renamed to `'buffer'`, with `'binary'` as an alias
- The `'utf8'` encoding previously did not touch Buffers. Now it will call `buffer.toString('utf8')` for consistency. Consumers can use the `'buffer'` encoding to avoid this conversion.
If you previously did one of the following (on a database that's defaulting to the `'utf8'` encoding):
```js
await db.put('a', Buffer.from('x'))
await db.put('a', Buffer.from('x'), { valueEncoding: 'binary' })
```
Both examples will still work (assuming the buffer contains only UTF8 data) but you should now do:
```js
await db.put('a', Buffer.from('x'), { valueEncoding: 'buffer' })
```
Or use the new `'view'` encoding which accepts Uint8Arrays (and therefore also Buffer):
```js
await db.put('a', new Uint8Array(...), { valueEncoding: 'view' })
```
#### 2.2. For implementors
_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._
Both the public and private API of `abstract-level` are encoding-aware. This means that private methods receive `keyEncoding` and `valueEncoding` options too, instead of the `keyAsBuffer`, `valueAsBuffer` and `asBuffer` options that `abstract-leveldown` had. Implementations don't need to perform encoding or decoding themselves. In fact they can do less: the `_serializeKey()` and `_serializeValue()` methods are also gone and implementations are less likely to have to convert between strings and buffers.
For example: a call like `db.put(key, { x: 2 }, { valueEncoding: 'json' })` will encode the `{ x: 2 }` value and might forward it to the private API as `db._put(key, '{"x":2}', { valueEncoding: 'utf8' }, callback)`. Same for the key, omitted for brevity. We say "might" because it depends on the implementation, which can now declare which encodings it supports.
To first give a concrete example for `get()`, if your implementation previously did:
```js
class ExampleLeveldown extends AbstractLevelDOWN {
_get (key, options, callback) {
if (options.asBuffer) {
this.nextTick(callback, null, Buffer.from('abc'))
} else {
this.nextTick(callback, null, 'abc')
}
}
}
```
You must now do (if still relevant):
```js
class ExampleLevel extends AbstractLevel {
_get (key, options, callback) {
if (options.valueEncoding === 'buffer') {
this.nextTick(callback, null, Buffer.from('abc'))
} else {
this.nextTick(callback, null, 'abc')
}
}
}
```
The encoding options and data received by the private API depend on which encodings it supports. It must declare those via the manifest passed to the `AbstractLevel` constructor. See the [`README`](README.md) for details. For example, an implementation might only support storing data as Uint8Arrays, known here as the `'view'` encoding:
```js
class ExampleLevel extends AbstractLevel {
constructor (location, options) {
super({ encodings: { view: true } }, options)
}
}
```
The earlier `put()` example would then result in `db._put(key, value, { valueEncoding: 'view' }, callback)` where `value` is a Uint8Array containing JSON in binary form. And the earlier `_get()` example can be simplified to:
```js
class ExampleLevel extends AbstractLevel {
_get (key, options, callback) {
// No need to check valueEncoding as it's always 'view'
this.nextTick(callback, null, new Uint8Array(...))
}
}
```
Implementations can also declare support of multiple encodings; keys and values will then be encoded via the most optimal path. For example:
```js
super({
encodings: {
view: true,
utf8: true
}
})
```
#### 2.3. Other notable changes
- The `AbstractIterator` constructor now requires an `options` argument, for encoding options
- The `AbstractIterator#_seek()` method got a new `options` argument, for a `keyEncoding` option
- The `db.supports.bufferKeys` property has been removed. Use `db.supports.encodings.buffer` instead.
### 3. Streams have moved
Node.js readable streams must now be created with a new standalone module called [`level-read-stream`](https://github.com/Level/read-stream), rather than database methods like `db.createReadStream()`. Please see its [upgrade guide](https://github.com/Level/read-stream/blob/main/UPGRADING.md#100) for details.
To offer an alternative to `db.createKeyStream()` and `db.createValueStream()`, two new types of iterators have been added: `db.keys()` and `db.values()`. Their default implementations are functional but implementors may want to override them for optimal performance. The same goes for two new methods on iterators: `nextv()` and `all()`. To achieve this and honor the `limit` option, abstract iterators now count how many items they yielded, which may remove the need for implementations to do so on their own. Please see the README for details.
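A sketch of these additions, with illustrative range options and sizes:
```js
// Iterate keys only, without the cost of fetching values
for await (const key of db.keys({ gt: 'a' })) {
  console.log(key)
}

// Read entries in pages of 100, then whatever remains
const iterator = db.iterator()
const page = await iterator.nextv(100) // Array of [key, value] pairs
const rest = await iterator.all() // Also closes the iterator
```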
### 4. Zero-length keys and range options are now valid
These keys sort before anything else. Historically they weren't supported because they caused segmentation faults in `leveldown`, but that doesn't apply to today's codebase. Implementations must now support:
```js
await db.put('', 'example')
console.log(await db.get('')) // 'example'
for await (const [key, value] of db.iterator({ lte: '' })) {
console.log(value) // 'example'
}
```
Same goes for zero-length Buffer and Uint8Array keys. Zero-length keys would previously result in an error and never reach the private API.
### 5. Resources are auto-closed
To further improve safety and consistency, additional changes were made that make an `abstract-level` database safer to use than `abstract-leveldown` wrapped with `levelup`.
#### 5.1. Closing iterators is idempotent
The `iterator.end()` method has been renamed to `iterator.close()`, with `end()` being an alias until a next major version in the future. The term "close" makes it easier to differentiate between the iterator having reached its natural end (data-wise) versus closing it to cleanup resources. If you previously did:
```js
const iterator = db.iterator()
iterator.end(callback)
```
You should now do one of:
```js
iterator.close(callback)
await iterator.close()
```
Likewise, in the private API for implementors, `_end()` has been renamed to `_close()` but without an alias. This method is no longer allowed to yield an error.
On `db.close()`, non-closed iterators are now automatically closed. This may be a breaking change but only if an implementation has (at its own risk) overridden the public `end()` method, because `close()` or `end()` is now an idempotent operation rather than yielding an `end() already called on iterator` error. If a `next()` call is in progress, closing the iterator (or database) will wait for that.
The error message `cannot call next() after end()` has been replaced with code `LEVEL_ITERATOR_NOT_OPEN`, the error `cannot call seek() after end()` has been removed in favor of a silent return, and `cannot call next() before previous next() has completed` and `cannot call seek() before next() has completed` have been replaced with code `LEVEL_ITERATOR_BUSY`.
The `next()` method no longer returns `this` (when a callback is provided).
#### 5.2. Chained batch can be closed
Chained batch has a new method `close()` which is an idempotent operation and automatically called after `write()` (for backwards compatibility) or on `db.close()`. This is to ensure batches can't be used after closing and reopening a db. If a `write()` is in progress, closing will wait for that. If `write()` is never called then `close()` must be. For example:
```js
const batch = db.batch()
.put('abc', 'zyz')
.del('foo')
if (someCondition) {
await batch.write()
} else {
// Decided not to commit
await batch.close()
}
// In either case this will throw
batch.put('more', 'data')
```
These changes could be breaking for an implementation that has (at its own risk) overridden the public `write()` method. In addition, the error message `write() already called on this batch` has been replaced with code `LEVEL_BATCH_NOT_OPEN`.
An implementation can optionally override `AbstractChainedBatch#_close()` if it has resources to free and wishes to free them earlier than GC would.
### 6. Errors now use codes
The [`level-errors`](https://github.com/Level/errors) module as used by `levelup` and friends, is not used or exposed by `abstract-level`. Instead errors thrown or yielded from a database have a `code` property. See the [`README`](./README.md#errors) for details. Going forward, the semver contract will be on `code` and error messages will change without a semver-major bump.
To minimize breakage, the most used error as yielded by `get()` when an entry is not found, has the same properties that `level-errors` added (`notFound` and `status`) in addition to code `LEVEL_NOT_FOUND`. Those properties will be removed in a future version. Implementations can still yield an error that matches `/NotFound/i.test(err)` or they can start using the code. Either way `abstract-level` will normalize the error.
If you previously did:
```js
db.get('abc', function (err, value) {
if (err && err.notFound) {
// Handle missing entry
}
})
```
That will still work but it's preferred to do:
```js
db.get('abc', function (err, value) {
if (err && err.code === 'LEVEL_NOT_FOUND') {
// Handle missing entry
}
})
```
Or using promises:
```js
try {
const value = await db.get('abc')
} catch (err) {
if (err.code === 'LEVEL_NOT_FOUND') {
// Handle missing entry
}
}
```
### 7. Semi-private properties have been removed
The following properties and methods can no longer be accessed, as they've been removed or replaced with internal [symbols](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol):
- `AbstractIterator#_nexting`
- `AbstractIterator#_ended`
- `AbstractChainedBatch#_written`
- `AbstractChainedBatch#_checkWritten()`
- `AbstractChainedBatch#_operations`
- `AbstractLevel#_setupIteratorOptions()`
### 8. Changes to test suite
_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._
The abstract test suite of `abstract-level` has some breaking changes compared to `abstract-leveldown`:
- Options to skip tests have been removed in favor of `db.supports`
- Support of `db.clear()` and `db.getMany()` is now mandatory. The default (slow) implementation of `_clear()` has been removed.
- Added tests that `gte` and `lte` range options take precedence over `gt` and `lt` respectively. This is incompatible with [`ltgt`](https://github.com/dominictarr/ltgt) but aligns with `subleveldown`, [`level-option-wrap`](https://github.com/substack/level-option-wrap) and half of `leveldown`. There was no good choice.
- The `setUp` and `tearDown` functions have been removed from the test suite and `suite.common()`.
- Added ability to access manifests via `testCommon.supports`, by lazily copying it from `testCommon.factory().supports`. This requires that the manifest does not change during the lifetime of a `db`.
- Your `factory()` function must now accept an `options` argument.
Many tests were imported from `levelup`, `encoding-down`, `deferred-leveldown`, `memdown`, `level-js` and `leveldown`. They test the changes described above and improve coverage of existing behavior.
Lastly, it's recommended to revisit any custom tests of an implementation. In particular if those tests relied upon the previously loose state checking of `abstract-leveldown`. For example, making a `db.put()` call before `db.open()`. Such a test now has a different meaning. The previous meaning can typically be restored by inserting `db.once('open', ...)` or `await db.open()` logic.
### 9. Sublevels are builtin
_This section is only relevant if you use [`subleveldown`](https://github.com/Level/subleveldown) (which can not wrap an `abstract-level` database)._
Sublevels are now builtin. If you previously did:
```js
const sub = require('subleveldown')
const example1 = sub(db, 'example1')
const example2 = sub(db, 'example2', { valueEncoding: 'json' })
```
You must now do:
```js
const example1 = db.sublevel('example1')
const example2 = db.sublevel('example2', { valueEncoding: 'json' })
```
The key structure is equal to that of `subleveldown`. This means that an `abstract-level` sublevel can read sublevels previously created with (and populated by) `subleveldown`. There are some new features:
- `db.batch(..)` takes a `sublevel` option on operations, to atomically commit data to multiple sublevels (see the sketch after this list)
- Sublevels support Uint8Array in addition to Buffer
- `AbstractLevel#_sublevel()` can be overridden to add additional methods to sublevels.
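For example, building on `example1` and `example2` from above, a single atomic batch can write to both sublevels through the parent `db`:
```js
await db.batch([
  { type: 'put', sublevel: example1, key: 'a', value: 'hello' },
  { type: 'del', sublevel: example2, key: 'b' }
])
```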
To reduce function overloads, the prefix argument (`example1` above) is now required and it's called `name` here. If you previously did one of the following, resulting in an empty name:
```js
subleveldown(db)
subleveldown(db, { separator: '@' })
```
You must now use an explicit empty name:
```js
db.sublevel('')
db.sublevel('', { separator: '@' })
```
The string shorthand for `{ separator }` has also been removed. If you previously did:
```js
subleveldown(db, 'example', '@')
```
You must now do:
```js
db.sublevel('example', { separator: '@' })
```
Third, the `open` option has been removed. If you need an asynchronous open hook, feel free to open an issue to discuss restoring this API. Should it support promises? Should `abstract-level` support it on any database and not just sublevels?
Lastly, the error message `Parent database is not open` (courtesy of `subleveldown` which had to check open state to prevent segmentation faults from underlying databases) changed to error code [`LEVEL_DATABASE_NOT_OPEN`](https://github.com/Level/abstract-level#errors) (courtesy of `abstract-level` which does those checks on any database).
---
_For earlier releases, before `abstract-level` was forked from `abstract-leveldown` (v7.2.0), please see [the upgrade guide of `abstract-leveldown`](https://github.com/Level/abstract-leveldown/blob/master/UPGRADING.md)._

397
abstract-chained-batch.js Normal file

@@ -0,0 +1,397 @@
'use strict'
const combineErrors = require('maybe-combine-errors')
const ModuleError = require('module-error')
const { getOptions, emptyOptions, noop } = require('./lib/common')
const { prefixDescendantKey, isDescendant } = require('./lib/prefixes')
const { PrewriteBatch } = require('./lib/prewrite-batch')
const kPublicOperations = Symbol('publicOperations')
const kPrivateOperations = Symbol('privateOperations')
class AbstractChainedBatch {
#status = 'open'
#length = 0
#closePromise = null
#publicOperations
#prewriteRun
#prewriteBatch
#prewriteData
#addMode
constructor (db, options) {
if (typeof db !== 'object' || db === null) {
const hint = db === null ? 'null' : typeof db
throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`)
}
const enableWriteEvent = db.listenerCount('write') > 0
const enablePrewriteHook = !db.hooks.prewrite.noop
// Operations for write event. We can skip populating this array (and cloning of
// operations, which is the expensive part) if there are 0 write event listeners.
this.#publicOperations = enableWriteEvent ? [] : null
this.#addMode = getOptions(options, emptyOptions).add === true
if (enablePrewriteHook) {
// Use separate arrays to collect operations added by hook functions, because
// we wait to apply those until write(). Store these arrays in PrewriteData which
// exists to separate internal data from the public PrewriteBatch interface.
const data = new PrewriteData([], enableWriteEvent ? [] : null)
this.#prewriteData = data
this.#prewriteBatch = new PrewriteBatch(db, data[kPrivateOperations], data[kPublicOperations])
this.#prewriteRun = db.hooks.prewrite.run // TODO: document why
} else {
this.#prewriteData = null
this.#prewriteBatch = null
this.#prewriteRun = null
}
this.db = db
this.db.attachResource(this)
}
get length () {
if (this.#prewriteData !== null) {
return this.#length + this.#prewriteData.length
} else {
return this.#length
}
}
put (key, value, options) {
this.#assertStatus()
options = getOptions(options, emptyOptions)
const delegated = options.sublevel != null
const db = delegated ? options.sublevel : this.db
db._assertValidKey(key)
db._assertValidValue(value)
const op = {
...options,
type: 'put',
key,
value,
keyEncoding: db.keyEncoding(options.keyEncoding),
valueEncoding: db.valueEncoding(options.valueEncoding)
}
if (this.#prewriteRun !== null) {
try {
// Note: we could have chosen to recurse here so that prewriteBatch.put() would
// call this.put(). But then operations added by hook functions would be inserted
// before rather than after user operations. Instead we process those operations
// lazily in write(). This does hurt the only performance benefit of a
// chained batch though, which is that it avoids blocking the event loop with
// more than one operation at a time. On the other hand, if operations added by
// hook functions are adjacent (i.e. sorted) committing them should be faster.
this.#prewriteRun(op, this.#prewriteBatch)
// Normalize encodings again in case they were modified
op.keyEncoding = db.keyEncoding(op.keyEncoding)
op.valueEncoding = db.valueEncoding(op.valueEncoding)
} catch (err) {
throw new ModuleError('The prewrite hook failed on batch.put()', {
code: 'LEVEL_HOOK_ERROR',
cause: err
})
}
}
// Encode data for private API
const keyEncoding = op.keyEncoding
const preencodedKey = keyEncoding.encode(op.key)
const keyFormat = keyEncoding.format
// If the sublevel is not a descendant then forward that option to the parent db
// so that we don't erroneously add our own prefix to the key of the operation.
const siblings = delegated && !isDescendant(op.sublevel, this.db) && op.sublevel !== this.db
const encodedKey = delegated && !siblings
? prefixDescendantKey(preencodedKey, keyFormat, db, this.db)
: preencodedKey
const valueEncoding = op.valueEncoding
const encodedValue = valueEncoding.encode(op.value)
const valueFormat = valueEncoding.format
// Only prefix once
if (delegated && !siblings) {
op.sublevel = null
}
// If the sublevel is not a descendant then we shouldn't emit events
if (this.#publicOperations !== null && !siblings) {
// Clone op before we mutate it for the private API
const publicOperation = { ...op }
publicOperation.encodedKey = encodedKey
publicOperation.encodedValue = encodedValue
if (delegated) {
// Ensure emitted data makes sense in the context of this db
publicOperation.key = encodedKey
publicOperation.value = encodedValue
publicOperation.keyEncoding = this.db.keyEncoding(keyFormat)
publicOperation.valueEncoding = this.db.valueEncoding(valueFormat)
}
this.#publicOperations.push(publicOperation)
}
// If we're forwarding the sublevel option then don't prefix the key yet
op.key = siblings ? encodedKey : this.db.prefixKey(encodedKey, keyFormat, true)
op.value = encodedValue
op.keyEncoding = keyFormat
op.valueEncoding = valueFormat
if (this.#addMode) {
this._add(op)
} else {
// This "operation as options" trick avoids further cloning
this._put(op.key, encodedValue, op)
}
// Increment only on success
this.#length++
return this
}
_put (key, value, options) {}
del (key, options) {
this.#assertStatus()
options = getOptions(options, emptyOptions)
const delegated = options.sublevel != null
const db = delegated ? options.sublevel : this.db
db._assertValidKey(key)
const op = {
...options,
type: 'del',
key,
keyEncoding: db.keyEncoding(options.keyEncoding)
}
if (this.#prewriteRun !== null) {
try {
this.#prewriteRun(op, this.#prewriteBatch)
// Normalize encoding again in case it was modified
op.keyEncoding = db.keyEncoding(op.keyEncoding)
} catch (err) {
throw new ModuleError('The prewrite hook failed on batch.del()', {
code: 'LEVEL_HOOK_ERROR',
cause: err
})
}
}
// Encode data for private API
const keyEncoding = op.keyEncoding
const preencodedKey = keyEncoding.encode(op.key)
const keyFormat = keyEncoding.format
const encodedKey = delegated ? prefixDescendantKey(preencodedKey, keyFormat, db, this.db) : preencodedKey
// Prevent double prefixing
if (delegated) op.sublevel = null
if (this.#publicOperations !== null) {
// Clone op before we mutate it for the private API
const publicOperation = { ...op }
publicOperation.encodedKey = encodedKey
if (delegated) {
// Ensure emitted data makes sense in the context of this db
publicOperation.key = encodedKey
publicOperation.keyEncoding = this.db.keyEncoding(keyFormat)
}
this.#publicOperations.push(publicOperation)
}
op.key = this.db.prefixKey(encodedKey, keyFormat, true)
op.keyEncoding = keyFormat
if (this.#addMode) {
this._add(op)
} else {
// This "operation as options" trick avoids further cloning
this._del(op.key, op)
}
// Increment only on success
this.#length++
return this
}
_del (key, options) {}
_add (op) {}
clear () {
this.#assertStatus()
this._clear()
if (this.#publicOperations !== null) this.#publicOperations = []
if (this.#prewriteData !== null) this.#prewriteData.clear()
this.#length = 0
return this
}
_clear () {}
async write (options) {
this.#assertStatus()
options = getOptions(options)
if (this.#length === 0) {
return this.close()
} else {
this.#status = 'writing'
// Prepare promise in case close() is called in the meantime
const close = this.#prepareClose()
try {
// Process operations added by prewrite hook functions
if (this.#prewriteData !== null) {
const publicOperations = this.#prewriteData[kPublicOperations]
const privateOperations = this.#prewriteData[kPrivateOperations]
const length = this.#prewriteData.length
for (let i = 0; i < length; i++) {
const op = privateOperations[i]
// We can _add(), _put() or _del() even though status is now 'writing' because
// status isn't exposed to the private API, so there's no difference in state
// from that perspective, unless an implementation overrides the public write()
// method at its own risk.
if (this.#addMode) {
this._add(op)
} else if (op.type === 'put') {
this._put(op.key, op.value, op)
} else {
this._del(op.key, op)
}
}
if (publicOperations !== null && length !== 0) {
this.#publicOperations = this.#publicOperations.concat(publicOperations)
}
}
await this._write(options)
} catch (err) {
close()
try {
await this.#closePromise
} catch (closeErr) {
// eslint-disable-next-line no-ex-assign
err = combineErrors([err, closeErr])
}
throw err
}
close()
// Emit after initiating the closing, because the event may trigger a
// db close which in turn triggers (idempotently) closing this batch.
if (this.#publicOperations !== null) {
this.db.emit('write', this.#publicOperations)
}
return this.#closePromise
}
}
async _write (options) {}
async close () {
if (this.#closePromise !== null) {
// First caller of close() or write() is responsible for error
return this.#closePromise.catch(noop)
} else {
// Wrap promise to avoid race issues on recursive calls
this.#prepareClose()()
return this.#closePromise
}
}
async _close () {}
#assertStatus () {
if (this.#status !== 'open') {
throw new ModuleError('Batch is not open: cannot change operations after write() or close()', {
code: 'LEVEL_BATCH_NOT_OPEN'
})
}
// Can technically be removed, because it's no longer possible to call db.batch() when
// status is not 'open', and db.close() closes the batch. Keep for now, in case of
// unforeseen userland behaviors.
if (this.db.status !== 'open') {
/* istanbul ignore next */
throw new ModuleError('Database is not open', {
code: 'LEVEL_DATABASE_NOT_OPEN'
})
}
}
#prepareClose () {
let close
this.#closePromise = new Promise((resolve, reject) => {
close = () => {
this.#privateClose().then(resolve, reject)
}
})
return close
}
async #privateClose () {
// TODO: should we not set status earlier?
this.#status = 'closing'
await this._close()
this.db.detachResource(this)
}
}
if (typeof Symbol.asyncDispose === 'symbol') {
AbstractChainedBatch.prototype[Symbol.asyncDispose] = async function () {
return this.close()
}
}
class PrewriteData {
constructor (privateOperations, publicOperations) {
this[kPrivateOperations] = privateOperations
this[kPublicOperations] = publicOperations
}
get length () {
return this[kPrivateOperations].length
}
clear () {
// Clear operation arrays if present.
for (const k of [kPublicOperations, kPrivateOperations]) {
const ops = this[k]
if (ops !== null) {
// Keep array alive because PrewriteBatch has a reference to it
ops.splice(0, ops.length)
}
}
}
}
exports.AbstractChainedBatch = AbstractChainedBatch

404
abstract-iterator.js Normal file

@@ -0,0 +1,404 @@
'use strict'
const ModuleError = require('module-error')
const combineErrors = require('maybe-combine-errors')
const { getOptions, emptyOptions, noop } = require('./lib/common')
const { AbortError } = require('./lib/errors')
const kDecodeOne = Symbol('decodeOne')
const kDecodeMany = Symbol('decodeMany')
const kKeyEncoding = Symbol('keyEncoding')
const kValueEncoding = Symbol('valueEncoding')
// This class is an internal utility for common functionality between AbstractIterator,
// AbstractKeyIterator and AbstractValueIterator. It's not exported.
class CommonIterator {
#working = false
#pendingClose = null
#closingPromise = null
#count = 0
#signal
#limit
#ended
#snapshot
constructor (db, options) {
if (typeof db !== 'object' || db === null) {
const hint = db === null ? 'null' : typeof db
throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`)
}
if (typeof options !== 'object' || options === null) {
throw new TypeError('The second argument must be an options object')
}
this[kKeyEncoding] = options[kKeyEncoding]
this[kValueEncoding] = options[kValueEncoding]
this.#limit = Number.isInteger(options.limit) && options.limit >= 0 ? options.limit : Infinity
this.#signal = options.signal != null ? options.signal : null
this.#snapshot = options.snapshot != null ? options.snapshot : null
// Ending means reaching the natural end of the data and (unlike closing) that can
// be reset by seek(), unless the limit was reached.
this.#ended = false
this.db = db
this.db.attachResource(this)
}
get count () {
return this.#count
}
get limit () {
return this.#limit
}
async next () {
this.#startWork()
try {
if (this.#ended || this.#count >= this.#limit) {
this.#ended = true
return undefined
}
let item = await this._next()
if (item === undefined) {
this.#ended = true
return undefined
}
try {
item = this[kDecodeOne](item)
} catch (err) {
throw new IteratorDecodeError(err)
}
this.#count++
return item
} finally {
this.#endWork()
}
}
async _next () {}
async nextv (size, options) {
if (!Number.isInteger(size)) {
throw new TypeError("The first argument 'size' must be an integer")
}
options = getOptions(options, emptyOptions)
if (size < 1) size = 1
if (this.#limit < Infinity) size = Math.min(size, this.#limit - this.#count)
this.#startWork()
try {
if (this.#ended || size <= 0) {
this.#ended = true
return []
}
const items = await this._nextv(size, options)
if (items.length === 0) {
this.#ended = true
return items
}
try {
this[kDecodeMany](items)
} catch (err) {
throw new IteratorDecodeError(err)
}
this.#count += items.length
return items
} finally {
this.#endWork()
}
}
async _nextv (size, options) {
const acc = []
while (acc.length < size) {
const item = await this._next(options)
if (item !== undefined) {
acc.push(item)
} else {
// Must track this here because we're directly calling _next()
this.#ended = true
break
}
}
return acc
}
async all (options) {
options = getOptions(options, emptyOptions)
this.#startWork()
try {
if (this.#ended || this.#count >= this.#limit) {
return []
}
const items = await this._all(options)
try {
this[kDecodeMany](items)
} catch (err) {
throw new IteratorDecodeError(err)
}
this.#count += items.length
return items
} catch (err) {
this.#endWork()
await this.#destroy(err)
} finally {
this.#ended = true
if (this.#working) {
this.#endWork()
await this.close()
}
}
}
async _all (options) {
// Must count here because we're directly calling _nextv()
let count = this.#count
const acc = []
while (true) {
// Not configurable, because implementations should optimize _all().
const size = this.#limit < Infinity ? Math.min(1e3, this.#limit - count) : 1e3
if (size <= 0) {
return acc
}
const items = await this._nextv(size, options)
if (items.length === 0) {
return acc
}
acc.push.apply(acc, items)
count += items.length
}
}
seek (target, options) {
options = getOptions(options, emptyOptions)
if (this.#closingPromise !== null) {
// Don't throw here, to be kind to implementations that wrap
// another db and don't necessarily control when the db is closed
} else if (this.#working) {
throw new ModuleError('Iterator is busy: cannot call seek() until next() has completed', {
code: 'LEVEL_ITERATOR_BUSY'
})
} else {
const keyEncoding = this.db.keyEncoding(options.keyEncoding || this[kKeyEncoding])
const keyFormat = keyEncoding.format
if (options.keyEncoding !== keyFormat) {
options = { ...options, keyEncoding: keyFormat }
}
const mapped = this.db.prefixKey(keyEncoding.encode(target), keyFormat, false)
this._seek(mapped, options)
// If _seek() was successful, more data may be available.
this.#ended = false
}
}
_seek (target, options) {
throw new ModuleError('Iterator does not implement seek()', {
code: 'LEVEL_NOT_SUPPORTED'
})
}
async close () {
if (this.#closingPromise !== null) {
// First caller of close() is responsible for error
return this.#closingPromise.catch(noop)
}
// Wrap to avoid race issues on recursive calls
this.#closingPromise = new Promise((resolve, reject) => {
this.#pendingClose = () => {
this.#pendingClose = null
this.#privateClose().then(resolve, reject)
}
})
// If working we'll delay closing, but still handle the close error (if any) here
if (!this.#working) {
this.#pendingClose()
}
return this.#closingPromise
}
async _close () {}
async * [Symbol.asyncIterator] () {
try {
let item
while ((item = (await this.next())) !== undefined) {
yield item
}
} catch (err) {
await this.#destroy(err)
} finally {
await this.close()
}
}
#startWork () {
if (this.#closingPromise !== null) {
throw new ModuleError('Iterator is not open: cannot read after close()', {
code: 'LEVEL_ITERATOR_NOT_OPEN'
})
} else if (this.#working) {
throw new ModuleError('Iterator is busy: cannot read until previous read has completed', {
code: 'LEVEL_ITERATOR_BUSY'
})
} else if (this.#signal?.aborted) {
throw new AbortError()
}
// Keep snapshot open during operation
this.#snapshot?.ref()
this.#working = true
}
#endWork () {
this.#working = false
this.#pendingClose?.()
this.#snapshot?.unref()
}
async #privateClose () {
await this._close()
this.db.detachResource(this)
}
async #destroy (err) {
try {
await this.close()
} catch (closeErr) {
throw combineErrors([err, closeErr])
}
throw err
}
}
if (typeof Symbol.asyncDispose === 'symbol') {
CommonIterator.prototype[Symbol.asyncDispose] = async function () {
return this.close()
}
}
// For backwards compatibility this class is not (yet) called AbstractEntryIterator.
class AbstractIterator extends CommonIterator {
#keys
#values
constructor (db, options) {
super(db, options)
this.#keys = options.keys !== false
this.#values = options.values !== false
}
[kDecodeOne] (entry) {
const key = entry[0]
const value = entry[1]
if (key !== undefined) {
entry[0] = this.#keys ? this[kKeyEncoding].decode(key) : undefined
}
if (value !== undefined) {
entry[1] = this.#values ? this[kValueEncoding].decode(value) : undefined
}
return entry
}
[kDecodeMany] (entries) {
const keyEncoding = this[kKeyEncoding]
const valueEncoding = this[kValueEncoding]
for (const entry of entries) {
const key = entry[0]
const value = entry[1]
if (key !== undefined) entry[0] = this.#keys ? keyEncoding.decode(key) : undefined
if (value !== undefined) entry[1] = this.#values ? valueEncoding.decode(value) : undefined
}
}
}
class AbstractKeyIterator extends CommonIterator {
[kDecodeOne] (key) {
return this[kKeyEncoding].decode(key)
}
[kDecodeMany] (keys) {
const keyEncoding = this[kKeyEncoding]
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (key !== undefined) keys[i] = keyEncoding.decode(key)
}
}
}
class AbstractValueIterator extends CommonIterator {
[kDecodeOne] (value) {
return this[kValueEncoding].decode(value)
}
[kDecodeMany] (values) {
const valueEncoding = this[kValueEncoding]
for (let i = 0; i < values.length; i++) {
const value = values[i]
if (value !== undefined) values[i] = valueEncoding.decode(value)
}
}
}
// Internal utility, not typed or exported
class IteratorDecodeError extends ModuleError {
constructor (cause) {
super('Iterator could not decode data', {
code: 'LEVEL_DECODE_ERROR',
cause
})
}
}
// Exposed so that AbstractLevel can set these options
AbstractIterator.keyEncoding = kKeyEncoding
AbstractIterator.valueEncoding = kValueEncoding
exports.AbstractIterator = AbstractIterator
exports.AbstractKeyIterator = AbstractKeyIterator
exports.AbstractValueIterator = AbstractValueIterator
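
A minimal usage sketch of the iterator read paths above (next(), nextv(), count, limit, seek() and for await...of), again assuming memory-level as a concrete implementation; illustrative only, not part of this commit:
const { MemoryLevel } = require('memory-level') // assumed implementation

async function example () {
  const db = new MemoryLevel()
  await db.open()
  await db.batch([
    { type: 'put', key: 'a', value: '1' },
    { type: 'put', key: 'b', value: '2' },
    { type: 'put', key: 'c', value: '3' }
  ])

  // The limit option caps the total yielded by next(), nextv() and all()
  const it = db.iterator({ limit: 2 })
  console.log(await it.next())    // ['a', '1']
  console.log(await it.nextv(10)) // [['b', '2']] - size is capped by the remaining limit
  console.log(it.count)           // 2
  await it.close()

  // seek() repositions an iterator and resets its natural end
  const keys = db.keys()
  keys.seek('b')
  console.log(await keys.next())  // 'b'
  await keys.close()

  // for await...of closes the iterator when the loop ends or throws
  for await (const [key, value] of db.iterator()) {
    console.log(key, value)
  }

  return db.close()
}

example().catch(console.error)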

1060
abstract-level.js Normal file

File diff suppressed because it is too large

84
abstract-snapshot.js Normal file
View File

@ -0,0 +1,84 @@
'use strict'
const ModuleError = require('module-error')
const { noop } = require('./lib/common')
class AbstractSnapshot {
#open = true
#referenceCount = 0
#pendingClose = null
#closePromise = null
#owner
constructor (options) {
// Defining this as an option gives sublevels the opportunity to create a snapshot
// via their parent database while still designating themselves as the "owner",
// i.e. the database that will close the snapshot upon db.close(). This ensures
// that the API of AbstractSublevel is symmetrical to that of AbstractLevel.
const owner = options.owner
if (typeof owner !== 'object' || owner === null) {
const hint = owner === null ? 'null' : typeof owner
throw new TypeError(`Owner must be an abstract-level database, received ${hint}`)
}
// Also ensures this db will not be garbage collected
this.#owner = owner
this.#owner.attachResource(this)
}
ref () {
if (!this.#open) {
throw new ModuleError('Snapshot is not open: cannot use snapshot after close()', {
code: 'LEVEL_SNAPSHOT_NOT_OPEN'
})
}
this.#referenceCount++
}
unref () {
if (--this.#referenceCount === 0) {
this.#pendingClose?.()
}
}
async close () {
if (this.#closePromise !== null) {
// First caller of close() is responsible for error
return this.#closePromise.catch(noop)
}
this.#open = false
// Wrap to avoid race issues on recursive calls
this.#closePromise = new Promise((resolve, reject) => {
this.#pendingClose = () => {
this.#pendingClose = null
privateClose(this, this.#owner).then(resolve, reject)
}
})
// If the snapshot is still referenced we'll delay closing, but still handle the close error (if any) here
if (this.#referenceCount === 0) {
this.#pendingClose()
}
return this.#closePromise
}
async _close () {}
}
if (typeof Symbol.asyncDispose === 'symbol') {
AbstractSnapshot.prototype[Symbol.asyncDispose] = async function () {
return this.close()
}
}
const privateClose = async function (snapshot, owner) {
await snapshot._close()
owner.detachResource(snapshot)
}
exports.AbstractSnapshot = AbstractSnapshot
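
A minimal sketch of the reference-counting contract above. The owner here is a stand-in object; a real owner would be an abstract-level database, but attachResource and detachResource are the only methods this sketch needs:
const { AbstractSnapshot } = require('./abstract-snapshot') // path relative to the package root

const owner = { attachResource () {}, detachResource () {} } // stand-in owner

async function example () {
  const snapshot = new AbstractSnapshot({ owner })

  snapshot.ref()                   // a read operation keeps the snapshot open
  const closing = snapshot.close() // delayed: the reference count is not yet 0
  snapshot.unref()                 // the last unref triggers the pending close
  await closing

  try {
    snapshot.ref()                 // using the snapshot after close() fails
  } catch (err) {
    console.log(err.code)          // 'LEVEL_SNAPSHOT_NOT_OPEN'
  }
}

example().catch(console.error)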

52
index.d.ts vendored Normal file
View File

@ -0,0 +1,52 @@
export {
AbstractLevel,
AbstractDatabaseOptions,
AbstractOpenOptions,
AbstractGetOptions,
AbstractGetManyOptions,
AbstractHasOptions,
AbstractHasManyOptions,
AbstractPutOptions,
AbstractDelOptions,
AbstractBatchOptions,
AbstractBatchOperation,
AbstractBatchPutOperation,
AbstractBatchDelOperation,
AbstractClearOptions,
AbstractDatabaseHooks,
AbstractHook,
AbstractDeferOptions
} from './types/abstract-level'
export {
AbstractIterator,
AbstractIteratorOptions,
AbstractSeekOptions,
AbstractKeyIterator,
AbstractKeyIteratorOptions,
AbstractValueIterator,
AbstractValueIteratorOptions
} from './types/abstract-iterator'
export {
AbstractChainedBatch,
AbstractChainedBatchPutOptions,
AbstractChainedBatchDelOptions,
AbstractChainedBatchWriteOptions
} from './types/abstract-chained-batch'
export {
AbstractSublevel,
AbstractSublevelOptions
} from './types/abstract-sublevel'
export {
AbstractSnapshot
} from './types/abstract-snapshot'
export {
AbstractReadOptions,
AbstractResource
} from './types/interfaces'
export * as Transcoder from 'level-transcoder'

9
index.js Normal file
View File

@ -0,0 +1,9 @@
'use strict'
exports.AbstractLevel = require('./abstract-level').AbstractLevel
exports.AbstractSublevel = require('./abstract-level').AbstractSublevel
exports.AbstractIterator = require('./abstract-iterator').AbstractIterator
exports.AbstractKeyIterator = require('./abstract-iterator').AbstractKeyIterator
exports.AbstractValueIterator = require('./abstract-iterator').AbstractValueIterator
exports.AbstractChainedBatch = require('./abstract-chained-batch').AbstractChainedBatch
exports.AbstractSnapshot = require('./abstract-snapshot').AbstractSnapshot

141
lib/abstract-sublevel-iterator.js Normal file
View File

@ -0,0 +1,141 @@
'use strict'
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
// TODO: unfix natively if db supports it
class AbstractSublevelIterator extends AbstractIterator {
#iterator
#unfix
constructor (db, options, iterator, unfix) {
super(db, options)
this.#iterator = iterator
this.#unfix = unfix
}
async _next () {
const entry = await this.#iterator.next()
if (entry !== undefined) {
const key = entry[0]
if (key !== undefined) entry[0] = this.#unfix(key)
}
return entry
}
async _nextv (size, options) {
const entries = await this.#iterator.nextv(size, options)
const unfix = this.#unfix
for (const entry of entries) {
const key = entry[0]
if (key !== undefined) entry[0] = unfix(key)
}
return entries
}
async _all (options) {
const entries = await this.#iterator.all(options)
const unfix = this.#unfix
for (const entry of entries) {
const key = entry[0]
if (key !== undefined) entry[0] = unfix(key)
}
return entries
}
_seek (target, options) {
this.#iterator.seek(target, options)
}
async _close () {
return this.#iterator.close()
}
}
class AbstractSublevelKeyIterator extends AbstractKeyIterator {
#iterator
#unfix
constructor (db, options, iterator, unfix) {
super(db, options)
this.#iterator = iterator
this.#unfix = unfix
}
async _next () {
const key = await this.#iterator.next()
return key === undefined ? key : this.#unfix(key)
}
async _nextv (size, options) {
const keys = await this.#iterator.nextv(size, options)
const unfix = this.#unfix
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (key !== undefined) keys[i] = unfix(key)
}
return keys
}
async _all (options) {
const keys = await this.#iterator.all(options)
const unfix = this.#unfix
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (key !== undefined) keys[i] = unfix(key)
}
return keys
}
_seek (target, options) {
this.#iterator.seek(target, options)
}
async _close () {
return this.#iterator.close()
}
}
class AbstractSublevelValueIterator extends AbstractValueIterator {
#iterator
constructor (db, options, iterator) {
super(db, options)
this.#iterator = iterator
}
async _next () {
return this.#iterator.next()
}
async _nextv (size, options) {
return this.#iterator.nextv(size, options)
}
async _all (options) {
return this.#iterator.all(options)
}
_seek (target, options) {
this.#iterator.seek(target, options)
}
async _close () {
return this.#iterator.close()
}
}
exports.AbstractSublevelIterator = AbstractSublevelIterator
exports.AbstractSublevelKeyIterator = AbstractSublevelKeyIterator
exports.AbstractSublevelValueIterator = AbstractSublevelValueIterator

286
lib/abstract-sublevel.js Normal file
View File

@ -0,0 +1,286 @@
'use strict'
const ModuleError = require('module-error')
const { Buffer } = require('buffer') || {}
const {
AbstractSublevelIterator,
AbstractSublevelKeyIterator,
AbstractSublevelValueIterator
} = require('./abstract-sublevel-iterator')
const kRoot = Symbol('root')
const textEncoder = new TextEncoder()
const defaults = { separator: '!' }
// Wrapped to avoid circular dependency
module.exports = function ({ AbstractLevel }) {
class AbstractSublevel extends AbstractLevel {
#globalPrefix
#localPrefix
#localPath
#globalPath
#globalUpperBound
#parent
#unfix
static defaults (options) {
if (options == null) {
return defaults
} else if (!options.separator) {
return { ...options, separator: '!' }
} else {
return options
}
}
constructor (db, name, options) {
// Don't forward AbstractSublevel options to AbstractLevel
const { separator, manifest, ...forward } = AbstractSublevel.defaults(options)
const names = [].concat(name).map(name => trim(name, separator))
// Reserve one character between separator and name to give us an upper bound
const reserved = separator.charCodeAt(0) + 1
const root = db[kRoot] || db
// Keys should sort like ['!a!', '!a!!a!', '!a"', '!aa!', '!b!'].
// Use ASCII for consistent length between string, Buffer and Uint8Array
if (!names.every(name => textEncoder.encode(name).every(x => x > reserved && x < 127))) {
throw new ModuleError(`Sublevel name must use bytes > ${reserved} < ${127}`, {
code: 'LEVEL_INVALID_PREFIX'
})
}
super(mergeManifests(db, manifest), forward)
const localPrefix = names.map(name => separator + name + separator).join('')
const globalPrefix = (db.prefix || '') + localPrefix
const globalUpperBound = globalPrefix.slice(0, -1) + String.fromCharCode(reserved)
// Most operations are forwarded to the parent database, but clear() and iterators
// still forward to the root database - which is older logic and does not yet need
// to change, until we add some form of preread or postread hooks.
this[kRoot] = root
this.#parent = db
this.#localPath = names
this.#globalPath = db.prefix ? db.path().concat(names) : names
this.#globalPrefix = new MultiFormat(globalPrefix)
this.#globalUpperBound = new MultiFormat(globalUpperBound)
this.#localPrefix = new MultiFormat(localPrefix)
this.#unfix = new Unfixer()
}
prefixKey (key, keyFormat, local) {
const prefix = local ? this.#localPrefix : this.#globalPrefix
if (keyFormat === 'utf8') {
return prefix.utf8 + key
} else if (key.byteLength === 0) {
// Fast path for empty key (no copy)
return prefix[keyFormat]
} else if (keyFormat === 'view') {
const view = prefix.view
const result = new Uint8Array(view.byteLength + key.byteLength)
result.set(view, 0)
result.set(key, view.byteLength)
return result
} else {
const buffer = prefix.buffer
return Buffer.concat([buffer, key], buffer.byteLength + key.byteLength)
}
}
// Not exposed for now.
#prefixRange (range, keyFormat) {
if (range.gte !== undefined) {
range.gte = this.prefixKey(range.gte, keyFormat, false)
} else if (range.gt !== undefined) {
range.gt = this.prefixKey(range.gt, keyFormat, false)
} else {
range.gte = this.#globalPrefix[keyFormat]
}
if (range.lte !== undefined) {
range.lte = this.prefixKey(range.lte, keyFormat, false)
} else if (range.lt !== undefined) {
range.lt = this.prefixKey(range.lt, keyFormat, false)
} else {
range.lte = this.#globalUpperBound[keyFormat]
}
}
get prefix () {
return this.#globalPrefix.utf8
}
get db () {
return this[kRoot]
}
get parent () {
return this.#parent
}
path (local = false) {
return local ? this.#localPath : this.#globalPath
}
async _open (options) {
// The parent db must open itself or be (re)opened by the user because
// a sublevel should not initiate state changes on the rest of the db.
await this.#parent.open({ passive: true })
// Close sublevel when parent is closed
this.#parent.attachResource(this)
}
async _close () {
this.#parent.detachResource(this)
}
async _put (key, value, options) {
return this.#parent.put(key, value, options)
}
async _get (key, options) {
return this.#parent.get(key, options)
}
async _getMany (keys, options) {
return this.#parent.getMany(keys, options)
}
async _has (key, options) {
return this.#parent.has(key, options)
}
async _hasMany (keys, options) {
return this.#parent.hasMany(keys, options)
}
async _del (key, options) {
return this.#parent.del(key, options)
}
async _batch (operations, options) {
return this.#parent.batch(operations, options)
}
// TODO: call parent instead of root
async _clear (options) {
// TODO (refactor): move to AbstractLevel
this.#prefixRange(options, options.keyEncoding)
return this[kRoot].clear(options)
}
// TODO: call parent instead of root
_iterator (options) {
// TODO (refactor): move to AbstractLevel
this.#prefixRange(options, options.keyEncoding)
const iterator = this[kRoot].iterator(options)
const unfix = this.#unfix.get(this.#globalPrefix.utf8.length, options.keyEncoding)
return new AbstractSublevelIterator(this, options, iterator, unfix)
}
_keys (options) {
this.#prefixRange(options, options.keyEncoding)
const iterator = this[kRoot].keys(options)
const unfix = this.#unfix.get(this.#globalPrefix.utf8.length, options.keyEncoding)
return new AbstractSublevelKeyIterator(this, options, iterator, unfix)
}
_values (options) {
this.#prefixRange(options, options.keyEncoding)
const iterator = this[kRoot].values(options)
return new AbstractSublevelValueIterator(this, options, iterator)
}
_snapshot (options) {
return this[kRoot].snapshot(options)
}
}
return { AbstractSublevel }
}
const mergeManifests = function (parent, manifest) {
return {
// Inherit manifest of parent db
...parent.supports,
// Disable unsupported features
createIfMissing: false,
errorIfExists: false,
// Unset additional events because we're not forwarding them
events: {},
// Unset additional methods (like approximateSize) which we can't support here unless
// the AbstractSublevel class is overridden by an implementation of `abstract-level`.
additionalMethods: {},
// Inherit manifest of custom AbstractSublevel subclass. Such a class is not
// allowed to override encodings.
...manifest,
encodings: {
utf8: supportsEncoding(parent, 'utf8'),
buffer: supportsEncoding(parent, 'buffer'),
view: supportsEncoding(parent, 'view')
}
}
}
const supportsEncoding = function (parent, encoding) {
// Prefer a non-transcoded encoding for optimal performance
return parent.supports.encodings[encoding]
? parent.keyEncoding(encoding).name === encoding
: false
}
class MultiFormat {
constructor (key) {
this.utf8 = key
this.view = textEncoder.encode(key)
this.buffer = Buffer ? Buffer.from(this.view.buffer, 0, this.view.byteLength) : {}
}
}
class Unfixer {
constructor () {
this.cache = new Map()
}
get (prefixLength, keyFormat) {
let unfix = this.cache.get(keyFormat)
if (unfix === undefined) {
if (keyFormat === 'view') {
unfix = function (prefixLength, key) {
// Avoid Uint8Array#slice() because it copies
return key.subarray(prefixLength)
}.bind(null, prefixLength)
} else {
unfix = function (prefixLength, key) {
// Avoid Buffer#subarray() because it's slow
return key.slice(prefixLength)
}.bind(null, prefixLength)
}
this.cache.set(keyFormat, unfix)
}
return unfix
}
}
const trim = function (str, char) {
let start = 0
let end = str.length
while (start < end && str[start] === char) start++
while (end > start && str[end - 1] === char) end--
return str.slice(start, end)
}
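
A minimal usage sketch of sublevel key prefixing and scoping as implemented above, assuming memory-level as a concrete implementation; illustrative only, not part of this commit:
const { MemoryLevel } = require('memory-level') // assumed implementation

async function example () {
  const db = new MemoryLevel()
  await db.open()

  const colors = db.sublevel('colors')
  console.log(colors.prefix) // '!colors!'

  await colors.put('sky', 'blue')
  await db.put('other', 'value')

  // The sublevel only sees its own keys, without the prefix
  console.log(await colors.keys().all()) // ['sky']

  // The root database sees the prefixed key
  console.log(await db.keys().all()) // ['!colors!sky', 'other']

  // Nested sublevels concatenate prefixes
  console.log(colors.sublevel('warm').prefix) // '!colors!warm!'

  return db.close()
}

example().catch(console.error)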

33
lib/common.js Normal file
View File

@ -0,0 +1,33 @@
'use strict'
const ModuleError = require('module-error')
const deprecations = new Set()
exports.getOptions = function (options, def) {
if (typeof options === 'object' && options !== null) {
return options
}
if (def !== undefined) {
return def
}
return {}
}
exports.emptyOptions = Object.freeze({})
exports.noop = function () {}
exports.resolvedPromise = Promise.resolve()
exports.deprecate = function (message) {
if (!deprecations.has(message)) {
deprecations.add(message)
// Avoid polyfills
const c = globalThis.console
if (typeof c !== 'undefined' && typeof c.warn === 'function') {
c.warn(new ModuleError(message, { code: 'LEVEL_LEGACY' }))
}
}
}

View File

@ -0,0 +1,29 @@
'use strict'
const { AbstractChainedBatch } = require('../abstract-chained-batch')
// Functional default for chained batch
class DefaultChainedBatch extends AbstractChainedBatch {
#encoded = []
constructor (db) {
// Opt-in to _add() instead of _put() and _del()
super(db, { add: true })
}
_add (op) {
this.#encoded.push(op)
}
_clear () {
this.#encoded = []
}
async _write (options) {
// Need to call the private rather than public method, to prevent
// recursion, double prefixing, double encoding and double hooks.
return this.db._batch(this.#encoded, options)
}
}
exports.DefaultChainedBatch = DefaultChainedBatch

View File

@ -0,0 +1,74 @@
'use strict'
const { AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
const kIterator = Symbol('iterator')
const kHandleOne = Symbol('handleOne')
const kHandleMany = Symbol('handleMany')
class DefaultKeyIterator extends AbstractKeyIterator {
constructor (db, options) {
super(db, options)
this[kIterator] = db.iterator({ ...options, keys: true, values: false })
}
[kHandleOne] (entry) {
return entry[0]
}
[kHandleMany] (entries) {
for (let i = 0; i < entries.length; i++) {
entries[i] = entries[i][0]
}
}
}
class DefaultValueIterator extends AbstractValueIterator {
constructor (db, options) {
super(db, options)
this[kIterator] = db.iterator({ ...options, keys: false, values: true })
}
[kHandleOne] (entry) {
return entry[1]
}
[kHandleMany] (entries) {
for (let i = 0; i < entries.length; i++) {
entries[i] = entries[i][1]
}
}
}
for (const Iterator of [DefaultKeyIterator, DefaultValueIterator]) {
Iterator.prototype._next = async function () {
const entry = await this[kIterator].next()
return entry === undefined ? entry : this[kHandleOne](entry)
}
Iterator.prototype._nextv = async function (size, options) {
const entries = await this[kIterator].nextv(size, options)
this[kHandleMany](entries)
return entries
}
Iterator.prototype._all = async function (options) {
const entries = await this[kIterator].all(options)
this[kHandleMany](entries)
return entries
}
Iterator.prototype._seek = function (target, options) {
this[kIterator].seek(target, options)
}
Iterator.prototype._close = async function () {
return this[kIterator].close()
}
}
// Internal utilities, should be typed as AbstractKeyIterator and AbstractValueIterator
exports.DefaultKeyIterator = DefaultKeyIterator
exports.DefaultValueIterator = DefaultValueIterator

110
lib/deferred-iterator.js Normal file
View File

@ -0,0 +1,110 @@
'use strict'
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
const ModuleError = require('module-error')
const kNut = Symbol('nut')
const kUndefer = Symbol('undefer')
const kFactory = Symbol('factory')
const kSignalOptions = Symbol('signalOptions')
class DeferredIterator extends AbstractIterator {
constructor (db, options) {
super(db, options)
this[kNut] = null
this[kFactory] = () => db.iterator(options)
this[kSignalOptions] = { signal: options.signal }
this.db.defer(() => this[kUndefer](), this[kSignalOptions])
}
}
class DeferredKeyIterator extends AbstractKeyIterator {
constructor (db, options) {
super(db, options)
this[kNut] = null
this[kFactory] = () => db.keys(options)
this[kSignalOptions] = { signal: options.signal }
this.db.defer(() => this[kUndefer](), this[kSignalOptions])
}
}
class DeferredValueIterator extends AbstractValueIterator {
constructor (db, options) {
super(db, options)
this[kNut] = null
this[kFactory] = () => db.values(options)
this[kSignalOptions] = { signal: options.signal }
this.db.defer(() => this[kUndefer](), this[kSignalOptions])
}
}
for (const Iterator of [DeferredIterator, DeferredKeyIterator, DeferredValueIterator]) {
Iterator.prototype[kUndefer] = function () {
if (this.db.status === 'open') {
this[kNut] = this[kFactory]()
}
}
Iterator.prototype._next = async function () {
if (this[kNut] !== null) {
return this[kNut].next()
} else if (this.db.status === 'opening') {
return this.db.deferAsync(() => this._next(), this[kSignalOptions])
} else {
throw new ModuleError('Iterator is not open: cannot call next() after close()', {
code: 'LEVEL_ITERATOR_NOT_OPEN'
})
}
}
Iterator.prototype._nextv = async function (size, options) {
if (this[kNut] !== null) {
return this[kNut].nextv(size, options)
} else if (this.db.status === 'opening') {
return this.db.deferAsync(() => this._nextv(size, options), this[kSignalOptions])
} else {
throw new ModuleError('Iterator is not open: cannot call nextv() after close()', {
code: 'LEVEL_ITERATOR_NOT_OPEN'
})
}
}
Iterator.prototype._all = async function (options) {
if (this[kNut] !== null) {
return this[kNut].all()
} else if (this.db.status === 'opening') {
return this.db.deferAsync(() => this._all(options), this[kSignalOptions])
} else {
throw new ModuleError('Iterator is not open: cannot call all() after close()', {
code: 'LEVEL_ITERATOR_NOT_OPEN'
})
}
}
Iterator.prototype._seek = function (target, options) {
if (this[kNut] !== null) {
// TODO: explain why we need _seek() rather than seek() here
this[kNut]._seek(target, options)
} else if (this.db.status === 'opening') {
this.db.defer(() => this._seek(target, options), this[kSignalOptions])
}
}
Iterator.prototype._close = async function () {
if (this[kNut] !== null) {
return this[kNut].close()
} else if (this.db.status === 'opening') {
return this.db.deferAsync(() => this._close())
}
}
}
exports.DeferredIterator = DeferredIterator
exports.DeferredKeyIterator = DeferredKeyIterator
exports.DeferredValueIterator = DeferredValueIterator
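
A minimal sketch of deferred reads, assuming memory-level as a concrete implementation: operations issued while the database is still opening are queued and resolve once opening completes (illustrative only, not part of this commit):
const { MemoryLevel } = require('memory-level') // assumed implementation

async function example () {
  const db = new MemoryLevel()

  // The database is still opening here, so these reads are deferred:
  // get() via the deferred queue, keys() via DeferredKeyIterator
  const value = db.get('a')
  const keys = db.keys()

  console.log(await value)      // undefined (nothing has been written)
  console.log(await keys.all()) // []

  return db.close()
}

example().catch(console.error)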

84
lib/deferred-queue.js Normal file
View File

@ -0,0 +1,84 @@
'use strict'
const { getOptions, emptyOptions } = require('./common')
const { AbortError } = require('./errors')
class DeferredOperation {
constructor (fn, signal) {
this.fn = fn
this.signal = signal
}
}
class DeferredQueue {
#operations
#signals
constructor () {
this.#operations = []
this.#signals = new Set()
}
add (fn, options) {
options = getOptions(options, emptyOptions)
const signal = options.signal
if (signal == null) {
this.#operations.push(new DeferredOperation(fn, null))
return
}
if (signal.aborted) {
// Note that this is called in the same tick
fn(new AbortError())
return
}
if (!this.#signals.has(signal)) {
this.#signals.add(signal)
signal.addEventListener('abort', this.#handleAbort, { once: true })
}
this.#operations.push(new DeferredOperation(fn, signal))
}
drain () {
const operations = this.#operations
const signals = this.#signals
this.#operations = []
this.#signals = new Set()
for (const signal of signals) {
signal.removeEventListener('abort', this.#handleAbort)
}
for (const operation of operations) {
operation.fn.call(null)
}
}
#handleAbort = (ev) => {
const signal = ev.target
const err = new AbortError()
const aborted = []
// TODO: optimize
this.#operations = this.#operations.filter(function (operation) {
if (operation.signal !== null && operation.signal === signal) {
aborted.push(operation)
return false
} else {
return true
}
})
this.#signals.delete(signal)
for (const operation of aborted) {
operation.fn.call(null, err)
}
}
}
exports.DeferredQueue = DeferredQueue
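
A minimal sketch of the queue above in isolation: deferred functions run on drain(), unless their AbortSignal fires first, in which case they are called early with an AbortError:
const { DeferredQueue } = require('./lib/deferred-queue') // path relative to the package root

const queue = new DeferredQueue()
const ac = new AbortController()

queue.add(function (err) {
  console.log('aborted:', err.code) // 'LEVEL_ABORTED'
}, { signal: ac.signal })

queue.add(function () {
  console.log('ran on drain')
})

ac.abort()    // calls the first function now, with an AbortError
queue.drain() // runs the remaining function; the aborted one was removed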

21
lib/errors.js Normal file
View File

@ -0,0 +1,21 @@
'use strict'
const ModuleError = require('module-error')
class AbortError extends ModuleError {
constructor (cause) {
super('Operation has been aborted', {
code: 'LEVEL_ABORTED',
cause
})
}
// Set name to AbortError for web compatibility. See:
// https://dom.spec.whatwg.org/#aborting-ongoing-activities
// https://github.com/nodejs/node/pull/35911#discussion_r515779306
get name () {
return 'AbortError'
}
}
exports.AbortError = AbortError
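
A minimal usage sketch showing how this error surfaces when an iterator read is aborted via the signal option (see abstract-iterator.js), assuming memory-level as a concrete implementation:
const { MemoryLevel } = require('memory-level') // assumed implementation

async function example () {
  const db = new MemoryLevel()
  await db.open()

  const ac = new AbortController()
  const it = db.iterator({ signal: ac.signal })
  ac.abort()

  try {
    await it.next()
  } catch (err) {
    console.log(err.name, err.code) // 'AbortError' 'LEVEL_ABORTED'
  }

  await it.close()
  return db.close()
}

example().catch(console.error)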

30
lib/event-monitor.js Normal file
View File

@ -0,0 +1,30 @@
'use strict'
const { deprecate } = require('./common')
exports.EventMonitor = class EventMonitor {
constructor (emitter) {
// Track whether listeners are present, because checking
// a boolean is faster than checking listenerCount().
this.write = false
const beforeAdded = (name) => {
if (name === 'write') {
this.write = true
}
if (name === 'put' || name === 'del' || name === 'batch') {
deprecate(`The '${name}' event has been removed in favor of 'write'`)
}
}
const afterRemoved = (name) => {
if (name === 'write') {
this.write = emitter.listenerCount('write') > 0
}
}
emitter.on('newListener', beforeAdded)
emitter.on('removeListener', afterRemoved)
}
}
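
A minimal sketch of EventMonitor in isolation: the write path reads a boolean flag instead of calling listenerCount('write') on every operation:
const { EventEmitter } = require('events')
const { EventMonitor } = require('./lib/event-monitor') // path relative to the package root

const emitter = new EventEmitter()
const monitor = new EventMonitor(emitter)

console.log(monitor.write) // false: no 'write' listeners yet

const listener = () => {}
emitter.on('write', listener)
console.log(monitor.write) // true

emitter.removeListener('write', listener)
console.log(monitor.write) // false again

// Listening to removed events logs a one-time LEVEL_LEGACY deprecation warning
emitter.on('put', () => {})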

78
lib/hooks.js Normal file
View File

@ -0,0 +1,78 @@
'use strict'
const { noop } = require('./common')
class DatabaseHooks {
constructor () {
this.postopen = new Hook({ async: true })
this.prewrite = new Hook({ async: false })
this.newsub = new Hook({ async: false })
}
}
class Hook {
#functions = new Set()
#isAsync
constructor (options) {
this.#isAsync = options.async
// Offer a fast way to check if hook functions are present. We could also expose a
// size getter, which would be slower, or check it by hook.run !== noop, which would
// not allow userland to do the same check.
this.noop = true
this.run = this.#runner()
}
add (fn) {
// Validate now rather than in asynchronous code paths
assertFunction(fn)
this.#functions.add(fn)
this.noop = false
this.run = this.#runner()
}
delete (fn) {
assertFunction(fn)
this.#functions.delete(fn)
this.noop = this.#functions.size === 0
this.run = this.#runner()
}
#runner () {
if (this.noop) {
return noop
} else if (this.#functions.size === 1) {
const [fn] = this.#functions
return fn
} else if (this.#isAsync) {
// The run function should not reference hook, so that consumers like chained batch
// and db.open() can save a reference to hook.run and safely assume it won't change
// during their lifetime or async work.
const run = async function (functions, ...args) {
for (const fn of functions) {
await fn(...args)
}
}
return run.bind(null, Array.from(this.#functions))
} else {
const run = function (functions, ...args) {
for (const fn of functions) {
fn(...args)
}
}
return run.bind(null, Array.from(this.#functions))
}
}
}
const assertFunction = function (fn) {
if (typeof fn !== 'function') {
const hint = fn === null ? 'null' : typeof fn
throw new TypeError(`The first argument must be a function, received ${hint}`)
}
}
exports.DatabaseHooks = DatabaseHooks
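
A minimal sketch of the hook mechanics above in isolation; hook.run is rebound whenever functions are added or removed, so callers can capture a stable reference for the duration of an operation:
const { DatabaseHooks } = require('./lib/hooks') // path relative to the package root

const hooks = new DatabaseHooks()
console.log(hooks.prewrite.noop) // true: run is currently a noop

hooks.prewrite.add(function (op, batch) {
  console.log('prewrite', op.type, op.key)
})

console.log(hooks.prewrite.noop) // false
hooks.prewrite.run({ type: 'put', key: 'a', value: '1' }, null) // batch argument unused here

// postopen is an async hook: with multiple functions, run() awaits them in order
hooks.postopen.add(async function (options) {
  console.log('opened with', options)
})
hooks.postopen.run({ passive: false }).then(() => console.log('postopen done'))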

21
lib/prefixes.js Normal file
View File

@ -0,0 +1,21 @@
'use strict'
exports.prefixDescendantKey = function (key, keyFormat, descendant, ancestor) {
while (descendant !== null && descendant !== ancestor) {
key = descendant.prefixKey(key, keyFormat, true)
descendant = descendant.parent
}
return key
}
// Check if db is a descendant of ancestor
// TODO: optimize, when used alongside prefixDescendantKey
// which means we visit parents twice.
exports.isDescendant = function (db, ancestor) {
while (true) {
if (db.parent == null) return false
if (db.parent === ancestor) return true
db = db.parent
}
}
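
A minimal sketch of these helpers in isolation, with stand-in sublevel-like objects that only provide parent and prefixKey():
const { prefixDescendantKey, isDescendant } = require('./lib/prefixes') // path relative to the package root

const root = { parent: null, prefixKey: (key) => key }
const people = { parent: root, prefixKey: (key) => '!people!' + key }
const admins = { parent: people, prefixKey: (key) => '!admins!' + key }

console.log(isDescendant(admins, root)) // true
console.log(isDescendant(root, admins)) // false

// Walks from the descendant up to (but excluding) the ancestor, prefixing locally at each step
console.log(prefixDescendantKey('alice', 'utf8', admins, root)) // '!people!!admins!alice'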

96
lib/prewrite-batch.js Normal file
View File

@ -0,0 +1,96 @@
'use strict'
const { prefixDescendantKey, isDescendant } = require('./prefixes')
// An interface for prewrite hook functions to add operations
class PrewriteBatch {
#db
#privateOperations
#publicOperations
constructor (db, privateOperations, publicOperations) {
this.#db = db
// Note: for db.batch([]) these arrays include the input operations (or empty slots
// for them), but for a chained batch they do not. Small implementation detail.
this.#privateOperations = privateOperations
this.#publicOperations = publicOperations
}
add (op) {
const isPut = op.type === 'put'
const delegated = op.sublevel != null
const db = delegated ? op.sublevel : this.#db
db._assertValidKey(op.key)
op.keyEncoding = db.keyEncoding(op.keyEncoding)
if (isPut) {
db._assertValidValue(op.value)
op.valueEncoding = db.valueEncoding(op.valueEncoding)
} else if (op.type !== 'del') {
throw new TypeError("A batch operation must have a type property that is 'put' or 'del'")
}
// Encode data for private API
const keyEncoding = op.keyEncoding
const preencodedKey = keyEncoding.encode(op.key)
const keyFormat = keyEncoding.format
// If the sublevel is not a descendant then forward that option to the parent db
// so that we don't erroneously add our own prefix to the key of the operation.
const siblings = delegated && !isDescendant(op.sublevel, this.#db) && op.sublevel !== this.#db
const encodedKey = delegated && !siblings
? prefixDescendantKey(preencodedKey, keyFormat, db, this.#db)
: preencodedKey
// Only prefix once
if (delegated && !siblings) {
op.sublevel = null
}
let publicOperation = null
// If the sublevel is not a descendant then we shouldn't emit events
if (this.#publicOperations !== null && !siblings) {
// Clone op before we mutate it for the private API
publicOperation = { ...op }
publicOperation.encodedKey = encodedKey
if (delegated) {
// Ensure emitted data makes sense in the context of this.#db
publicOperation.key = encodedKey
publicOperation.keyEncoding = this.#db.keyEncoding(keyFormat)
}
this.#publicOperations.push(publicOperation)
}
// If we're forwarding the sublevel option then don't prefix the key yet
op.key = siblings ? encodedKey : this.#db.prefixKey(encodedKey, keyFormat, true)
op.keyEncoding = keyFormat
if (isPut) {
const valueEncoding = op.valueEncoding
const encodedValue = valueEncoding.encode(op.value)
const valueFormat = valueEncoding.format
op.value = encodedValue
op.valueEncoding = valueFormat
if (publicOperation !== null) {
publicOperation.encodedValue = encodedValue
if (delegated) {
publicOperation.value = encodedValue
publicOperation.valueEncoding = this.#db.valueEncoding(valueFormat)
}
}
}
this.#privateOperations.push(op)
return this
}
}
exports.PrewriteBatch = PrewriteBatch
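
A minimal usage sketch of the prewrite hook that feeds this class, with secondary indexing as the typical use case; memory-level is assumed as a concrete implementation (illustrative only, not part of this commit):
const { MemoryLevel } = require('memory-level') // assumed implementation

async function example () {
  const db = new MemoryLevel()
  await db.open()
  const index = db.sublevel('index')

  db.hooks.prewrite.add(function (op, batch) {
    if (op.type === 'put') {
      // Add a derived operation to the same atomic write, targeting a sublevel
      batch.add({ type: 'put', sublevel: index, key: op.value, value: op.key })
    }
  })

  await db.put('person:1', 'alice')
  console.log(await index.get('alice')) // 'person:1'

  return db.close()
}

example().catch(console.error)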

26
lib/range-options.js Normal file
View File

@ -0,0 +1,26 @@
'use strict'
const hasOwnProperty = Object.prototype.hasOwnProperty
const rangeOptions = new Set(['lt', 'lte', 'gt', 'gte'])
module.exports = function (options, keyEncoding) {
const result = {}
for (const k in options) {
if (!hasOwnProperty.call(options, k)) continue
if (k === 'keyEncoding' || k === 'valueEncoding') continue
if (rangeOptions.has(k)) {
// Note that we don't reject nullish and empty options here. While
// those types are invalid as keys, they are valid as range options.
result[k] = keyEncoding.encode(options[k])
} else {
result[k] = options[k]
}
}
result.reverse = !!result.reverse
result.limit = Number.isInteger(result.limit) && result.limit >= 0 ? result.limit : -1
return result
}
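
A minimal sketch of this helper in isolation, with a stand-in key encoding that only needs an encode() function:
const rangeOptions = require('./lib/range-options') // path relative to the package root

const utf8 = { encode: (key) => String(key) } // stand-in for a level-transcoder encoding

console.log(rangeOptions({ gte: 'a', lt: 'z', reverse: 1, limit: '10', extra: true }, utf8))
// { gte: 'a', lt: 'z', reverse: true, limit: -1, extra: true }
// gte/lt were encoded, reverse was coerced to a boolean and the
// non-integer limit fell back to -1 (meaning unlimited)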

65
package.json Normal file
View File

@ -0,0 +1,65 @@
{
"name": "abstract-level",
"version": "3.0.1",
"description": "Abstract class for a lexicographically sorted key-value database",
"license": "MIT",
"main": "index.js",
"types": "./index.d.ts",
"scripts": {
"test": "standard && hallmark && (nyc -s node test/self.js | tap-arc) && nyc report",
"test-pessimistic": "node test/self.js | tap-arc -pv",
"test-browsers": "airtap --coverage test/self.js",
"test-electron": "airtap -p electron --coverage test/self.js | tap-arc",
"coverage": "nyc report -r lcovonly"
},
"files": [
"abstract-chained-batch.js",
"abstract-iterator.js",
"abstract-level.js",
"abstract-snapshot.js",
"index.js",
"index.d.ts",
"lib",
"test",
"types",
"CHANGELOG.md",
"UPGRADING.md"
],
"dependencies": {
"buffer": "^6.0.3",
"is-buffer": "^2.0.5",
"level-supports": "^6.2.0",
"level-transcoder": "^1.0.1",
"maybe-combine-errors": "^1.0.0",
"module-error": "^1.0.1"
},
"devDependencies": {
"@babel/preset-env": "^7.26.0",
"@types/node": "^22.10.2",
"@voxpelli/tsconfig": "^15.1.0",
"airtap": "^5.0.0",
"airtap-electron": "^1.0.0",
"airtap-playwright": "^1.0.1",
"babelify": "^10.0.0",
"electron": "^33.2.1",
"hallmark": "^5.0.1",
"nyc": "^17.1.0",
"standard": "^17.1.2",
"tap-arc": "^1.3.2",
"tape": "^5.9.0",
"typescript": "^5.7.2"
},
"repository": {
"type": "git",
"url": "https://github.com/Level/abstract-level.git"
},
"homepage": "https://github.com/Level/abstract-level",
"keywords": [
"abstract-level",
"level",
"leveldb"
],
"engines": {
"node": ">=18"
}
}

132
test/async-iterator-test.js Normal file
View File

@ -0,0 +1,132 @@
'use strict'
const input = [{ key: '1', value: '1' }, { key: '2', value: '2' }]
let db
exports.setup = function (test, testCommon) {
test('async iterator setup', async function (t) {
db = testCommon.factory()
await db.open()
return db.batch(input.map(entry => ({ ...entry, type: 'put' })))
})
}
exports.asyncIterator = function (test, testCommon) {
for (const mode of ['iterator', 'keys', 'values']) {
test(`for await...of ${mode}()`, async function (t) {
t.plan(1)
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const output = []
for await (const item of it) {
output.push(item)
}
t.same(output, input.map(({ key, value }) => {
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
}))
})
testCommon.supports.permanence && test(`for await...of ${mode}() (deferred)`, async function (t) {
t.plan(1)
const db = testCommon.factory()
await db.batch(input.map(entry => ({ ...entry, type: 'put' })))
await db.close()
// Don't await
db.open()
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const output = []
for await (const item of it) {
output.push(item)
}
t.same(output, input.map(({ key, value }) => {
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
}))
await db.close()
})
testCommon.supports.implicitSnapshots && test(`for await...of ${mode}() (deferred, with snapshot)`, async function (t) {
t.plan(2)
const db = testCommon.factory()
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const promise = db.batch(input.map(entry => ({ ...entry, type: 'put' })))
const output = []
for await (const item of it) {
output.push(item)
}
t.same(output, [], 'used snapshot')
// Wait for data to be written
await promise
for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) {
output.push(item)
}
t.same(output, input.map(({ key, value }) => {
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
}))
await db.close()
})
for (const deferred of [false, true]) {
test(`for await...of ${mode}() (empty, deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
const entries = []
if (!deferred) await db.open()
for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) {
entries.push(item)
}
t.same(entries, [])
await db.close()
})
}
test(`for await...of ${mode}() does not permit reuse`, async function (t) {
t.plan(3)
const it = db[mode]()
// eslint-disable-next-line no-unused-vars
for await (const item of it) {
t.pass('nexted')
}
try {
// eslint-disable-next-line no-unused-vars
for await (const item of it) {
t.fail('should not be called')
}
} catch (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}
})
}
}
exports.teardown = async function (test, testCommon) {
test('async iterator teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setup(test, testCommon)
exports.asyncIterator(test, testCommon)
exports.teardown(test, testCommon)
}

214
test/batch-test.js Normal file
View File

@ -0,0 +1,214 @@
'use strict'
const { Buffer } = require('buffer')
const { illegalKeys, illegalValues } = require('./util')
let db
exports.setUp = function (test, testCommon) {
test('batch([]) setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('batch([]) with missing value fails', function (t) {
t.plan(1)
db.batch([{ type: 'put', key: 'foo1' }]).catch((err) => {
t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code')
})
})
test('batch([]) with illegal values fails', function (t) {
t.plan(illegalValues.length * 2)
for (const { name, value } of illegalValues) {
db.batch([{ type: 'put', key: 'foo1', value }]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code')
})
}
})
test('batch([]) with missing key fails', function (t) {
t.plan(1)
db.batch([{ type: 'put', value: 'foo1' }]).catch(function (err) {
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code')
})
})
test('batch([]) with illegal keys fails', function (t) {
t.plan(illegalKeys.length * 2)
for (const { name, key } of illegalKeys) {
db.batch([{ type: 'put', key, value: 'foo1' }]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
}
})
test('batch([]) with missing or incorrect type fails', function (t) {
t.plan(4)
db.batch([{ key: 'key', value: 'value' }]).catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message')
})
db.batch([{ key: 'key', value: 'value', type: 'foo' }]).catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message')
})
})
test('batch([]) with missing or nullish operations fails', function (t) {
t.plan(2 * 2)
for (const array of [null, undefined]) {
db.batch(array).catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err.message, "The first argument 'operations' must be an array", 'correct error message')
})
}
})
test('batch([]) with empty operations array and empty options', async function (t) {
await db.batch([])
await db.batch([], null)
await db.batch([], undefined)
await db.batch([], {})
})
;[null, undefined, 1, true].forEach(function (operation) {
const type = operation === null ? 'null' : typeof operation
test(`batch([]) with ${type} operation fails`, function (t) {
t.plan(1)
db.batch([operation]).catch(function (err) {
// We can either explicitly check the type of the op and throw a TypeError,
// or skip that for performance reasons in which case the next thing checked
// will be op.key or op.type. Doesn't matter, because we've documented that
// TypeErrors and such are not part of the semver contract.
t.ok(err.name === 'TypeError' || err.code === 'LEVEL_INVALID_KEY')
})
})
})
}
exports.batch = function (test, testCommon) {
test('simple batch([])', async function (t) {
const db = testCommon.factory()
await db.open()
await db.batch([{ type: 'del', key: 'non-existent' }]) // should not error
t.is(await db.get('foo'), undefined, 'not found')
await db.batch([{ type: 'put', key: 'foo', value: 'bar' }])
t.is(await db.get('foo'), 'bar')
await db.batch([{ type: 'del', key: 'foo' }])
t.is(await db.get('foo'), undefined, 'not found')
return db.close()
})
test('batch([]) with multiple operations', async function (t) {
t.plan(3)
await db.batch([
{ type: 'put', key: 'foobatch1', value: 'bar1' },
{ type: 'put', key: 'foobatch2', value: 'bar2' },
{ type: 'put', key: 'foobatch3', value: 'bar3' },
{ type: 'del', key: 'foobatch2' }
])
const promises = [
db.get('foobatch1').then(function (value) {
t.is(value, 'bar1')
}),
db.get('foobatch2').then(function (value) {
t.is(value, undefined, 'not found')
}),
db.get('foobatch3').then(function (value) {
t.is(value, 'bar3')
})
]
return Promise.all(promises)
})
for (const encoding of ['utf8', 'buffer', 'view']) {
if (!testCommon.supports.encodings[encoding]) continue
// NOTE: adapted from memdown
test(`empty values in batch with ${encoding} valueEncoding`, async function (t) {
const db = testCommon.factory({ valueEncoding: encoding })
const values = ['', Uint8Array.from([]), Buffer.alloc(0)]
const expected = encoding === 'utf8' ? values[0] : encoding === 'view' ? values[1] : values[2]
await db.open()
await db.batch(values.map((value, i) => ({ type: 'put', key: String(i), value })))
for (let i = 0; i < values.length; i++) {
const value = await db.get(String(i))
// Buffer is a Uint8Array, so this is allowed
if (encoding === 'view' && Buffer.isBuffer(value)) {
t.same(value, values[2])
} else {
t.same(value, expected)
}
}
return db.close()
})
test(`empty keys in batch with ${encoding} keyEncoding`, async function (t) {
const db = testCommon.factory({ keyEncoding: encoding })
const keys = ['', Uint8Array.from([]), Buffer.alloc(0)]
await db.open()
for (let i = 0; i < keys.length; i++) {
await db.batch([{ type: 'put', key: keys[i], value: String(i) }])
t.same(await db.get(keys[i]), String(i), `got value ${i}`)
}
return db.close()
})
}
}
exports.atomic = function (test, testCommon) {
test('batch([]) is atomic', async function (t) {
t.plan(3)
try {
await db.batch([
{ type: 'put', key: 'foobah1', value: 'bar1' },
{ type: 'put', value: 'bar2' },
{ type: 'put', key: 'foobah3', value: 'bar3' }
])
} catch (err) {
t.is(err.code, 'LEVEL_INVALID_KEY', 'should error and not commit anything')
}
t.is(await db.get('foobah1'), undefined, 'not found')
t.is(await db.get('foobah3'), undefined, 'not found')
})
}
exports.tearDown = function (test, testCommon) {
test('batch([]) teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.batch(test, testCommon)
exports.atomic(test, testCommon)
exports.tearDown(test, testCommon)
}

315
test/chained-batch-test.js Normal file
View File

@ -0,0 +1,315 @@
'use strict'
let db
exports.setUp = function (test, testCommon) {
test('chained batch setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('chained batch has db reference', async function (t) {
const batch = db.batch()
t.ok(batch.db === db)
return batch.close()
})
test('chained batch.put() with missing or nullish value fails', async function (t) {
t.plan(3 * 2)
for (const args of [[null], [undefined], []]) {
const batch = db.batch()
try {
batch.put('key', ...args)
} catch (err) {
t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code')
t.is(batch.length, 0, 'length is not incremented on error')
}
await batch.close()
}
})
test('chained batch.put() with missing or nullish key fails', async function (t) {
t.plan(3 * 2)
for (const args of [[], [null, 'foo'], [undefined, 'foo']]) {
const batch = db.batch()
try {
batch.put(...args)
} catch (err) {
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code')
t.is(batch.length, 0, 'length is not incremented on error')
}
await batch.close()
}
})
test('chained batch.del() with missing or nullish key fails', async function (t) {
t.plan(3 * 2)
for (const args of [[null], [undefined], []]) {
const batch = db.batch()
try {
batch.del(...args)
} catch (err) {
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code')
t.is(batch.length, 0, 'length is not incremented on error')
}
await batch.close()
}
})
test('chained batch.clear() does not throw if empty', async function (t) {
return db.batch().clear().close()
})
test('chained batch.put() after write() fails', async function (t) {
t.plan(1)
const batch = db.batch().put('foo', 'bar')
await batch.write()
try {
batch.put('boom', 'bang')
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
}
})
test('chained batch.del() after write() fails', async function (t) {
t.plan(1)
const batch = db.batch().put('foo', 'bar')
await batch.write()
try {
batch.del('foo')
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
}
})
test('chained batch.clear() after write() fails', async function (t) {
t.plan(1)
const batch = db.batch().put('foo', 'bar')
await batch.write()
try {
batch.clear()
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
}
})
test('chained batch.write() after write() fails', async function (t) {
t.plan(1)
const batch = db.batch().put('foo', 'bar')
await batch.write()
try {
await batch.write()
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
}
})
test('chained batch.write() after close() fails', async function (t) {
t.plan(1)
const batch = db.batch().put('foo', 'bar')
await batch.close()
try {
await batch.write()
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
}
})
test('chained batch.write() with no operations', async function (t) {
return db.batch().write()
})
test('chained batch.close() with no operations', async function (t) {
return db.batch().close()
})
test('chained batch.close() is idempotent', async function (t) {
const batch = db.batch()
await batch.close()
await batch.close()
return Promise.all([batch.close(), batch.close()])
})
}
exports.batch = function (test, testCommon) {
test('simple chained batch', async function (t) {
await db.batch([
{ type: 'put', key: 'one', value: '1' },
{ type: 'put', key: 'two', value: '2' },
{ type: 'put', key: 'three', value: '3' }
])
const batch = db.batch()
.put('1', 'one')
.del('2', 'two')
.put('3', 'three')
t.is(batch.length, 3, 'length was incremented')
batch.clear()
t.is(batch.length, 0, 'length is reset')
batch.put('one', 'I')
.put('two', 'II')
.del('three')
.put('foo', 'bar')
t.is(batch.length, 4, 'length was incremented')
await batch.write()
t.same(await db.iterator().all(), [
['foo', 'bar'],
['one', 'I'],
['two', 'II']
])
})
test('chained batch requires database to be open', async function (t) {
t.plan(5)
const db1 = testCommon.factory()
const db2 = testCommon.factory()
try {
db1.batch()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
await db2.open()
const batch = db2.batch()
await db2.close()
try {
batch.put('beep', 'boop')
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN')
}
try {
batch.del('456')
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN')
}
try {
batch.clear()
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN')
}
try {
await batch.write()
} catch (err) {
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN')
}
// Should be a noop (already closed)
await batch.close()
return Promise.all([db1.close(), db2.close()])
})
// NOTE: adapted from levelup
test('chained batch with per-operation encoding options', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
const utf8 = db.keyEncoding('utf8')
const json = db.valueEncoding('json')
db.once('write', function (operations) {
t.same(operations, [
{ type: 'put', key: 'a', value: 'a', keyEncoding: utf8, valueEncoding: json, encodedKey: utf8.encode('a'), encodedValue: utf8.encode('"a"') },
{ type: 'put', key: 'b', value: 'b', keyEncoding: utf8, valueEncoding: utf8, encodedKey: utf8.encode('b'), encodedValue: utf8.encode('b') },
{ type: 'put', key: '"c"', value: 'c', keyEncoding: utf8, valueEncoding: utf8, encodedKey: utf8.encode('"c"'), encodedValue: utf8.encode('c') },
{ type: 'del', key: 'c', keyEncoding: json, encodedKey: utf8.encode('"c"'), arbitraryOption: true }
])
})
await db.batch()
.put('a', 'a', { valueEncoding: 'json' })
.put('b', 'b')
.put('"c"', 'c')
.del('c', { keyEncoding: 'json', arbitraryOption: true })
.write()
t.same(await db.iterator().all(), [
['a', '"a"'],
['b', 'b']
])
return db.close()
})
}
exports.events = function (test, testCommon) {
test('db.close() on chained batch write event', async function (t) {
const db = testCommon.factory()
await db.open()
let promise
db.on('write', function () {
// Should not interfere with the current write() operation
promise = db.close()
})
await db.batch().put('a', 'b').write()
await promise
t.ok(promise, 'event was emitted')
})
}
exports.tearDown = function (test, testCommon) {
test('chained batch teardown', async function (t) {
return db.close()
})
}
exports.dispose = function (test, testCommon) {
// Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management)
Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) {
const db = testCommon.factory()
await db.open()
const batch = db.batch()
await batch[Symbol.asyncDispose]()
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.batch(test, testCommon)
exports.events(test, testCommon)
exports.tearDown(test, testCommon)
exports.dispose(test, testCommon)
}

262
test/clear-range-test.js Normal file
View File

@ -0,0 +1,262 @@
'use strict'
const data = (function () {
const d = []
let i = 0
let k
for (; i < 100; i++) {
k = (i < 10 ? '0' : '') + i
d.push({
key: k,
value: String(Math.random())
})
}
return d
}())
exports.range = function (test, testCommon) {
function rangeTest (name, opts, expected) {
test('clear() range with ' + name, async function (t) {
const db = await prepare()
await db.clear(opts)
await verify(t, db, expected)
return db.close()
})
}
async function prepare (t) {
const db = testCommon.factory()
await db.open()
await db.batch(data.map(function ({ key, value }) {
return { type: 'put', key, value }
}))
return db
}
async function verify (t, db, expected) {
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const entries = await it.all()
t.is(entries.length, expected.length, 'correct number of entries')
t.same(entries, expected.map(kv => [kv.key, kv.value]))
}
function exclude (data, start, end, expectedLength) {
data = data.slice()
const removed = data.splice(start, end - start + 1) // Inclusive
if (expectedLength != null) checkLength(removed, expectedLength)
return data
}
// For sanity checks on test arguments
function checkLength (arr, length) {
if (arr.length !== length) {
throw new RangeError('Expected ' + length + ' elements, got ' + arr.length)
}
return arr
}
rangeTest('no options', {}, [])
// Reversing has no effect without limit
rangeTest('reverse=true', {
reverse: true
}, [])
rangeTest('gte=00', {
gte: '00'
}, [])
rangeTest('gte=50', {
gte: '50'
}, data.slice(0, 50))
rangeTest('lte=50 and reverse=true', {
lte: '50',
reverse: true
}, data.slice(51))
rangeTest('gte=49.5 (midway)', {
gte: '49.5'
}, data.slice(0, 50))
rangeTest('gte=49999 (midway)', {
gte: '49999'
}, data.slice(0, 50))
rangeTest('lte=49.5 (midway) and reverse=true', {
lte: '49.5',
reverse: true
}, data.slice(50))
rangeTest('lt=49.5 (midway) and reverse=true', {
lt: '49.5',
reverse: true
}, data.slice(50))
rangeTest('lt=50 and reverse=true', {
lt: '50',
reverse: true
}, data.slice(50))
rangeTest('lte=50', {
lte: '50'
}, data.slice(51))
rangeTest('lte=50.5 (midway)', {
lte: '50.5'
}, data.slice(51))
rangeTest('lte=50555 (midway)', {
lte: '50555'
}, data.slice(51))
rangeTest('lt=50555 (midway)', {
lt: '50555'
}, data.slice(51))
rangeTest('gte=50.5 (midway) and reverse=true', {
gte: '50.5',
reverse: true
}, data.slice(0, 51))
rangeTest('gt=50.5 (midway) and reverse=true', {
gt: '50.5',
reverse: true
}, data.slice(0, 51))
rangeTest('gt=50 and reverse=true', {
gt: '50',
reverse: true
}, data.slice(0, 51))
// First key is actually '00' so clearing with lte: '0' should not remove it
rangeTest('lte=0', {
lte: '0'
}, data)
// First key is actually '00' so clearing with lt: '0' should not remove it
rangeTest('lt=0', {
lt: '0'
}, data)
rangeTest('gte=30 and lte=70', {
gte: '30',
lte: '70'
}, exclude(data, 30, 70))
// The gte and lte options should take precedence over gt and lt respectively.
rangeTest('gte=30 and lte=70 and gt=40 and lt=60', {
gte: '30',
lte: '70',
gt: '40',
lt: '60'
}, exclude(data, 30, 70))
// Also test the other way around: if gt and lt were to select a bigger range.
rangeTest('gte=30 and lte=70 and gt=20 and lt=80', {
gte: '30',
lte: '70',
gt: '20',
lt: '80'
}, exclude(data, 30, 70))
rangeTest('gt=29 and lt=71', {
gt: '29',
lt: '71'
}, exclude(data, 30, 70))
rangeTest('gte=30 and lte=70 and reverse=true', {
lte: '70',
gte: '30',
reverse: true
}, exclude(data, 30, 70))
rangeTest('gt=29 and lt=71 and reverse=true', {
lt: '71',
gt: '29',
reverse: true
}, exclude(data, 30, 70))
rangeTest('limit=20', {
limit: 20
}, data.slice(20))
rangeTest('limit=20 and gte=20', {
limit: 20,
gte: '20'
}, exclude(data, 20, 39, 20))
rangeTest('limit=20 and reverse=true', {
limit: 20,
reverse: true
}, data.slice(0, -20))
rangeTest('limit=20 and lte=79 and reverse=true', {
limit: 20,
lte: '79',
reverse: true
}, exclude(data, 60, 79, 20))
rangeTest('limit=-1 should clear whole database', {
limit: -1
}, [])
rangeTest('limit=0 should not clear anything', {
limit: 0
}, data)
rangeTest('lte after limit', {
limit: 20,
lte: '50'
}, data.slice(20))
rangeTest('lte before limit', {
limit: 50,
lte: '19'
}, data.slice(20))
rangeTest('gte after database end', {
gte: '9a'
}, data)
rangeTest('gt after database end', {
gt: '9a'
}, data)
rangeTest('lte after database end and reverse=true', {
lte: '9a',
reverse: true
}, [])
rangeTest('lte and gte after database and reverse=true', {
lte: '9b',
gte: '9a',
reverse: true
}, data)
rangeTest('lt and gt after database and reverse=true', {
lt: '9b',
gt: '9a',
reverse: true
}, data)
rangeTest('gt greater than lt', {
gt: '20',
lt: '10'
}, data)
rangeTest('gte greater than lte', {
gte: '20',
lte: '10'
}, data)
}
exports.all = function (test, testCommon) {
exports.range(test, testCommon)
}
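The suite above pins down how clear() interprets its range options: gte/lte and gt/lt bound the deletion, reverse only matters in combination with limit, and limit: -1 means unbounded. As a point of reference, a minimal usage sketch follows; it assumes an abstract-level compatible implementation such as memory-level is installed, which is an assumption of the sketch rather than a dependency of this suite.

'use strict'
const { MemoryLevel } = require('memory-level') // assumed implementation

async function clearRangeExample () {
  const db = new MemoryLevel()
  await db.open()
  await db.batch([
    { type: 'put', key: '00', value: 'a' },
    { type: 'put', key: '50', value: 'b' },
    { type: 'put', key: '99', value: 'c' }
  ])
  await db.clear({ gte: '50' }) // deletes '50' and '99'; '00' survives
  console.log(await db.keys().all()) // ['00']
  await db.clear({ reverse: true, limit: 1 }) // deletes at most one key, starting from the end
  return db.close()
}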

110
test/clear-test.js Normal file
View File

@ -0,0 +1,110 @@
'use strict'
const isBuffer = require('is-buffer')
const { Buffer } = require('buffer')
exports.clear = function (test, testCommon) {
makeTest('string', ['a', 'b'])
if (testCommon.supports.encodings.buffer) {
makeTest('buffer', [Buffer.from('a'), Buffer.from('b')])
makeTest('mixed', [Buffer.from('a'), 'b'])
// These keys would be equal when compared as utf8 strings
makeTest('non-utf8 buffer', [Buffer.from('80', 'hex'), Buffer.from('c0', 'hex')])
}
function makeTest (type, keys) {
test('simple clear() on ' + type + ' keys', async function (t) {
const db = testCommon.factory()
const ops = keys.map(function (key) {
return {
type: 'put',
key,
value: 'foo',
keyEncoding: isBuffer(key) ? 'buffer' : 'utf8'
}
})
await db.open()
await db.batch(ops)
t.is((await db.iterator().all()).length, keys.length, 'has entries')
await db.clear()
t.is((await db.iterator().all()).length, 0, 'has no entries')
return db.close()
})
}
// NOTE: adapted from levelup
for (const deferred of [false, true]) {
for (const [gte, keyEncoding] of [['"b"', 'utf8'], ['b', 'json']]) {
test(`clear() with ${keyEncoding} encoding (deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
await db.open()
await db.batch([
{ type: 'put', key: '"a"', value: 'a' },
{ type: 'put', key: '"b"', value: 'b' }
])
let promise
if (deferred) {
await db.close()
t.is(db.status, 'closed')
promise = db.open()
t.is(db.status, 'opening')
}
await db.clear({ gte, keyEncoding })
await promise
const keys = await db.keys().all()
t.same(keys, ['"a"'], 'got expected keys')
return db.close()
})
}
}
}
exports.events = function (test, testCommon) {
test('test clear() with options emits clear event', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
t.ok(db.supports.events.clear)
db.on('clear', function (options) {
t.same(options, { gt: 567, custom: 123 })
})
await db.clear({ gt: 567, custom: 123 })
return db.close()
})
test('test clear() without options emits clear event', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
t.ok(db.supports.events.clear)
db.on('clear', function (options) {
t.same(options, {})
})
await db.clear()
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.events(test, testCommon)
exports.clear(test, testCommon)
}
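Complementing the event tests above, a small sketch of observing the clear event; it assumes db is an already open abstract-level database created elsewhere (for example via testCommon.factory()).

// Sketch: the 'clear' event receives the verbatim options, including userland ones
async function observeClear (db) {
  db.on('clear', function (options) {
    console.log('cleared with', options) // e.g. { gte: 'a', lt: 'b', custom: 123 }
  })
  await db.clear({ gte: 'a', lt: 'b', custom: 123 })
}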

90
test/common.js Normal file
View File

@ -0,0 +1,90 @@
'use strict'
const kNone = Symbol('none')
const kProtected = Symbol('protected')
function testCommon (options) {
const factory = options.factory
const test = options.test
if (typeof factory !== 'function') {
throw new TypeError('factory must be a function')
}
if (typeof test !== 'function') {
throw new TypeError('test must be a function')
}
if (options.legacyRange != null) {
throw new Error('The legacyRange option has been removed')
}
let supports = kNone
return protect(options, {
test,
factory,
internals: options.internals || {},
// Expose manifest through testCommon to more easily skip tests based on
// supported features. Use a getter to only create a db once. Implicitly
// we also test that the manifest doesn't change after the db constructor.
get supports () {
if (supports === kNone) this.supports = this.factory().supports
return supports
},
// Prefer assigning early via manifest-test unless test.only() is used
// in which case we create the manifest on-demand. Copy it to be safe.
set supports (value) {
if (supports === kNone) supports = JSON.parse(JSON.stringify(value))
}
})
}
module.exports = testCommon
// To help with migration from abstract-leveldown:
// throw if legacy test suite options are used instead of db.supports
function protect (options, testCommon) {
const legacyOptions = [
['createIfMissing', true],
['errorIfExists', true],
['snapshots', true],
['seek', true],
['encodings', true],
['deferredOpen', true],
['streams', true],
['clear', true],
['getMany', true],
['bufferKeys', false],
['serialize', false],
['idempotentOpen', false],
['passiveOpen', false],
['openCallback', false]
]
Object.defineProperty(testCommon, kProtected, {
value: true
})
for (const [k, exists] of legacyOptions) {
const msg = exists ? 'has moved to db.supports' : 'has been removed'
// Options may be a testCommon instance
if (!options[kProtected] && k in options) {
throw new Error(`The test suite option '${k}' ${msg}`)
}
Object.defineProperty(testCommon, k, {
get () {
throw new Error(`The test suite option '${k}' ${msg}`)
},
set () {
throw new Error(`The test suite option '${k}' ${msg}`)
}
})
}
return testCommon
}
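To make the wiring concrete: common.js only needs a test function and a factory, and everything else (including db.supports) is derived from the databases that the factory creates. A hypothetical consumer, written from a file inside test/, could look like the sketch below; tape and memory-level are assumptions of the sketch, not requirements of common.js.

'use strict'
const test = require('tape') // assumed test runner
const { MemoryLevel } = require('memory-level') // assumed implementation
const testCommon = require('./common')({
  test,
  factory: (options) => new MemoryLevel(options)
})

// Individual suites can then be run against it:
require('./clear-test').all(test, testCommon)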

209
test/deferred-open-test.js Normal file
View File

@ -0,0 +1,209 @@
'use strict'
const { DeferredIterator } = require('../lib/deferred-iterator')
exports.all = function (test, testCommon) {
async function verifyValues (t, db, entries) {
const promises = []
for (let i = 1; i <= entries; i++) {
promises.push(db.get('k' + i).then((v) => {
t.is(v, 'v' + i, 'value is ok')
t.is(db.status, 'open', 'status is ok')
}))
}
await Promise.all(promises)
t.is(await db.get('k' + (entries + 1)), undefined, 'not found')
}
// NOTE: copied from levelup
test('deferred open(): batch() on new database', async function (t) {
    // Create database, which opens itself in a next tick
const db = testCommon.factory()
const entries = 3
const ops = []
    // Add entries with batch([]); these should be deferred until the database is actually open
for (let i = 1; i <= entries; i++) {
ops.push({ type: 'put', key: 'k' + i, value: 'v' + i })
}
t.is(db.status, 'opening')
await db.batch(ops)
await verifyValues(t, db, entries)
return db.close()
})
// NOTE: copied from levelup
test('deferred open(): value of deferred operation is not stringified', async function (t) {
const db = testCommon.factory({ valueEncoding: 'json' })
t.is(db.status, 'opening')
await db.put('key', { thing: 2 })
t.is(db.status, 'open')
t.same(await db.get('key'), { thing: 2 })
return db.close()
})
// NOTE: copied from levelup
test('deferred open(): key of deferred operation is not stringified', async function (t) {
const db = testCommon.factory({ keyEncoding: 'json' })
t.is(db.status, 'opening')
await db.put({ thing: 2 }, 'value')
t.is(db.status, 'open')
t.same(await db.keys().all(), [{ thing: 2 }])
return db.close()
})
// NOTE: copied from deferred-leveldown
// TODO: move to iterator tests, if not already covered there
test('cannot operate on closed db', async function (t) {
t.plan(3)
const db = testCommon.factory()
await db.open()
await db.close()
try {
db.iterator()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
try {
db.keys()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
try {
db.values()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
})
// NOTE: copied from deferred-leveldown
// TODO: move to iterator tests, if not already covered there
test('cannot operate on closing db', async function (t) {
t.plan(3)
const db = testCommon.factory()
await db.open()
const promise = db.close()
try {
db.iterator()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
try {
db.keys()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
try {
db.values()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
}
return promise
})
// NOTE: copied from deferred-leveldown
// TODO: move to iterator tests, if not already covered there
test('deferred iterator - cannot operate on closed db', async function (t) {
t.plan(4)
const db = testCommon.factory()
const it = db.iterator({ gt: 'foo' })
await db.open()
await db.close()
t.ok(it instanceof DeferredIterator)
const promises = [
it.next().catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
it.nextv(10).catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
it.all().catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
// Was already closed
it.close().catch(function () {
t.fail('no close() error')
})
]
try {
it.seek('foo')
} catch (err) {
// Should *not* throw
t.fail(err)
}
return Promise.all(promises)
})
// NOTE: copied from deferred-leveldown
// TODO: move to iterator tests, if not already covered there
test('deferred iterator - cannot operate on closing db', async function (t) {
t.plan(4)
const db = testCommon.factory()
const it = db.iterator({ gt: 'foo' })
t.ok(it instanceof DeferredIterator)
await db.open()
const promises = [
db.close(),
it.next().catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
it.nextv(10).catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
it.all().catch(function (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
}),
// Is already closing
it.close().catch(function () {
t.fail('no close() error')
})
]
try {
it.seek('foo')
} catch (err) {
// Should *not* throw
t.fail(err)
}
return Promise.all(promises)
})
}
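The behaviour exercised above is that operations issued while the database is still opening are queued and replayed once the status becomes 'open', whereas a closed or closing database rejects (or, for iterators, throws) with LEVEL_DATABASE_NOT_OPEN. A minimal sketch, with createDb standing in for any abstract-level factory:

async function deferredExample (createDb) {
  const db = createDb() // db.status === 'opening'
  const pending = db.put('key', 'value') // deferred until the db has opened
  await db.open() // resolves after deferred operations have been replayed
  console.log(await db.get('key')) // 'value'
  await db.close()
  try {
    db.iterator() // throws synchronously once closed
  } catch (err) {
    console.log(err.code) // 'LEVEL_DATABASE_NOT_OPEN'
  }
  return pending
}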

65
test/del-test.js Normal file
View File

@ -0,0 +1,65 @@
'use strict'
const { illegalKeys, assertPromise } = require('./util')
const traits = require('./traits')
let db
exports.setUp = function (test, testCommon) {
test('del() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('del() with illegal keys', function (t) {
t.plan(illegalKeys.length * 2)
for (const { name, key } of illegalKeys) {
db.del(key).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
}
})
}
exports.del = function (test, testCommon) {
test('simple del()', async function (t) {
await db.put('foo', 'bar')
t.is(await db.get('foo'), 'bar')
t.is(await assertPromise(db.del('foo')), undefined, 'void promise')
t.is(await db.get('foo'), undefined, 'not found')
})
test('del() on non-existent key', async function (t) {
for (const key of ['nope', Math.random()]) {
t.is(await assertPromise(db.del(key)), undefined, 'void promise')
}
})
traits.open('del()', testCommon, async function (t, db) {
let emitted = false
db.once('write', () => { emitted = true })
t.is(await db.del('foo'), undefined, 'void promise')
    t.ok(emitted, 'del() on an open db emitted a write event')
})
traits.closed('del()', testCommon, async function (t, db) {
return db.del('foo')
})
}
exports.tearDown = function (test, testCommon) {
test('del() teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.del(test, testCommon)
exports.tearDown(test, testCommon)
}

View File

@ -0,0 +1,240 @@
'use strict'
const { Buffer } = require('buffer')
const textEncoder = new TextEncoder()
exports.all = function (test, testCommon) {
if (!testCommon.supports.encodings.buffer) return
// NOTE: adapted from levelup
test('put() and get() with buffer value and buffer valueEncoding', async function (t) {
const db = testCommon.factory()
await db.open()
await db.put('test', testBuffer(), { valueEncoding: 'buffer' })
t.same(await db.get('test', { valueEncoding: 'buffer' }), testBuffer())
return db.close()
})
// NOTE: adapted from levelup
test('put() and get() with buffer value and buffer valueEncoding in factory', async function (t) {
const db = testCommon.factory({ valueEncoding: 'buffer' })
await db.open()
await db.put('test', testBuffer())
t.same(await db.get('test'), testBuffer())
return db.close()
})
// NOTE: adapted from levelup
test('put() and get() with buffer key and buffer keyEncoding', async function (t) {
const db = testCommon.factory()
await db.open()
await db.put(testBuffer(), 'test', { keyEncoding: 'buffer' })
t.same(await db.get(testBuffer(), { keyEncoding: 'buffer' }), 'test')
return db.close()
})
// NOTE: adapted from levelup
test('put() and get() with buffer key and utf8 keyEncoding', async function (t) {
const db = testCommon.factory()
await db.open()
await db.put(Buffer.from('foo🐄'), 'test', { keyEncoding: 'utf8' })
t.same(await db.get(Buffer.from('foo🐄'), { keyEncoding: 'utf8' }), 'test')
return db.close()
})
// NOTE: adapted from levelup
test('put() and get() with string value and buffer valueEncoding', async function (t) {
const db = testCommon.factory()
await db.open()
await db.put('test', 'foo🐄', { valueEncoding: 'buffer' })
t.same(await db.get('test', { valueEncoding: 'buffer' }), Buffer.from('foo🐄'))
t.same(await db.get('test', { valueEncoding: 'utf8' }), 'foo🐄')
return db.close()
})
// NOTE: adapted from memdown
test('put() as string, get() as buffer and vice versa', async function (t) {
const db = testCommon.factory()
await db.open()
const enc = { keyEncoding: 'buffer', valueEncoding: 'buffer' }
const [a, b] = ['🐄', '🐄 says moo']
const promise1 = db.put(a, a).then(async () => {
const value = await db.get(Buffer.from(a), enc)
t.same(value, Buffer.from(a), 'got buffer value')
})
const promise2 = db.put(Buffer.from(b), Buffer.from(b), enc).then(async () => {
const value = await db.get(b)
t.same(value, b, 'got string value')
})
await Promise.all([promise1, promise2])
return db.close()
})
// NOTE: adapted from memdown
test('put() stringifies input to buffer', async function (t) {
const db = testCommon.factory()
await db.open()
await db.put(1, 2)
const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
const entries = await it.all()
t.same(entries[0][0], Buffer.from('1'), 'key was stringified')
t.same(entries[0][1], Buffer.from('2'), 'value was stringified')
return db.close()
})
// NOTE: adapted from memdown
test('put() as string, iterate as buffer', async function (t) {
const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
await db.open()
await db.put('🐄', '🐄')
const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
const entries = await it.all()
t.same(entries, [[Buffer.from('🐄'), Buffer.from('🐄')]])
return db.close()
})
// NOTE: adapted from memdown
test('put() as buffer, iterate as string', async function (t) {
const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
await db.open()
await db.put(Buffer.from('🐄'), Buffer.from('🐄'))
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const entries = await it.all()
t.same(entries, [['🐄', '🐄']])
return db.close()
})
test('put() as view, iterate as view', async function (t) {
const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' })
const cow = textEncoder.encode('🐄')
await db.open()
await db.put(cow, cow)
const it = db.iterator()
const entries = await it.all()
const key = Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array
const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow
t.same(entries, [[key, value]])
return db.close()
})
test('put() as string, iterate as view', async function (t) {
const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const cow = textEncoder.encode('🐄')
await db.open()
await db.put('🐄', '🐄')
const it = db.iterator({ keyEncoding: 'view', valueEncoding: 'view' })
const entries = await it.all()
const key = Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array
const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow
t.same(entries, [[key, value]])
return db.close()
})
test('put() as view, iterate as string', async function (t) {
const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' })
const cow = textEncoder.encode('🐄')
await db.open()
await db.put(cow, cow)
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
const entries = await it.all()
t.same(entries, [['🐄', '🐄']])
return db.close()
})
// NOTE: adapted from levelup
test('batch() with multiple puts with buffer valueEncoding per batch', async function (t) {
const db = testCommon.factory()
await db.open()
await db.batch([
{ type: 'put', key: 'foo', value: testBuffer() },
{ type: 'put', key: 'bar', value: testBuffer() },
{ type: 'put', key: 'baz', value: 'abazvalue' }
], { valueEncoding: 'buffer' })
t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer())
t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer())
t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue'))
return db.close()
})
test('batch() with multiple puts with buffer valueEncoding per operation', async function (t) {
const db = testCommon.factory()
await db.open()
await db.batch([
{ type: 'put', key: 'foo', value: testBuffer(), valueEncoding: 'buffer' },
{ type: 'put', key: 'bar', value: testBuffer(), valueEncoding: 'buffer' },
{ type: 'put', key: 'baz', value: 'abazvalue', valueEncoding: 'buffer' }
])
t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer())
t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer())
t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue'))
return db.close()
})
// NOTE: adapted from encoding-down
test('batch() with buffer encoding in factory', async function (t) {
const operations = [{
type: 'put',
key: Buffer.from([1, 2, 3]),
value: Buffer.from([4, 5, 6])
}, {
type: 'put',
key: Buffer.from([7, 8, 9]),
value: Buffer.from([10, 11, 12])
}]
const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
await db.open()
await db.batch(operations)
t.same(await db.get(operations[0].key), operations[0].value)
t.same(await db.get(operations[1].key), operations[1].value)
return db.close()
})
for (const keyEncoding of ['buffer', 'view']) {
// NOTE: adapted from memdown
test(`storage is byte-aware (${keyEncoding} encoding)`, async function (t) {
const db = testCommon.factory({ keyEncoding })
await db.open()
const one = Buffer.from('80', 'hex')
const two = Buffer.from('c0', 'hex')
t.ok(two.toString() === one.toString(), 'would be equal when not byte-aware')
t.ok(two.compare(one) > 0, 'but greater when byte-aware')
await db.put(one, 'one')
t.is(await db.get(one), 'one', 'value one ok')
await db.put(two, 'two')
t.is(await db.get(one), 'one', 'value one did not change')
return db.close()
})
}
}
function testBuffer () {
return Buffer.from('0080c0ff', 'hex')
}
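These tests establish that encodings can be chosen per database, per operation and per iterator, and that 'utf8', 'buffer' and 'view' are interchangeable views of the same stored bytes. A short sketch of per-operation encodings; db is assumed to be open and to support the 'buffer' encoding.

const { Buffer } = require('buffer')

async function bufferExample (db) {
  await db.put('key', Buffer.from('abc'), { valueEncoding: 'buffer' })
  const asBuffer = await db.get('key', { valueEncoding: 'buffer' }) // Buffer with bytes 61 62 63
  const asString = await db.get('key', { valueEncoding: 'utf8' }) // 'abc'
  return [asBuffer, asString]
}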

View File

@ -0,0 +1,86 @@
'use strict'
// NOTE: copied from levelup
exports.all = function (test, testCommon) {
for (const deferred of [false, true]) {
test(`custom encoding: simple-object values (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [
{ key: '0', value: 0 },
{ key: '1', value: 1 },
{ key: 'string', value: 'a string' },
{ key: 'true', value: true },
{ key: 'false', value: false }
])
})
test(`custom encoding: simple-object keys (deferred: ${deferred})`, async function (t) {
// Test keys that would be considered the same with default utf8 encoding.
// Because String([1]) === String(1).
return run(t, deferred, [
{ value: '0', key: [1] },
{ value: '1', key: 1 },
{ value: 'string', key: 'a string' },
{ value: 'true', key: true },
{ value: 'false', key: false }
])
})
test(`custom encoding: complex-object values (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [{
key: '0',
value: {
foo: 'bar',
bar: [1, 2, 3],
bang: { yes: true, no: false }
}
}])
})
test(`custom encoding: complex-object keys (deferred: ${deferred})`, async function (t) {
// Test keys that would be considered the same with default utf8 encoding.
// Because String({}) === String({}) === '[object Object]'.
return run(t, deferred, [{
value: '0',
key: {
foo: 'bar',
bar: [1, 2, 3],
bang: { yes: true, no: false }
}
}, {
value: '1',
key: {
foo: 'different',
bar: [1, 2, 3],
bang: { yes: true, no: false }
}
}])
})
}
async function run (t, deferred, entries) {
const customEncoding = {
encode: JSON.stringify,
decode: JSON.parse,
format: 'utf8',
type: 'custom'
}
const db = testCommon.factory({
keyEncoding: customEncoding,
valueEncoding: customEncoding
})
const operations = entries.map(entry => ({ type: 'put', ...entry }))
if (!deferred) await db.open()
await db.batch(operations)
await Promise.all(entries.map(testGet))
async function testGet (entry) {
t.same(await db.get(entry.key), entry.value)
}
return db.close()
}
}
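A custom encoding is a plain object with encode, decode, format and type. The sketch below mirrors the one used above and can also be passed per operation instead of in the constructor; the type name is arbitrary.

const jsonOverUtf8 = {
  encode: JSON.stringify,
  decode: JSON.parse,
  format: 'utf8', // representation handed to the underlying store
  type: 'custom-json' // any unique name
}

async function customEncodingExample (db) {
  const opts = { keyEncoding: jsonOverUtf8, valueEncoding: jsonOverUtf8 }
  await db.put({ composite: 'key' }, { some: 'value' }, opts)
  return db.get({ composite: 'key' }, opts) // { some: 'value' }
}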

View File

@ -0,0 +1,67 @@
'use strict'
let db
let keySequence = 0
const testKey = () => 'test' + (++keySequence)
exports.all = function (test, testCommon) {
test('decode error setup', async function (t) {
db = testCommon.factory()
return db.open()
})
// NOTE: adapted from encoding-down
test('decode error is wrapped by get() and getMany()', async function (t) {
t.plan(4)
const key = testKey()
const valueEncoding = {
encode: (v) => v,
decode: (v) => { throw new Error('decode error xyz') },
format: 'utf8'
}
await db.put(key, 'bar', { valueEncoding })
try {
await db.get(key, { valueEncoding })
} catch (err) {
t.is(err.code, 'LEVEL_DECODE_ERROR')
t.is(err.cause.message, 'decode error xyz')
}
try {
await db.getMany(['other-key', key], { valueEncoding })
} catch (err) {
t.is(err.code, 'LEVEL_DECODE_ERROR')
t.is(err.cause.message, 'decode error xyz')
}
})
// NOTE: adapted from encoding-down
test('get() and getMany() yield decode error if stored value is invalid', async function (t) {
t.plan(4)
const key = testKey()
await db.put(key, 'this {} is [] not : json', { valueEncoding: 'utf8' })
try {
await db.get(key, { valueEncoding: 'json' })
} catch (err) {
t.is(err.code, 'LEVEL_DECODE_ERROR')
t.is(err.cause.name, 'SyntaxError') // From JSON.parse()
}
try {
await db.getMany(['other-key', key], { valueEncoding: 'json' })
} catch (err) {
t.is(err.code, 'LEVEL_DECODE_ERROR')
t.is(err.cause.name, 'SyntaxError') // From JSON.parse()
}
})
test('decode error teardown', async function (t) {
return db.close()
})
}
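The detail worth remembering from these tests is the error shape: decoding failures reject with code LEVEL_DECODE_ERROR and carry the original error in cause. A sketch of handling that, with db assumed open:

async function safeGet (db, key) {
  try {
    return await db.get(key, { valueEncoding: 'json' })
  } catch (err) {
    if (err.code === 'LEVEL_DECODE_ERROR') {
      // err.cause is the underlying error, e.g. a SyntaxError from JSON.parse()
      console.warn('stored value is not valid JSON:', err.cause.message)
      return undefined
    }
    throw err
  }
}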

View File

@ -0,0 +1,69 @@
'use strict'
// NOTE: copied from levelup
exports.all = function (test, testCommon) {
for (const deferred of [false, true]) {
test(`json encoding: simple-object values (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [
{ key: '0', value: 0 },
{ key: '1', value: 1 },
{ key: '2', value: 'a string' },
{ key: '3', value: true },
{ key: '4', value: false }
])
})
test(`json encoding: simple-object keys (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [
{ value: 'string', key: 'a string' },
{ value: '0', key: 0 },
{ value: '1', key: 1 },
{ value: 'false', key: false },
{ value: 'true', key: true }
])
})
test(`json encoding: complex-object values (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [{
key: '0',
value: {
foo: 'bar',
bar: [1, 2, 3],
bang: { yes: true, no: false }
}
}])
})
test(`json encoding: complex-object keys (deferred: ${deferred})`, async function (t) {
return run(t, deferred, [{
value: '0',
key: {
foo: 'bar',
bar: [1, 2, 3],
bang: { yes: true, no: false }
}
}])
})
}
async function run (t, deferred, entries) {
const db = testCommon.factory({ keyEncoding: 'json', valueEncoding: 'json' })
const operations = entries.map(entry => ({ type: 'put', ...entry }))
if (!deferred) await db.open()
await db.batch(operations)
await Promise.all([...entries.map(testGet), testIterator()])
return db.close()
async function testGet (entry) {
t.same(await db.get(entry.key), entry.value)
}
async function testIterator () {
const result = await db.iterator().all()
t.same(result, entries.map(kv => [kv.key, kv.value]))
}
}
}

119
test/encoding-test.js Normal file
View File

@ -0,0 +1,119 @@
'use strict'
let db
let keySequence = 0
const testKey = () => 'test' + (++keySequence)
// TODO: test encoding options on every method. This is largely
// covered (indirectly) by other tests, but a dedicated property-
// based test for each would be good to have.
exports.all = function (test, testCommon) {
test('encoding setup', async function (t) {
db = testCommon.factory()
return db.open()
})
// NOTE: adapted from encoding-down
test('encodings default to utf8', function (t) {
t.is(db.keyEncoding().commonName, 'utf8')
t.is(db.valueEncoding().commonName, 'utf8')
t.end()
})
test('can set encoding options in factory', async function (t) {
const dbs = []
for (const name of ['buffer', 'view', 'json']) {
if (!testCommon.supports.encodings[name]) continue
const db1 = testCommon.factory({ keyEncoding: name })
const db2 = testCommon.factory({ valueEncoding: name })
const db3 = testCommon.factory({ keyEncoding: name, valueEncoding: name })
t.is(db1.keyEncoding().commonName, name)
t.is(db1.keyEncoding(), db1.keyEncoding(name))
t.is(db1.valueEncoding().commonName, 'utf8')
t.is(db1.valueEncoding(), db1.valueEncoding('utf8'))
t.is(db2.keyEncoding().commonName, 'utf8')
t.is(db2.keyEncoding(), db2.keyEncoding('utf8'))
t.is(db2.valueEncoding().commonName, name)
t.is(db2.valueEncoding(), db2.valueEncoding(name))
t.is(db3.keyEncoding().commonName, name)
t.is(db3.keyEncoding(), db3.keyEncoding(name))
t.is(db3.valueEncoding().commonName, name)
t.is(db3.valueEncoding(), db3.valueEncoding(name))
dbs.push(db1, db2, db3)
}
await Promise.all(dbs.map(db => db.close()))
})
// NOTE: adapted from encoding-down
for (const deferred of [false, true]) {
test(`default utf8 encoding stringifies numbers (deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
if (!deferred) await db.open()
await db.put(1, 2)
t.is(await db.get(1), '2')
return db.close()
})
}
// NOTE: adapted from encoding-down
test('can decode from string to json', async function (t) {
const key = testKey()
const data = { thisis: 'json' }
await db.put(key, JSON.stringify(data), { valueEncoding: 'utf8' })
t.same(await db.get(key, { valueEncoding: 'json' }), data, 'got parsed object')
})
// NOTE: adapted from encoding-down
test('can decode from json to string', async function (t) {
const data = { thisis: 'json' }
const key = testKey()
await db.put(key, data, { valueEncoding: 'json' })
t.same(await db.get(key, { valueEncoding: 'utf8' }), JSON.stringify(data), 'got unparsed JSON string')
})
// NOTE: adapted from encoding-down
test('getMany() skips decoding not-found values', async function (t) {
t.plan(2)
const valueEncoding = {
encode: JSON.stringify,
decode (value) {
t.is(value, JSON.stringify(data))
return JSON.parse(value)
},
format: 'utf8'
}
const data = { beep: 'boop' }
const key = testKey()
await db.put(key, data, { valueEncoding })
t.same(await db.getMany([key, testKey()], { valueEncoding }), [data, undefined])
})
// NOTE: adapted from memdown
test('number keys with utf8 encoding', async function (t) {
const db = testCommon.factory()
const numbers = [-Infinity, 0, 12, 2, +Infinity]
await db.open()
await db.batch(numbers.map(key => ({ type: 'put', key, value: 'value' })))
const keys = await db.keys({ keyEncoding: 'utf8' }).all()
t.same(keys, numbers.map(String), 'sorts lexicographically')
return db.close()
})
test('encoding teardown', async function (t) {
return db.close()
})
}
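Besides being set up front, encodings can be overridden per call and inspected through keyEncoding() and valueEncoding(). A sketch of writing a pre-serialized string and reading it back as JSON, as exercised above (db assumed open with default encodings):

async function transcodeExample (db) {
  console.log(db.keyEncoding().commonName, db.valueEncoding().commonName) // 'utf8' 'utf8'
  await db.put('config', JSON.stringify({ thisis: 'json' }), { valueEncoding: 'utf8' })
  return db.get('config', { valueEncoding: 'json' }) // { thisis: 'json' }
}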

132
test/events/write.js Normal file
View File

@ -0,0 +1,132 @@
'use strict'
module.exports = function (test, testCommon) {
for (const deferred of [false, true]) {
// Chained batch does not support deferred open
const batchMethods = deferred ? ['batch'] : ['batch', 'chained batch']
const allMethods = batchMethods.concat(['singular'])
for (const method of allMethods) {
// db.put() and db.del() do not support the sublevel option
for (const withSublevel of (method === 'singular' ? [false] : [false, true])) {
test(`db emits write event for ${method} put operation (deferred: ${deferred}, sublevel: ${withSublevel})`, async function (t) {
t.plan(1)
const db = testCommon.factory()
const sublevel = withSublevel ? db.sublevel('abc') : null
if (!deferred) {
await db.open()
if (withSublevel) await sublevel.open()
}
// Note: may return a transcoder encoding, which unfortunately makes the below
// assertions a little less precise (i.e. we can't compare output data). But
// in places where we expect encoded data, we can use strings (rather than
// numbers) as the input to encode(), which'll tell us that encoding did happen.
const dbEncoding = db.keyEncoding('utf8')
const subEncoding = withSublevel ? sublevel.keyEncoding('utf8') : null
db.on('write', function (ops) {
t.same(ops, [
{
type: 'put',
key: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : 456,
value: withSublevel ? subEncoding.encode('99') : 99,
keyEncoding: db.keyEncoding(withSublevel ? subEncoding.format : 'utf8'),
valueEncoding: db.valueEncoding(withSublevel ? subEncoding.format : 'utf8'),
encodedKey: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : dbEncoding.encode('456'),
encodedValue: (withSublevel ? subEncoding : dbEncoding).encode('99'),
custom: 123,
sublevel: null // Should be unset
}
], 'got write event')
})
switch (method) {
case 'batch':
await db.batch([{ type: 'put', key: 456, value: 99, custom: 123, sublevel }])
break
case 'chained batch':
await db.batch().put(456, 99, { custom: 123, sublevel }).write()
break
case 'singular':
// Does not support sublevel option
await db.put(456, 99, { custom: 123, sublevel })
break
}
return db.close()
})
test(`db emits write event for ${method} del operation (deferred: ${deferred}, sublevel: ${withSublevel})`, async function (t) {
t.plan(1)
const db = testCommon.factory()
const sublevel = withSublevel ? db.sublevel('abc') : null
if (!deferred) {
await db.open()
if (withSublevel) await sublevel.open()
}
// See notes above, in the put test
const dbEncoding = db.keyEncoding('utf8')
const subEncoding = withSublevel ? sublevel.keyEncoding('utf8') : null
db.on('write', function (ops) {
t.same(ops, [
{
type: 'del',
key: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : 456,
keyEncoding: db.keyEncoding(withSublevel ? subEncoding.format : 'utf8'),
encodedKey: withSublevel ? sublevel.prefixKey(subEncoding.encode('456'), subEncoding.format, true) : dbEncoding.encode('456'),
custom: 123,
sublevel: null // Should be unset
}
], 'got write event')
})
switch (method) {
case 'batch':
await db.batch([{ type: 'del', key: 456, custom: 123, sublevel }])
break
case 'chained batch':
await db.batch().del(456, { custom: 123, sublevel }).write()
break
case 'singular':
// Does not support sublevel option
await db.del(456, { custom: 123, sublevel })
break
}
return db.close()
})
}
}
for (const method of batchMethods) {
test(`db emits write event for multiple ${method} operations (deferred: ${deferred})`, async function (t) {
t.plan(1)
const db = testCommon.factory()
if (!deferred) await db.open()
db.on('write', function (ops) {
t.same(ops.map(op => op.key), ['a', 'b'], 'got multiple operations in one event')
})
switch (method) {
case 'batch':
await db.batch([{ type: 'put', key: 'a', value: 'foo' }, { type: 'del', key: 'b' }])
break
case 'chained batch':
await db.batch().put('a', 'foo').del('b').write()
break
}
return db.close()
})
}
}
}
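The write event fires once per batch(), chained batch write() or singular put()/del(), with one entry per operation that carries both the user-facing key and value and their encoded forms. A minimal listener sketch (db assumed open); the logging is only an illustration of what a consumer such as a changelog might do.

function logWrites (db) {
  db.on('write', function (operations) {
    for (const op of operations) {
      // op.type is 'put' or 'del'; encodedKey/encodedValue hold the stored representation
      console.log(op.type, op.key, op.type === 'put' ? op.value : undefined)
    }
  })
}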

36
test/factory-test.js Normal file
View File

@ -0,0 +1,36 @@
'use strict'
module.exports = function (test, testCommon) {
test('testCommon.factory() returns valid database', function (t) {
t.plan(6)
const db = testCommon.factory()
const kEvent = Symbol('event')
// Avoid instanceof, for levelup compatibility tests
t.is(typeof db, 'object', 'is an object')
t.isNot(db, null, 'is not null')
t.is(typeof db.open, 'function', 'has open() method')
t.is(typeof db.on, 'function', 'has on() method')
t.is(typeof db.emit, 'function', 'has emit() method')
db.once(kEvent, (v) => t.is(v, 'foo', 'got event'))
db.emit(kEvent, 'foo')
})
test('testCommon.factory() returns a unique database', async function (t) {
const db1 = testCommon.factory()
const db2 = testCommon.factory()
t.isNot(db1, db2, 'unique instances')
await db1.open()
await db2.open()
await db1.put('key', 'value')
const value = await db2.get('key')
t.is(value, undefined, 'db2 should be empty')
return Promise.all([db1.close(), db2.close()])
})
}
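For a factory to pass these tests it must return a fresh, isolated database on every call. A hypothetical factory for a disk-based implementation might allocate a unique temporary directory per database; DatabaseCtor is a placeholder for any constructor that takes a location.

const fs = require('fs')
const os = require('os')
const path = require('path')

function makeFactory (DatabaseCtor) {
  return function factory (options) {
    // A unique directory per call keeps databases isolated from each other
    const location = fs.mkdtempSync(path.join(os.tmpdir(), 'db-test-'))
    return new DatabaseCtor(location, options)
  }
}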

157
test/get-many-test.js Normal file
View File

@ -0,0 +1,157 @@
'use strict'
const { illegalKeys, assertPromise } = require('./util')
const traits = require('./traits')
let db
/**
* @param {import('tape')} test
*/
exports.setUp = function (test, testCommon) {
test('getMany() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
/**
* @param {import('tape')} test
*/
exports.args = function (test, testCommon) {
test('getMany() requires an array argument', function (t) {
t.plan(6)
db.getMany().catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
db.getMany('foo').catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
db.getMany('foo', {}).catch(function (err) {
t.is(err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
})
test('getMany() with illegal keys', function (t) {
t.plan(illegalKeys.length * 4)
for (const { name, key } of illegalKeys) {
db.getMany([key]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
db.getMany(['valid', key]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error (second key)')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (second key)')
})
}
})
}
/**
* @param {import('tape')} test
*/
exports.getMany = function (test, testCommon) {
test('simple getMany()', async function (t) {
await db.put('foo', 'bar')
t.same(await assertPromise(db.getMany(['foo'])), ['bar'])
t.same(await db.getMany(['foo'], {}), ['bar']) // same but with {}
t.same(await db.getMany(['foo'], { valueEncoding: 'utf8' }), ['bar'])
})
test('getMany() with multiple keys', async function (t) {
await db.put('beep', 'boop')
t.same(await db.getMany(['foo', 'beep']), ['bar', 'boop'])
t.same(await db.getMany(['beep', 'foo']), ['boop', 'bar'], 'maintains order of input keys')
})
test('empty getMany()', async function (t) {
t.same(await db.getMany([]), [])
const encodings = Object.keys(db.supports.encodings)
.filter(k => db.supports.encodings[k])
for (const valueEncoding of encodings) {
t.same(await db.getMany([], { valueEncoding }), [])
}
})
test('getMany() on non-existent keys', async function (t) {
t.same(await db.getMany(['nope', 'another']), [undefined, undefined])
t.same(await db.getMany(['beep', 'another']), ['boop', undefined])
t.same(await db.getMany(['nope', 'beep', Math.random()]), [undefined, 'boop', undefined])
const encodings = Object.keys(db.supports.encodings)
.filter(k => db.supports.encodings[k])
for (const valueEncoding of encodings) {
t.same(await db.getMany(['nope', 'another'], { valueEncoding }), [undefined, undefined])
}
})
test('simultaneous getMany()', async function (t) {
t.plan(20)
await db.put('hello', 'world')
const promises = []
for (let i = 0; i < 10; ++i) {
promises.push(db.getMany(['hello']).then(function (values) {
t.same(values, ['world'])
}))
}
for (let i = 0; i < 10; ++i) {
promises.push(db.getMany(['non-existent']).then(function (values) {
t.same(values, [undefined])
}))
}
return Promise.all(promises)
})
traits.open('getMany()', testCommon, async function (t, db) {
t.same(await assertPromise(db.getMany(['foo'])), [undefined])
})
traits.closed('getMany()', testCommon, async function (t, db) {
return db.getMany(['foo'])
})
// Also test empty array because it has a fast-path
traits.open('getMany() with empty array', testCommon, async function (t, db) {
t.same(await assertPromise(db.getMany([])), [])
})
traits.closed('getMany() with empty array', testCommon, async function (t, db) {
return db.getMany([])
})
}
/**
* @param {import('tape')} test
*/
exports.tearDown = function (test, testCommon) {
test('getMany() teardown', async function (t) {
return db.close()
})
}
/**
* @param {import('tape')} test
*/
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.getMany(test, testCommon)
exports.tearDown(test, testCommon)
}
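getMany() resolves to an array aligned with the input keys, with undefined in the positions of missing keys, which makes it straightforward to zip results back onto their keys. A small sketch (db assumed open):

async function getAsMap (db, keys) {
  const values = await db.getMany(keys)
  return new Map(keys.map((key, i) => [key, values[i]])) // undefined for missing keys
}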

83
test/get-test.js Normal file
View File

@ -0,0 +1,83 @@
'use strict'
const { illegalKeys, assertPromise } = require('./util')
const traits = require('./traits')
let db
exports.setUp = function (test, testCommon) {
test('get() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('get() with illegal keys', function (t) {
t.plan(illegalKeys.length * 2)
for (const { name, key } of illegalKeys) {
db.get(key).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
}
})
}
exports.get = function (test, testCommon) {
test('simple get()', async function (t) {
await db.put('foo', 'bar')
t.is(await assertPromise(db.get('foo')), 'bar')
t.is(await db.get('foo', {}), 'bar') // same but with {}
t.is(await db.get('foo', { valueEncoding: 'utf8' }), 'bar')
})
test('get() on non-existent key', async function (t) {
for (const key of ['non-existent', Math.random()]) {
t.is(await assertPromise(db.get(key)), undefined, 'not found')
}
})
test('simultaneous get()', async function (t) {
t.plan(20)
await db.put('hello', 'world')
const promises = []
for (let i = 0; i < 10; ++i) {
promises.push(db.get('hello').then((value) => {
t.is(value, 'world')
}))
}
for (let i = 0; i < 10; ++i) {
promises.push(db.get('non-existent').then((value) => {
t.is(value, undefined, 'not found')
}))
}
return Promise.all(promises)
})
traits.open('get()', testCommon, async function (t, db) {
    t.is(await assertPromise(db.get('foo')), undefined, 'not found')
})
traits.closed('get()', testCommon, async function (t, db) {
return db.get('foo')
})
}
exports.tearDown = function (test, testCommon) {
test('get() teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.get(test, testCommon)
exports.tearDown(test, testCommon)
}

144
test/has-many-test.js Normal file
View File

@ -0,0 +1,144 @@
'use strict'
const { illegalKeys } = require('./util')
const traits = require('./traits')
let db
/**
* @param {import('tape')} test
*/
exports.setUp = function (test, testCommon) {
test('hasMany() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
/**
* @param {import('tape')} test
*/
exports.args = function (test, testCommon) {
test('hasMany() requires an array argument', function (t) {
t.plan(6)
db.hasMany().catch(function (err) {
t.is(err && err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
db.hasMany('foo').catch(function (err) {
t.is(err && err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
db.hasMany('foo', {}).catch(function (err) {
t.is(err && err.name, 'TypeError')
t.is(err && err.message, "The first argument 'keys' must be an array")
})
})
test('hasMany() with illegal keys', function (t) {
t.plan(illegalKeys.length * 4)
for (const { name, key } of illegalKeys) {
db.hasMany([key]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
db.hasMany(['valid', key]).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error (second key)')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (second key)')
})
}
})
}
/**
* @param {import('tape')} test
*/
exports.hasMany = function (test, testCommon) {
test('simple hasMany()', async function (t) {
await db.put('foo', 'bar')
t.same(await db.hasMany(['foo']), [true])
t.same(await db.hasMany(['foo'], {}), [true]) // same but with {}
t.same(await db.hasMany(['beep']), [false])
await db.put('beep', 'boop')
t.same(await db.hasMany(['beep']), [true])
t.same(await db.hasMany(['foo', 'beep']), [true, true])
t.same(await db.hasMany(['aaa', 'beep']), [false, true])
t.same(await db.hasMany(['beep', 'aaa']), [true, false], 'maintains order of input keys')
})
test('empty hasMany()', async function (t) {
t.same(await db.hasMany([]), [])
const encodings = Object.keys(db.supports.encodings)
.filter(k => db.supports.encodings[k])
for (const valueEncoding of encodings) {
t.same(await db.hasMany([], { valueEncoding }), [])
}
})
test('simultaneous hasMany()', async function (t) {
t.plan(20)
await db.put('hello', 'world')
const promises = []
for (let i = 0; i < 10; ++i) {
promises.push(db.hasMany(['hello']).then(function (values) {
t.same(values, [true])
}))
}
for (let i = 0; i < 10; ++i) {
promises.push(db.hasMany(['non-existent']).then(function (values) {
t.same(values, [false])
}))
}
return Promise.all(promises)
})
traits.open('hasMany()', testCommon, async function (t, db) {
t.same(await db.hasMany(['foo']), [false])
})
traits.closed('hasMany()', testCommon, async function (t, db) {
return db.hasMany(['foo'])
})
// Also test empty array because it has a fast-path
traits.open('hasMany() with empty array', testCommon, async function (t, db) {
t.same(await db.hasMany([]), [])
})
traits.closed('hasMany() with empty array', testCommon, async function (t, db) {
return db.hasMany([])
})
}
/**
* @param {import('tape')} test
*/
exports.tearDown = function (test, testCommon) {
test('hasMany() teardown', async function (t) {
return db.close()
})
}
/**
* @param {import('tape')} test
*/
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.hasMany(test, testCommon)
exports.tearDown(test, testCommon)
}

81
test/has-test.js Normal file
View File

@ -0,0 +1,81 @@
'use strict'
const { illegalKeys } = require('./util')
const traits = require('./traits')
let db
exports.setUp = function (test, testCommon) {
test('has() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('has() with illegal keys', function (t) {
t.plan(illegalKeys.length * 2)
for (const { name, key } of illegalKeys) {
db.has(key).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
}
})
}
exports.has = function (test, testCommon) {
test('simple has()', async function (t) {
await db.put('foo', 'bar')
t.is(await db.has('foo'), true)
t.is(await db.has('foo', {}), true) // same but with {}
for (const key of ['non-existent', Math.random()]) {
t.is(await db.has(key), false, 'not found')
}
})
test('simultaneous has()', async function (t) {
t.plan(20)
await db.put('hello', 'world')
const promises = []
for (let i = 0; i < 10; ++i) {
promises.push(db.has('hello').then((value) => {
t.is(value, true, 'found')
}))
}
for (let i = 0; i < 10; ++i) {
promises.push(db.has('non-existent').then((value) => {
t.is(value, false, 'not found')
}))
}
return Promise.all(promises)
})
traits.open('has()', testCommon, async function (t, db) {
t.is(await db.has('foo'), false)
})
traits.closed('has()', testCommon, async function (t, db) {
return db.has('foo')
})
}
exports.tearDown = function (test, testCommon) {
test('has() teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.has(test, testCommon)
exports.tearDown(test, testCommon)
}
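has() and hasMany() answer existence questions without decoding values, mirroring get() and getMany(). A small sketch (db assumed open):

async function existsAll (db, keys) {
  const flags = await db.hasMany(keys) // e.g. [true, false], aligned with keys
  return flags.every(Boolean)
}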

57
test/hooks/newsub.js Normal file
View File

@ -0,0 +1,57 @@
'use strict'
const shared = require('./shared')
module.exports = function (test, testCommon) {
shared(test, testCommon, 'newsub')
test('newsub hook function receives sublevel and default options', async function (t) {
t.plan(3)
const db = testCommon.factory()
let instance
db.hooks.newsub.add(function (sublevel, options) {
instance = sublevel
// Recursing is the main purpose of this hook
t.ok(sublevel.hooks, 'can access sublevel hooks')
t.same(options, { separator: '!' })
})
t.ok(db.sublevel('sub') === instance)
return db.close()
})
test('newsub hook function receives userland options', async function (t) {
t.plan(1)
const db = testCommon.factory()
db.hooks.newsub.add(function (sublevel, options) {
t.same(options, { separator: '!', userland: 123 })
})
db.sublevel('sub', { userland: 123 })
return db.close()
})
test('db wraps error from newsub hook function', async function (t) {
t.plan(2)
const db = testCommon.factory()
db.hooks.newsub.add(function (sublevel, options) {
throw new Error('test')
})
try {
db.sublevel('sub')
} catch (err) {
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.message, 'test')
}
return db.close()
})
}
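The newsub hook runs synchronously whenever a sublevel is created and receives the sublevel along with its effective options; as the comment above notes, recursing is its main purpose. A sketch of propagating a prewrite hook to every (nested) sublevel; the function name is illustrative.

function installEverywhere (db, prewriteFn) {
  db.hooks.prewrite.add(prewriteFn)
  db.hooks.newsub.add(function (sublevel, options) {
    installEverywhere(sublevel, prewriteFn) // covers sublevels of sublevels too
  })
}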

229
test/hooks/postopen.js Normal file
View File

@ -0,0 +1,229 @@
'use strict'
const shared = require('./shared')
module.exports = function (test, testCommon) {
shared(test, testCommon, 'postopen')
test('postopen hook function is called before deferred operations and open event', async function (t) {
t.plan(5)
const db = testCommon.factory()
const order = []
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
order.push('postopen')
})
db.on('opening', function () {
t.is(db.status, 'opening')
order.push('opening')
})
db.defer(function () {
t.is(db.status, 'open')
order.push('undefer')
})
db.on('open', function () {
t.is(db.status, 'open')
order.push('open')
})
await db.open()
t.same(order, ['opening', 'postopen', 'undefer', 'open'])
return db.close()
})
test('postopen hook functions are called sequentially', async function (t) {
t.plan(1)
const db = testCommon.factory()
let waited = false
db.hooks.postopen.add(async function (options) {
return new Promise(function (resolve) {
setTimeout(function () {
waited = true
resolve()
}, 100)
})
})
db.hooks.postopen.add(async function (options) {
t.ok(waited)
})
await db.open()
return db.close()
})
test('postopen hook function receives options from constructor', async function (t) {
t.plan(1)
const db = testCommon.factory({ userland: 123 })
db.hooks.postopen.add(async function (options) {
t.same(options, {
createIfMissing: true,
errorIfExists: false,
userland: 123
})
})
await db.open()
return db.close()
})
test('postopen hook function receives options from open()', async function (t) {
t.plan(1)
const db = testCommon.factory()
db.hooks.postopen.add(async function (options) {
t.same(options, {
createIfMissing: true,
errorIfExists: false,
userland: 456
})
})
await db.open({ userland: 456 })
return db.close()
})
test('error from postopen hook function closes the db', async function (t) {
t.plan(4)
const db = testCommon.factory()
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
throw new Error('test')
})
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.message, 'test')
}
})
test('error from postopen hook function must be an error', async function (t) {
t.plan(5)
const db = testCommon.factory()
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
// eslint-disable-next-line prefer-promise-reject-errors
return Promise.reject(null)
})
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.name, 'TypeError')
t.is(err.cause.message, 'Promise rejection reason must be an Error, received null')
}
})
test('error from postopen hook function must be an error, but it can be cross-realm', async function (t) {
t.plan(5)
class FakeError {
get [Symbol.toStringTag] () {
return 'Error'
}
}
const fake = new FakeError()
const db = testCommon.factory()
t.is(Object.prototype.toString.call(fake), '[object Error]')
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
return Promise.reject(fake)
})
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause, fake)
}
})
test('errors from both postopen hook function and resource lock the db', async function (t) {
t.plan(9)
const db = testCommon.factory()
const resource = db.iterator()
resource.close = async function () {
throw new Error('error from resource')
}
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
throw new Error('error from hook')
})
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.name, 'CombinedError')
t.is(err.cause.message, 'error from hook; error from resource')
}
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_STATUS_LOCKED')
}
try {
await db.close()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_STATUS_LOCKED')
}
})
for (const method of ['open', 'close']) {
test(`postopen hook function that attempts to call ${method}() results in error`, async function (t) {
t.plan(5)
const db = testCommon.factory()
db.hooks.postopen.add(async function (options) {
t.is(db.status, 'open')
return db[method]()
})
db.on('open', function () {
t.fail('should not open')
})
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.code, 'LEVEL_STATUS_LOCKED')
t.is(err.cause.message, 'Database status is locked')
}
})
}
}
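postopen hook functions run once the database has opened, before deferred operations and the open event; if one rejects, the database is closed again and open() rejects with LEVEL_HOOK_ERROR. One plausible use, sketched under the assumption that reads and writes are allowed at this point (the status is 'open', as the tests above assert), is seeding data before any deferred operation runs; the key name is illustrative.

function seedOnOpen (db) {
  db.hooks.postopen.add(async function (options) {
    // get() resolves to undefined for a missing key, per the semantics tested in this suite
    if (await db.get('meta:seeded') === undefined) {
      await db.put('meta:seeded', 'true')
    }
  })
}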

816
test/hooks/prewrite.js Normal file
View File

@ -0,0 +1,816 @@
'use strict'
const shared = require('./shared')
module.exports = function (test, testCommon) {
shared(test, testCommon, 'prewrite')
for (const deferred of [false, true]) {
for (const type of ['put', 'del']) {
for (const method of ['batch', 'chained batch', 'singular']) {
test(`prewrite hook function is called after open (deferred: ${deferred})`, async function (t) {
t.plan(1)
const db = testCommon.factory()
if (!deferred) await db.open()
db.hooks.prewrite.add(function (op, batch) {
t.is(db.status, 'open')
})
if (type === 'put') {
switch (method) {
case 'batch':
await db.batch([{ type: 'put', key: 'beep', value: 'boop' }])
break
case 'chained batch':
// Does not support deferred open
await db.open()
await db.batch().put('beep', 'boop').write()
break
case 'singular':
await db.put('beep', 'boop')
break
}
} else if (type === 'del') {
switch (method) {
case 'batch':
await db.batch([{ type: 'del', key: 'beep' }])
break
case 'chained batch':
// Does not support deferred open
await db.open()
await db.batch().del('beep').write()
break
case 'singular':
await db.del('beep')
break
}
}
return db.close()
})
}
}
}
test('prewrite hook function receives put op', async function (t) {
t.plan(3)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
t.same(op, {
type: 'put',
key: 'beep',
value: 'boop',
keyEncoding: db.keyEncoding('utf8'),
valueEncoding: db.valueEncoding('utf8')
})
})
await db.put('beep', 'boop')
await db.batch([{ type: 'put', key: 'beep', value: 'boop' }])
await db.batch().put('beep', 'boop').write()
return db.close()
})
test('prewrite hook function receives del op', async function (t) {
t.plan(3)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
t.same(op, {
type: 'del',
key: 'beep',
keyEncoding: db.keyEncoding('utf8')
})
})
await db.del('beep')
await db.batch([{ type: 'del', key: 'beep' }])
await db.batch().del('beep').write()
return db.close()
})
test('prewrite hook function receives put op with custom encodings and userland option', async function (t) {
t.plan(3)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
t.same(op, {
type: 'put',
key: 123, // Should not be JSON-encoded
value: 'boop',
keyEncoding: db.keyEncoding('json'),
valueEncoding: db.valueEncoding('json'),
userland: 456
})
})
await db.put(123, 'boop', { keyEncoding: 'json', valueEncoding: 'json', userland: 456 })
await db.batch([{ type: 'put', key: 123, value: 'boop', keyEncoding: 'json', valueEncoding: 'json', userland: 456 }])
await db.batch().put(123, 'boop', { keyEncoding: 'json', valueEncoding: 'json', userland: 456 }).write()
return db.close()
})
test('prewrite hook function receives del op with custom encodings and userland option', async function (t) {
t.plan(3)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
t.same(op, {
type: 'del',
key: 123, // Should not be JSON-encoded
keyEncoding: db.keyEncoding('json'),
userland: 456
})
})
await db.del(123, { keyEncoding: 'json', userland: 456 })
await db.batch([{ type: 'del', key: 123, keyEncoding: 'json', userland: 456 }])
await db.batch().del(123, { keyEncoding: 'json', userland: 456 }).write()
return db.close()
})
test('prewrite hook function can modify put operation', async function (t) {
t.plan(10 * 3)
const db = testCommon.factory({ keyEncoding: 'json', valueEncoding: 'utf8' })
db.hooks.prewrite.add(function (op, batch) {
t.is(op.keyEncoding, db.keyEncoding('json'))
t.is(op.valueEncoding, db.valueEncoding('utf8'))
op.key = '456'
op.value = { x: 1 }
// Flip the encodings
op.keyEncoding = 'utf8'
op.valueEncoding = 'json'
// Test adding a userland option
op.userland = 456
})
db.on('write', function (ops) {
t.is(ops.length, 1)
t.is(ops[0].key, '456')
t.same(ops[0].value, { x: 1 })
t.is(ops[0].keyEncoding, db.keyEncoding('utf8'))
t.is(ops[0].valueEncoding, db.valueEncoding('json'))
t.same(ops[0].encodedKey, db.keyEncoding('utf8').encode('456'))
t.same(ops[0].encodedValue, db.valueEncoding('json').encode({ x: 1 }))
t.is(ops[0].userland, 456)
})
await db.put(123, 'boop')
await db.batch([{ type: 'put', key: 123, value: 'boop' }])
await db.batch().put(123, 'boop').write()
return db.close()
})
test('prewrite hook function can modify del operation', async function (t) {
t.plan(6 * 3)
const db = testCommon.factory({ keyEncoding: 'json' })
db.hooks.prewrite.add(function (op, batch) {
t.is(op.keyEncoding, db.keyEncoding('json'))
op.key = '456'
op.keyEncoding = 'utf8'
// Test adding a userland option
op.userland = 456
})
db.on('write', function (ops) {
t.is(ops.length, 1)
t.is(ops[0].key, '456')
t.is(ops[0].keyEncoding, db.keyEncoding('utf8'))
t.same(ops[0].encodedKey, db.keyEncoding('utf8').encode('456'))
t.is(ops[0].userland, 456)
})
await db.del(123)
await db.batch([{ type: 'del', key: 123 }])
await db.batch().del(123).write()
return db.close()
})
test('second prewrite hook function sees modified operation of first', async function (t) {
t.plan(6 * 2)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
t.is(op.key, '1')
op.key = '2'
})
db.hooks.prewrite.add(function (op, batch) {
t.is(op.key, '2')
})
await db.put('1', 'boop')
await db.batch([{ type: 'put', key: '1', value: 'boop' }])
await db.batch().put('1', 'boop').write()
await db.del('1')
await db.batch([{ type: 'del', key: '1' }])
await db.batch().del('1').write()
return db.close()
})
test('prewrite hook function triggered by put can add put operation', async function (t) {
t.plan(3)
const db = testCommon.factory()
// Note: may return a transcoder encoding
const utf8 = db.keyEncoding('utf8')
const json = db.valueEncoding('json')
db.hooks.prewrite.add(function (op, batch) {
batch.add({
type: 'put',
key: 'from-hook',
value: { abc: 123 },
valueEncoding: 'json'
})
})
db.on('write', function (ops) {
t.same(ops, [
{
type: 'put',
key: 'beep',
value: 'boop',
keyEncoding: db.keyEncoding('utf8'),
valueEncoding: db.valueEncoding('utf8'),
encodedKey: utf8.encode('beep'),
encodedValue: utf8.encode('boop')
},
{
type: 'put',
key: 'from-hook',
value: { abc: 123 },
keyEncoding: db.keyEncoding('utf8'),
valueEncoding: db.valueEncoding('json'),
encodedKey: utf8.encode('from-hook'),
encodedValue: json.encode({ abc: 123 })
}
])
})
await db.put('beep', 'boop')
await db.batch([{ type: 'put', key: 'beep', value: 'boop' }])
await db.batch().put('beep', 'boop').write()
return db.close()
})
test('prewrite hook function triggered by del can add del operation', async function (t) {
t.plan(3)
const db = testCommon.factory()
// Note: may return a transcoder encoding
const utf8 = db.keyEncoding('utf8')
db.hooks.prewrite.add(function (op, batch) {
batch.add({ type: 'del', key: 'from-hook' })
})
db.on('write', function (ops) {
t.same(ops, [
{
type: 'del',
key: 'beep',
keyEncoding: db.keyEncoding('utf8'),
encodedKey: utf8.encode('beep')
},
{
type: 'del',
key: 'from-hook',
keyEncoding: db.keyEncoding('utf8'),
encodedKey: utf8.encode('from-hook')
}
])
})
await db.del('beep')
await db.batch([{ type: 'del', key: 'beep' }])
await db.batch().del('beep').write()
return db.close()
})
test('prewrite hook function can add operations with sublevel option', async function (t) {
t.plan(2 * 6)
const db = testCommon.factory()
const sublevel = db.sublevel('sub', { keyEncoding: 'json', valueEncoding: 'json' })
// Note: may return a transcoder encoding
const utf8 = db.keyEncoding('utf8')
db.hooks.prewrite.add(function (op, batch) {
batch.add({ type: 'put', key: 'from-hook-1', value: { x: 22 }, sublevel })
batch.add({ type: 'del', key: 'from-hook-2', sublevel })
})
db.on('write', function (ops) {
t.is(ops[0].key, 'from-input')
t.same(ops.slice(1), [
{
type: 'put',
key: utf8.encode('!sub!"from-hook-1"'),
value: utf8.encode('{"x":22}'),
keyEncoding: db.keyEncoding(sublevel.keyEncoding().format),
valueEncoding: db.valueEncoding(sublevel.valueEncoding().format),
encodedKey: utf8.encode('!sub!"from-hook-1"'),
encodedValue: utf8.encode('{"x":22}'),
sublevel: null // Should be unset
},
{
type: 'del',
key: utf8.encode('!sub!"from-hook-2"'),
keyEncoding: db.keyEncoding(sublevel.keyEncoding().format),
encodedKey: utf8.encode('!sub!"from-hook-2"'),
sublevel: null // Should be unset
}
])
})
await db.put('from-input', 'abc')
await db.batch([{ type: 'put', key: 'from-input', value: 'abc' }])
await db.batch().put('from-input', 'abc').write()
await db.del('from-input')
await db.batch([{ type: 'del', key: 'from-input' }])
await db.batch().del('from-input').write()
return db.close()
})
test('prewrite hook function can add operations with descendant sublevel option', async function (t) {
t.plan(20)
const db = testCommon.factory()
await db.open()
const a = db.sublevel('a')
const b = a.sublevel('b')
const c = b.sublevel('c')
// Note: may return a transcoder encoding
const utf8 = db.keyEncoding('utf8')
const put = async (db, key, opts) => {
const fn = function (op, batch) {
batch.add({ type: 'put', key, value: 'x', ...opts })
}
db.hooks.prewrite.add(fn)
try {
await db.put('0', '0')
} finally {
db.hooks.prewrite.delete(fn)
}
}
const del = async (db, key, opts) => {
const fn = function (op, batch) {
batch.add({ type: 'del', key, ...opts })
}
db.hooks.prewrite.add(fn)
try {
await db.del('0')
} finally {
db.hooks.prewrite.delete(fn)
}
}
// Note: not entirely a noop. Use of sublevel option triggers data to be encoded early
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('1'), 'got put 1'))
await put(db, '1', { sublevel: db })
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!2'), 'got put 2'))
await put(db, '2', { sublevel: a })
await put(a, '2', { sublevel: a }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!3'), 'got put 3'))
await put(db, '3', { sublevel: b })
await put(a, '3', { sublevel: b }) // Same
await put(b, '3', { sublevel: b }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!!c!4'), 'got put 4'))
await put(db, '4', { sublevel: c })
await put(a, '4', { sublevel: c }) // Same
await put(b, '4', { sublevel: c }) // Same
await put(c, '4', { sublevel: c }) // Same
// Test deletes
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('1'), 'got del 1'))
await del(db, '1', { sublevel: db })
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!2'), 'got del 2'))
await del(db, '2', { sublevel: a })
await del(a, '2', { sublevel: a }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!3'), 'got del 3'))
await del(db, '3', { sublevel: b })
await del(a, '3', { sublevel: b }) // Same
await del(b, '3', { sublevel: b }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[1].key, utf8.encode('!a!!b!!c!4'), 'got del 4'))
await del(db, '4', { sublevel: c })
await del(a, '4', { sublevel: c }) // Same
await del(b, '4', { sublevel: c }) // Same
await del(c, '4', { sublevel: c }) // Same
return db.close()
})
test('prewrite hook is triggered bottom-up for nested sublevels', async function (t) {
const db = testCommon.factory()
const a = db.sublevel('a')
const b = a.sublevel('b')
const order = []
const triggers = [
[['b', 'a', 'root'], () => b.put('a', 'a')],
[['b', 'a', 'root'], () => b.batch([{ type: 'put', key: 'a', value: 'a' }])],
[['b', 'a', 'root'], () => b.batch().put('a', 'a').write()],
[['b', 'a', 'root'], () => b.del('a')],
[['b', 'a', 'root'], () => b.batch([{ type: 'del', key: 'a' }])],
[['b', 'a', 'root'], () => b.batch().del('a').write()],
[['a', 'root'], () => a.put('a', 'a')],
[['a', 'root'], () => a.batch([{ type: 'put', key: 'a', value: 'a' }])],
[['a', 'root'], () => a.batch().put('a', 'a').write()],
[['a', 'root'], () => a.del('a')],
[['a', 'root'], () => a.batch([{ type: 'del', key: 'a' }])],
[['a', 'root'], () => a.batch().del('a').write()],
[['root'], () => db.put('a', 'a')],
[['root'], () => db.batch([{ type: 'put', key: 'a', value: 'a' }])],
[['root'], () => db.batch().put('a', 'a').write()],
[['root'], () => db.del('a')],
[['root'], () => db.batch([{ type: 'del', key: 'a' }])],
[['root'], () => db.batch().del('a').write()],
// The sublevel option should not trigger the prewrite hook
[['root'], () => db.put('a', 'a', { sublevel: a })],
[['root'], () => db.batch([{ type: 'put', key: 'a', value: 'a', sublevel: a }])],
[['root'], () => db.batch().put('a', 'a', { sublevel: a }).write()],
[['root'], () => db.del('a', { sublevel: a })],
[['root'], () => db.batch([{ type: 'del', key: 'a', sublevel: a }])],
[['root'], () => db.batch().del('a', { sublevel: a }).write()]
]
t.plan(triggers.length)
db.hooks.prewrite.add((op, batch) => { order.push('root') })
a.hooks.prewrite.add((op, batch) => { order.push('a') })
b.hooks.prewrite.add((op, batch) => { order.push('b') })
for (const [expectedOrder, trigger] of triggers) {
await trigger()
t.same(order.splice(0, order.length), expectedOrder)
}
return db.close()
})
test('db catches invalid operations added by prewrite hook function', async function (t) {
const db = testCommon.factory()
const errEncoding = {
name: 'test',
format: 'utf8',
encode () {
throw new Error()
},
decode () {
throw new Error()
}
}
const hookFunctions = [
(op, batch) => batch.add(),
(op, batch) => batch.add({}),
(op, batch) => batch.add({ type: 'del' }),
(op, batch) => batch.add({ type: 'del', key: null }),
(op, batch) => batch.add({ type: 'del', key: undefined }),
(op, batch) => batch.add({ type: 'put', key: 'a' }),
(op, batch) => batch.add({ type: 'put', key: 'a', value: null }),
(op, batch) => batch.add({ type: 'put', key: 'a', value: undefined }),
(op, batch) => batch.add({ type: 'nope', key: 'a', value: 'b' }),
(op, batch) => batch.add({ type: 'del', key: 'a', keyEncoding: errEncoding }),
(op, batch) => batch.add({ type: 'put', key: 'a', value: 'b', keyEncoding: errEncoding }),
(op, batch) => batch.add({ type: 'put', key: 'a', value: 'b', valueEncoding: errEncoding })
]
const triggers = [
() => db.put('beep', 'boop'),
() => db.batch([{ type: 'put', key: 'beep', value: 'boop' }]),
() => db.batch().put('beep', 'boop').write(),
() => db.del('beep'),
() => db.batch([{ type: 'del', key: 'beep' }]),
() => db.batch().del('beep').write()
]
t.plan(hookFunctions.length * triggers.length * 2)
db.on('write', function (ops) {
t.fail('should not write')
})
for (const trigger of triggers) {
for (const fn of hookFunctions) {
db.hooks.prewrite.add(fn)
try {
await trigger()
} catch (err) {
t.is(err.code, 'LEVEL_HOOK_ERROR')
}
db.hooks.prewrite.delete(fn)
t.is(db.hooks.prewrite.noop, true)
}
}
return db.close()
})
test('prewrite hook function is called once for every input operation', async function (t) {
t.plan(2)
const calls = []
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
calls.push(op.key)
})
await db.batch([{ type: 'del', key: '1' }, { type: 'put', key: '2', value: '123' }])
t.same(calls.splice(0, calls.length), ['1', '2'])
await db.batch().del('1').put('2', '123').write()
t.same(calls.splice(0, calls.length), ['1', '2'])
return db.close()
})
test('prewrite hook adds operations after input operations', async function (t) {
t.plan(2)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
if (op.key === 'input1') {
batch
.add({ type: 'del', key: 'hook1' })
.add({ type: 'del', key: 'hook2' })
.add({ type: 'put', key: 'hook3', value: 'foo' })
}
})
db.on('write', function (ops) {
t.same(ops.map(op => op.key), [
'input1', 'input2', 'hook1', 'hook2', 'hook3'
], 'order is correct')
})
await db.batch([{ type: 'del', key: 'input1' }, { type: 'put', key: 'input2', value: '123' }])
await db.batch().del('input1').put('input2', '123').write()
return db.close()
})
test('prewrite hook does not copy input options to added operations', async function (t) {
t.plan(6)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
batch.add({ type: 'put', key: 'from-hook-a', value: 'xyz' })
batch.add({ type: 'del', key: 'from-hook-b' })
})
db.on('write', function (ops) {
const relevant = ops.map(op => {
return {
key: op.key,
hasOption: 'userland' in op,
keyEncoding: op.keyEncoding.commonName
}
})
t.same(relevant, [
{
key: 'input-a',
keyEncoding: 'json',
hasOption: true
},
{
key: 'from-hook-a',
keyEncoding: 'utf8', // Should be the database default (2x)
hasOption: false
},
{
key: 'from-hook-b',
keyEncoding: 'utf8',
hasOption: false
}
])
})
await db.put('input-a', 'boop', { keyEncoding: 'json', userland: 123 })
await db.batch([{ type: 'put', key: 'input-a', value: 'boop', keyEncoding: 'json', userland: 123 }])
await db.batch().put('input-a', 'boop', { keyEncoding: 'json', userland: 123 }).write()
await db.del('input-a', { keyEncoding: 'json', userland: 123 })
await db.batch([{ type: 'del', key: 'input-a', keyEncoding: 'json', userland: 123 }])
await db.batch().del('input-a', { keyEncoding: 'json', userland: 123 }).write()
return db.close()
})
test('error thrown from prewrite hook function is caught', async function (t) {
t.plan(6 * 2)
const db = testCommon.factory()
db.hooks.prewrite.add(function (op, batch) {
throw new Error('test')
})
const verify = (err) => {
t.is(err.code, 'LEVEL_HOOK_ERROR')
t.is(err.cause.message, 'test')
}
await db.batch([{ type: 'del', key: '1' }]).catch(verify)
await db.batch([{ type: 'put', key: '1', value: '2' }]).catch(verify)
const batch1 = db.batch()
const batch2 = db.batch()
try { batch1.del('1') } catch (err) { verify(err) }
try { batch2.put('1', '2') } catch (err) { verify(err) }
await batch1.close()
await batch2.close()
await db.del('1').catch(verify)
await db.put('1', '2').catch(verify)
return db.close()
})
test('operations added by prewrite hook function count towards chained batch length', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
db.hooks.prewrite.add(function (op, batch) {
batch.add({ type: 'del', key: 'hook1' })
})
const batch = db.batch()
batch.del('input1')
t.is(batch.length, 2)
batch.put('input2', 'foo')
t.is(batch.length, 4)
await batch.close()
return db.close()
})
test('operations added by prewrite hook function can be cleared from chained batch', async function (t) {
t.plan(3)
const db = testCommon.factory()
await db.open()
db.hooks.prewrite.add(function (op, batch) {
batch.add({ type: 'put', key: 'x', value: 'y' })
})
const batch = db.batch()
batch.del('a')
t.is(batch.length, 2)
batch.clear()
t.is(batch.length, 0)
db.on('write', t.fail.bind(t))
await batch.write()
t.same(await db.keys().all(), [], 'did not write to db')
return db.close()
})
test('prewrite hook function is not called for earlier chained batch', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
const calls = []
const batchBefore = db.batch()
db.hooks.prewrite.add(function (op, batch) {
calls.push(op.key)
})
batchBefore.del('before')
t.same(calls, [])
const batchAfter = db.batch()
batchAfter.del('after')
t.same(calls, ['after'])
await Promise.all([batchBefore.close(), batchAfter.close()])
return db.close()
})
// See https://github.com/Level/abstract-level/issues/80
test('prewrite hook function can write to nondescendant sublevel', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
const textDecoder = new TextDecoder()
const books = db.sublevel('books', { valueEncoding: 'json' })
const index = db.sublevel('authors', {
// Use JSON, which normally doesn't make sense for keys but
// helps to assert that there's no double encoding happening.
keyEncoding: 'json'
})
db.on('write', (ops) => {
// Check that data is written to correct sublevels, specifically
// !authors!Hesse~12 rather than !books!!authors!Hesse~12.
t.same(ops.map(x => decode(x.key)), ['!books!12', '!authors!"Hesse~12"'])
// It's unfortunate DX but because the write is made via the sublevel, the
// format of keys depends on the supported encodings of db. For example on
// a MemoryLevel({ storeEncoding: 'buffer' }) the key will be a buffer.
function decode (key) {
return db.keyEncoding('utf8').format === 'utf8' ? key : textDecoder.decode(key)
}
})
books.on('write', (ops) => {
// Should not include the op of the index
t.same(ops.map(x => x.key), ['12'])
})
index.on('write', (ops) => {
t.fail('Did not expect an event on index')
})
books.hooks.prewrite.add(function (op, batch) {
if (op.type === 'put') {
batch.add({
type: 'put',
// Key structure is synthetic and not relevant to the test
key: op.value.author + '~' + op.key,
value: '',
sublevel: index
})
}
})
await books.put('12', { title: 'Siddhartha', author: 'Hesse' })
})
}
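As an illustrative aside (not part of the committed file): the secondary-index pattern that the last test above exercises, sketched as standalone code. It assumes memory-level, or any other implementation built on an abstract-level release that ships hooks; the package name, the index key format and the logged output are assumptions of the sketch, not facts from this commit.
'use strict'
const { MemoryLevel } = require('memory-level') // assumed example implementation

async function example () {
  const db = new MemoryLevel()
  const books = db.sublevel('books', { valueEncoding: 'json' })
  const index = db.sublevel('authors')

  // Keep a secondary index in a sibling sublevel, written in the same batch
  // as the primary operation (the pattern verified by the test above).
  books.hooks.prewrite.add(function (op, batch) {
    if (op.type === 'put') {
      batch.add({ type: 'put', key: op.value.author + '~' + op.key, value: '', sublevel: index })
    }
  })

  await books.put('12', { title: 'Siddhartha', author: 'Hesse' })
  console.log(await index.keys().all()) // expected: ['Hesse~12']
  return db.close()
}

example().catch(console.error)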

test/hooks/shared.js Normal file

@ -0,0 +1,38 @@
'use strict'
module.exports = function (test, testCommon, hook) {
test(`can add and remove functions to/from ${hook} hook`, async function (t) {
const db = testCommon.factory()
const fn1 = function () {}
const fn2 = function () {}
t.is(db.hooks[hook].noop, true, 'is initially a noop')
t.is(typeof db.hooks[hook].run, 'function')
db.hooks[hook].add(fn1)
t.is(db.hooks[hook].noop, false, 'not a noop')
t.is(typeof db.hooks[hook].run, 'function')
db.hooks[hook].add(fn2)
t.is(db.hooks[hook].noop, false, 'not a noop')
t.is(typeof db.hooks[hook].run, 'function')
db.hooks[hook].delete(fn1)
t.is(db.hooks[hook].noop, false, 'not a noop')
t.is(typeof db.hooks[hook].run, 'function')
db.hooks[hook].delete(fn2)
t.is(db.hooks[hook].noop, true, 'is a noop again')
t.is(typeof db.hooks[hook].run, 'function')
for (const invalid of [null, undefined, 123]) {
t.throws(() => db.hooks[hook].add(invalid), (err) => err.name === 'TypeError')
t.throws(() => db.hooks[hook].delete(invalid), (err) => err.name === 'TypeError')
}
t.is(db.hooks[hook].noop, true, 'still a noop')
t.is(typeof db.hooks[hook].run, 'function')
return db.close()
})
}

test/index.js Normal file

@ -0,0 +1,82 @@
'use strict'
const common = require('./common')
const kSublevels = Symbol('sublevels')
function suite (options) {
const testCommon = common(options)
const test = testCommon.test
require('./factory-test')(test, testCommon)
require('./manifest-test')(test, testCommon)
require('./open-test').all(test, testCommon)
if (testCommon.supports.createIfMissing) {
require('./open-create-if-missing-test').all(test, testCommon)
}
if (testCommon.supports.errorIfExists) {
require('./open-error-if-exists-test').all(test, testCommon)
}
require('./put-test').all(test, testCommon)
require('./get-test').all(test, testCommon)
require('./del-test').all(test, testCommon)
require('./put-get-del-test').all(test, testCommon)
require('./get-many-test').all(test, testCommon)
if (testCommon.supports.has) {
require('./has-test').all(test, testCommon)
require('./has-many-test').all(test, testCommon)
}
require('./batch-test').all(test, testCommon)
require('./chained-batch-test').all(test, testCommon)
require('./iterator-test').all(test, testCommon)
require('./iterator-range-test').all(test, testCommon)
require('./async-iterator-test').all(test, testCommon)
require('./iterator-seek-test').all(test, testCommon)
require('./deferred-open-test').all(test, testCommon)
require('./encoding-test').all(test, testCommon)
require('./encoding-json-test').all(test, testCommon)
require('./encoding-custom-test').all(test, testCommon)
require('./encoding-buffer-test').all(test, testCommon)
require('./encoding-decode-error-test').all(test, testCommon)
if (testCommon.supports.implicitSnapshots) {
require('./iterator-snapshot-test').all(test, testCommon)
} else {
require('./iterator-no-snapshot-test').all(test, testCommon)
}
if (testCommon.supports.explicitSnapshots) {
require('./iterator-explicit-snapshot-test').all(test, testCommon)
}
require('./clear-test').all(test, testCommon)
require('./clear-range-test').all(test, testCommon)
require('./sublevel-test').all(test, testCommon)
require('./events/write')(test, testCommon)
require('./hooks/postopen')(test, testCommon)
require('./hooks/newsub')(test, testCommon)
require('./hooks/prewrite')(test, testCommon)
// Run the same suite on a sublevel
if (!testCommon.internals[kSublevels]) {
const factory = testCommon.factory
suite({
...testCommon,
internals: { [kSublevels]: true },
factory (opts) {
return factory().sublevel('test', opts)
}
})
}
}
suite.common = common
module.exports = suite
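For context, a hedged sketch of how an implementation would typically consume this suite. The tape runner, the abstract-level/test entry point and memory-level are assumptions based on Level ecosystem conventions rather than on files shown in this commit.
'use strict'
const test = require('tape')                    // assumed test runner
const suite = require('abstract-level/test')    // assumed public entry point for this suite
const { MemoryLevel } = require('memory-level') // assumed example implementation

suite({
  test,
  factory (options) {
    // Must return a fresh, isolated database for every call
    return new MemoryLevel(options)
  }
})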


@ -0,0 +1,370 @@
'use strict'
const traits = require('./traits')
exports.traits = function (test, testCommon) {
// TODO: document (or fix...) that deferred open is not supported
traits.open('snapshot()', testCommon, { deferred: false }, async function (t, db) {
const snapshot = db.snapshot()
return snapshot.close()
})
traits.closed('snapshot()', testCommon, async function (t, db) {
db.snapshot()
})
}
exports.get = function (test, testCommon) {
const { testFresh, testClose } = testFactory(test, testCommon)
testFresh('get() changed entry from snapshot', async function (t, db) {
t.plan(3)
await db.put('abc', 'before')
const snapshot = db.snapshot()
await db.put('abc', 'after')
t.is(await db.get('abc'), 'after')
t.is(await db.get('abc', { snapshot }), 'before')
t.is(await db.get('other', { snapshot }), undefined)
return snapshot.close()
})
testFresh('get() deleted entry from snapshot', async function (t, db) {
t.plan(3)
await db.put('abc', 'before')
const snapshot = db.snapshot()
await db.del('abc')
t.is(await db.get('abc'), undefined)
t.is(await db.get('abc', { snapshot }), 'before')
t.is(await db.get('other', { snapshot }), undefined)
return snapshot.close()
})
testFresh('get() non-existent entry from snapshot', async function (t, db) {
t.plan(2)
const snapshot = db.snapshot()
await db.put('abc', 'after')
t.is(await db.get('abc'), 'after')
t.is(await db.get('abc', { snapshot }), undefined)
return snapshot.close()
})
testFresh('get() entries from multiple snapshots', async function (t, db) {
const snapshots = []
const iterations = 100
t.plan(iterations)
for (let i = 0; i < iterations; i++) {
await db.put('number', i.toString())
snapshots.push(db.snapshot())
}
for (let i = 0; i < iterations; i++) {
const snapshot = snapshots[i]
const value = i.toString()
t.is(await db.get('number', { snapshot }), value)
}
return Promise.all(snapshots.map(x => x.close()))
})
testFresh('get() entries from snapshot after closing another', async function (t, db) {
await db.put('abc', 'before')
const snapshot1 = db.snapshot()
const snapshot2 = db.snapshot()
await db.put('abc', 'after')
await snapshot1.close()
// Closing one snapshot should not affect the other
t.is(await db.get('abc', { snapshot: snapshot2 }), 'before')
return snapshot2.close()
})
testClose('get()', async function (db, snapshot) {
return db.get('xyz', { snapshot })
})
}
exports.getMany = function (test, testCommon) {
const { testFresh, testClose } = testFactory(test, testCommon)
testFresh('getMany() entries from snapshot', async function (t, db) {
t.plan(3)
await db.put('a', '1')
await db.put('b', '2')
await db.put('c', '3')
const snapshot = db.snapshot()
await db.put('a', 'abc')
await db.del('b')
await db.put('c', 'xyz')
t.same(await db.getMany(['a', 'b', 'c']), ['abc', undefined, 'xyz'])
t.same(await db.getMany(['a', 'b', 'c'], { snapshot }), ['1', '2', '3'])
t.same(await db.getMany(['a', 'b', 'c']), ['abc', undefined, 'xyz'], 'no side effects')
return snapshot.close()
})
testClose('getMany()', async function (db, snapshot) {
return db.getMany(['xyz'], { snapshot })
})
}
exports.iterator = function (test, testCommon) {
const { testFresh, testClose } = testFactory(test, testCommon)
testFresh('iterator(), keys(), values() with snapshot', async function (t, db) {
t.plan(10)
await db.put('a', '1')
await db.put('b', '2')
await db.put('c', '3')
const snapshot = db.snapshot()
await db.put('a', 'after')
await db.del('b')
await db.put('c', 'after')
await db.put('d', 'after')
t.same(
await db.iterator().all(),
[['a', 'after'], ['c', 'after'], ['d', 'after']],
'data was written'
)
for (const fn of [all, nextv, next]) {
t.same(await fn(db.iterator({ snapshot })), [['a', '1'], ['b', '2'], ['c', '3']], 'iterator')
t.same(await fn(db.keys({ snapshot })), ['a', 'b', 'c'], 'keys')
t.same(await fn(db.values({ snapshot })), ['1', '2', '3'], 'values')
}
async function all (iterator) {
return iterator.all()
}
async function nextv (iterator) {
try {
return iterator.nextv(10)
} finally {
await iterator.close()
}
}
async function next (iterator) {
try {
const entries = []
let entry
while ((entry = await iterator.next()) !== undefined) {
entries.push(entry)
}
return entries
} finally {
await iterator.close()
}
}
return snapshot.close()
})
// Test that every iterator type and read method checks snapshot state
for (const type of ['iterator', 'keys', 'values']) {
testClose(`${type}().all()`, async function (db, snapshot) {
return db[type]({ snapshot }).all()
})
testClose(`${type}().next()`, async function (db, snapshot) {
const iterator = db[type]({ snapshot })
try {
await iterator.next()
} finally {
iterator.close()
}
})
testClose(`${type}().nextv()`, async function (db, snapshot) {
const iterator = db[type]({ snapshot })
try {
await iterator.nextv(10)
} finally {
iterator.close()
}
})
}
}
exports.clear = function (test, testCommon) {
const { testFresh, testClose } = testFactory(test, testCommon)
testFresh('clear() entries from snapshot', async function (t, db) {
t.plan(2)
await db.put('a', 'xyz')
const snapshot = db.snapshot()
await db.put('b', 'xyz')
await db.clear({ snapshot })
t.same(await db.keys().all(), ['b'])
t.same(await db.keys({ snapshot }).all(), ['a'])
return snapshot.close()
})
testFresh('clear() entries from empty snapshot', async function (t, db) {
t.plan(2)
const snapshot = db.snapshot()
await db.put('a', 'xyz')
await db.clear({ snapshot })
t.same(await db.keys().all(), ['a'])
t.same(await db.keys({ snapshot }).all(), [])
return snapshot.close()
})
testClose('clear()', async function (db, snapshot) {
return db.clear({ snapshot })
})
}
exports.cleanup = function (test, testCommon) {
test('snapshot is closed on database close', async function (t) {
t.plan(1)
const db = testCommon.factory()
await db.open()
const snapshot = db.snapshot()
const promise = db.close()
try {
snapshot.ref()
} catch (err) {
t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN')
}
return promise
})
test('snapshot is closed along with iterator', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
await db.put('beep', 'boop')
// These resources have a potentially tricky relationship. If all is well,
// db.close() calls both snapshot.close() and iterator.close() in parallel,
// and snapshot.close() and iterator.close() wait on the read. Crucially,
// closing the snapshot only waits for individual operations on the iterator
// rather than for the entire iterator to be closed (which may never happen).
const snapshot = db.snapshot()
const iterator = db.iterator({ snapshot })
const readPromise = iterator.all()
const closePromise = db.close()
try {
snapshot.ref()
} catch (err) {
t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN', 'snapshot is closing')
}
try {
await iterator.next()
} catch (err) {
// Effectively also asserts that the LEVEL_ITERATOR_NOT_OPEN error takes
// precedence over LEVEL_SNAPSHOT_NOT_OPEN.
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', 'iterator is closing')
}
return Promise.all([readPromise, closePromise])
})
}
exports.dispose = function (test, testCommon) {
// Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management)
Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) {
const db = testCommon.factory()
await db.open()
const snapshot = db.snapshot()
await snapshot[Symbol.asyncDispose]()
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.traits(test, testCommon)
exports.get(test, testCommon)
exports.getMany(test, testCommon)
exports.iterator(test, testCommon)
exports.clear(test, testCommon)
exports.cleanup(test, testCommon)
exports.dispose(test, testCommon)
}
function testFactory (test, testCommon) {
const testFresh = function (name, run) {
test(name, async function (t) {
const db = testCommon.factory()
await db.open()
await run(t, db)
return db.close()
})
}
const testClose = function (name, run) {
testFresh(`${name} after closing snapshot`, async function (t, db) {
t.plan(1)
const snapshot = db.snapshot()
await snapshot.close()
try {
await run(db, snapshot)
} catch (err) {
t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN')
}
})
testFresh(`${name} while closing snapshot`, async function (t, db) {
t.plan(1)
const snapshot = db.snapshot()
const promise = snapshot.close()
try {
await run(db, snapshot)
} catch (err) {
t.is(err.code, 'LEVEL_SNAPSHOT_NOT_OPEN')
}
return promise
})
}
return { testFresh, testClose }
}
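A minimal sketch of the explicit snapshot API exercised above. It assumes db is an already-opened abstract-level database whose manifest advertises db.supports.explicitSnapshots; which implementations do so is not stated here.
async function explicitSnapshotExample (db) {
  await db.put('abc', 'before')

  const snapshot = db.snapshot() // pin the current state
  await db.put('abc', 'after')

  console.log(await db.get('abc'))               // 'after'
  console.log(await db.get('abc', { snapshot })) // 'before'
  console.log(await db.keys({ snapshot }).all()) // ['abc']

  // Snapshots must be closed to release underlying resources
  return snapshot.close()
}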


@ -0,0 +1,49 @@
'use strict'
exports.noSnapshot = function (test, testCommon) {
const make = (run) => async function (t) {
const db = testCommon.factory()
const operations = [
{ type: 'put', key: 'a', value: 'a' },
{ type: 'put', key: 'b', value: 'b' },
{ type: 'put', key: 'c', value: 'c' }
]
await db.open()
await db.batch(operations)
// For this test it is important that we don't read eagerly.
// NOTE: highWaterMarkBytes is not an abstract option, but
// it is supported by classic-level and others. Also set the
// old & equivalent leveldown highWaterMark option for compat.
const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 })
await run(db)
await verify(t, it, db)
return db.close()
}
async function verify (t, it, db) {
const entries = await it.all()
const kv = entries.map(([key, value]) => key + value)
if (kv.length === 3) {
t.same(kv, ['aa', 'bb', 'cc'], 'maybe supports snapshots')
} else {
t.same(kv, ['aa', 'cc'], 'ignores keys that have been deleted in the meantime')
}
}
test('delete key after creating iterator', make(async function (db) {
return db.del('b')
}))
test('batch delete key after creating iterator', make(async function (db) {
return db.batch([{ type: 'del', key: 'b' }])
}))
}
exports.all = function (test, testCommon) {
exports.noSnapshot(test, testCommon)
}
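A small sketch of the feature check this file revolves around; the property name is taken from the supports object used throughout this suite and is assumed to also be exposed on the database manifest.
function hasIsolatedIterators (db) {
  // True when iterators read from an implicit snapshot taken at creation
  // time. Databases covered by this file return false and, as verified
  // above, may surface concurrent deletes mid-iteration.
  return db.supports.implicitSnapshots === true
}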

test/iterator-range-test.js Normal file

@ -0,0 +1,277 @@
'use strict'
let db
const data = (function () {
const d = []
let i = 0
let k
for (; i < 100; i++) {
k = (i < 10 ? '0' : '') + i
d.push({
key: k,
value: String(Math.random())
})
}
return d
}())
exports.setUp = function (test, testCommon) {
test('iterator() range setup', async function (t) {
db = testCommon.factory()
await db.open()
return db.batch(data.map(function ({ key, value }) {
return { type: 'put', key, value }
}))
})
}
exports.range = function (test, testCommon) {
function rangeTest (name, opts, expected) {
test('iterator() range with ' + name, async function (t) {
const entries = await db.iterator(opts).all()
t.is(entries.length, expected.length, 'correct number of entries')
t.same(entries, expected.map(o => [o.key, o.value]))
})
// Test the documented promise that in reverse mode,
// "the returned entries are the same, but in reverse".
if (!opts.reverse && !('limit' in opts)) {
const reverseOpts = { ...opts, reverse: true }
rangeTest(
name + ' (flipped)',
reverseOpts,
expected.slice().reverse()
)
}
}
rangeTest('no options', {}, data)
rangeTest('reverse=true', {
reverse: true
}, data.slice().reverse())
rangeTest('gte=00', {
gte: '00'
}, data)
rangeTest('gte=50', {
gte: '50'
}, data.slice(50))
rangeTest('lte=50 and reverse=true', {
lte: '50',
reverse: true
}, data.slice().reverse().slice(49))
rangeTest('gte=49.5 (midway)', {
gte: '49.5'
}, data.slice(50))
rangeTest('gte=49999 (midway)', {
gte: '49999'
}, data.slice(50))
rangeTest('lte=49.5 (midway) and reverse=true', {
lte: '49.5',
reverse: true
}, data.slice().reverse().slice(50))
rangeTest('lt=49.5 (midway) and reverse=true', {
lt: '49.5',
reverse: true
}, data.slice().reverse().slice(50))
rangeTest('lt=50 and reverse=true', {
lt: '50',
reverse: true
}, data.slice().reverse().slice(50))
rangeTest('lte=50', {
lte: '50'
}, data.slice(0, 51))
rangeTest('lte=50.5 (midway)', {
lte: '50.5'
}, data.slice(0, 51))
rangeTest('lte=50555 (midway)', {
lte: '50555'
}, data.slice(0, 51))
rangeTest('lt=50555 (midway)', {
lt: '50555'
}, data.slice(0, 51))
rangeTest('gte=50.5 (midway) and reverse=true', {
gte: '50.5',
reverse: true
}, data.slice().reverse().slice(0, 49))
rangeTest('gt=50.5 (midway) and reverse=true', {
gt: '50.5',
reverse: true
}, data.slice().reverse().slice(0, 49))
rangeTest('gt=50 and reverse=true', {
gt: '50',
reverse: true
}, data.slice().reverse().slice(0, 49))
// First key is actually '00', so lte: '0' should not match it
rangeTest('lte=0', {
lte: '0'
}, [])
// First key is actually '00', so lt: '0' should not match it
rangeTest('lt=0', {
lt: '0'
}, [])
rangeTest('gte=30 and lte=70', {
gte: '30',
lte: '70'
}, data.slice(30, 71))
// The gte and lte options should take precedence over gt and lt respectively.
rangeTest('gte=30 and lte=70 and gt=40 and lt=60', {
gte: '30',
lte: '70',
gt: '40',
lt: '60'
}, data.slice(30, 71))
// Also test the other way around: if gt and lt were to select a bigger range.
rangeTest('gte=30 and lte=70 and gt=20 and lt=80', {
gte: '30',
lte: '70',
gt: '20',
lt: '80'
}, data.slice(30, 71))
rangeTest('gt=29 and lt=71', {
gt: '29',
lt: '71'
}, data.slice(30, 71))
rangeTest('gte=30 and lte=70 and reverse=true', {
lte: '70',
gte: '30',
reverse: true
}, data.slice().reverse().slice(29, 70))
rangeTest('gt=29 and lt=71 and reverse=true', {
lt: '71',
gt: '29',
reverse: true
}, data.slice().reverse().slice(29, 70))
rangeTest('limit=20', {
limit: 20
}, data.slice(0, 20))
rangeTest('limit=20 and gte=20', {
limit: 20,
gte: '20'
}, data.slice(20, 40))
rangeTest('limit=20 and reverse=true', {
limit: 20,
reverse: true
}, data.slice().reverse().slice(0, 20))
rangeTest('limit=20 and lte=79 and reverse=true', {
limit: 20,
lte: '79',
reverse: true
}, data.slice().reverse().slice(20, 40))
// the default limit value is -1
rangeTest('limit=-1 (all)', {
limit: -1
}, data)
rangeTest('limit=0 (empty)', {
limit: 0
}, [])
rangeTest('lte after limit', {
limit: 20,
lte: '50'
}, data.slice(0, 20))
rangeTest('lte before limit', {
limit: 50,
lte: '19'
}, data.slice(0, 20))
rangeTest('gte after database end', {
gte: '9a'
}, [])
rangeTest('gt after database end', {
gt: '9a'
}, [])
rangeTest('lte after database end and reverse=true', {
lte: '9a',
reverse: true
}, data.slice().reverse())
rangeTest('lt after database end', {
lt: 'a'
}, data.slice())
rangeTest('lt at database end', {
lt: data[data.length - 1].key
}, data.slice(0, -1))
rangeTest('lte at database end', {
lte: data[data.length - 1].key
}, data.slice())
rangeTest('lt before database end', {
lt: data[data.length - 2].key
}, data.slice(0, -2))
rangeTest('lte before database end', {
lte: data[data.length - 2].key
}, data.slice(0, -1))
rangeTest('lte and gte after database and reverse=true', {
lte: '9b',
gte: '9a',
reverse: true
}, [])
rangeTest('lt and gt after database and reverse=true', {
lt: '9b',
gt: '9a',
reverse: true
}, [])
rangeTest('gt greater than lt', {
gt: '20',
lt: '10'
}, [])
rangeTest('gte greater than lte', {
gte: '20',
lte: '10'
}, [])
}
exports.tearDown = function (test, testCommon) {
test('iterator() range teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.range(test, testCommon)
exports.tearDown(test, testCommon)
}
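To summarize the range semantics tested above, a hedged standalone sketch; memory-level is assumed purely as an example implementation and the logged results follow from lexicographic ordering.
'use strict'
const { MemoryLevel } = require('memory-level')

async function example () {
  const db = new MemoryLevel()
  await db.batch([
    { type: 'put', key: '20', value: 'a' },
    { type: 'put', key: '30', value: 'b' },
    { type: 'put', key: '40', value: 'c' },
    { type: 'put', key: '50', value: 'd' }
  ])

  // gte/lte are inclusive while gt/lt are exclusive; keys sort lexicographically
  console.log(await db.keys({ gte: '30', lte: '50' }).all())               // ['30', '40', '50']
  console.log(await db.keys({ gt: '30', lt: '50' }).all())                 // ['40']
  console.log(await db.keys({ gte: '30', reverse: true, limit: 2 }).all()) // ['50', '40']

  return db.close()
}

example().catch(console.error)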

test/iterator-seek-test.js Normal file

@ -0,0 +1,335 @@
'use strict'
const { Buffer } = require('buffer')
const identity = (v) => v
exports.all = function (test, testCommon) {
exports.sequence(test, testCommon)
exports.seek(test, testCommon)
}
exports.sequence = function (test, testCommon) {
for (const deferred of [false, true]) {
for (const mode of ['iterator', 'keys', 'values']) {
test(`${mode}().seek() throws if next() has not completed (deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
if (!deferred) await db.open()
const it = db[mode]()
const promise = it.next()
t.throws(() => it.seek('two'), (err) => err.code === 'LEVEL_ITERATOR_BUSY')
await promise
await db.close()
})
test(`${mode}().seek() does not throw after close() (deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
if (!deferred) await db.open()
const it = db[mode]()
await it.close()
t.doesNotThrow(() => it.seek('two'))
await db.close()
})
}
}
}
exports.seek = function (test, testCommon) {
const testData = () => [
// Note that 'three' sorts before 'two'
{ type: 'put', key: 'one', value: '1' },
{ type: 'put', key: 'two', value: '2' },
{ type: 'put', key: 'three', value: '3' }
]
const bufferTestData = () => [
// Note that 'b9' sorts before 'c0'
{ type: 'put', key: Buffer.from('80', 'hex'), value: '1', keyEncoding: 'buffer' },
{ type: 'put', key: Buffer.from('c0', 'hex'), value: '2', keyEncoding: 'buffer' },
{ type: 'put', key: Buffer.from('b9', 'hex'), value: '3', keyEncoding: 'buffer' }
]
test('prepare byte-aware tests', function (t) {
const data = bufferTestData()
t.ok(data[0].key.toString() === data[1].key.toString(), 'would be equal when not byte-aware')
t.ok(data[0].key.compare(data[1].key) < 0, 'but less than when byte-aware')
t.end()
})
for (const mode of ['iterator', 'keys', 'values']) {
const mapEntry = mode === 'iterator' ? e => e : mode === 'keys' ? e => e[0] : e => e[1]
test(`${mode}().seek() to string target`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]()
it.seek('two')
t.same(await it.next(), mapEntry(['two', '2']), 'match')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
if (testCommon.supports.encodings.buffer) {
test(`${mode}().seek() to buffer target`, async function (t) {
// For this test to be meaningful it must use bytes outside the utf8 range
const data = bufferTestData()
const db = testCommon.factory()
await db.batch(data)
const it = db[mode]({ keyEncoding: 'buffer' })
// Seek to second key
it.seek(data[1].key)
t.same(await it.next(), mapEntry([data[1].key, '2']), 'match')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
}
test(`${mode}().seek() to target with custom encoding`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]()
const keyEncoding = { encode: () => 'two', decode: identity, format: 'utf8' }
it.seek('xyz', { keyEncoding })
t.same(await it.next(), mapEntry(['two', '2']), 'match')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
test(`${mode}().seek() on reverse iterator`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]({ reverse: true, limit: 1 })
// Should land on key equal to or smaller than 'three!' which is 'three'
it.seek('three!')
t.same(await it.next(), mapEntry(['three', '3']), 'match')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
test(`${mode}().seek() to out of range target`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]()
it.seek('zzz')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
test(`${mode}().seek() on reverse iterator to out of range target`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]({ reverse: true })
it.seek('zzz')
t.same(await it.next(), mapEntry(['two', '2']), 'match')
t.same(await it.next(), mapEntry(['three', '3']), 'match')
t.same(await it.next(), mapEntry(['one', '1']), 'match')
t.same(await it.next(), undefined, 'end of iterator')
return db.close()
})
test(`${mode}().seek() can be used to iterate twice`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const it = db[mode]()
t.same(await it.nextv(10), [['one', '1'], ['three', '3'], ['two', '2']].map(mapEntry), 'match')
t.same(await it.nextv(10), [], 'end of iterator')
it.seek('one')
t.same(await it.nextv(10), [['one', '1'], ['three', '3'], ['two', '2']].map(mapEntry), 'match again')
t.same(await it.nextv(10), [], 'end of iterator again')
await it.close()
return db.close()
})
test(`${mode}().seek() can be used to iterate twice, within limit`, async function (t) {
const db = testCommon.factory()
await db.batch(testData())
const limit = 4
const it = db[mode]({ limit })
t.same(await it.nextv(10), [['one', '1'], ['three', '3'], ['two', '2']].map(mapEntry), 'match')
t.same(await it.nextv(10), [], 'end of iterator')
it.seek('one')
t.same(await it.nextv(10), [['one', '1']].map(mapEntry), 'limit reached')
t.same(await it.nextv(10), [], 'end of iterator')
it.seek('one')
t.same(await it.nextv(10), [], 'does not reset after limit has been reached')
await it.close()
return db.close()
})
if (testCommon.supports.implicitSnapshots) {
for (const reverse of [false, true]) {
for (const deferred of [false, true]) {
test(`${mode}().seek() respects snapshot (reverse: ${reverse}, deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
if (!deferred) await db.open()
const it = db[mode]({ reverse })
// Add entry after having created the iterator (and its snapshot)
await db.put('a', 'a')
// Seeking should not create a new snapshot, which'd include the new entry
it.seek('a')
t.same(await it.next(), undefined)
return db.close()
})
}
}
}
test(`${mode}().seek() respects range`, async function (t) {
const db = testCommon.factory()
await db.open()
const ops = []
for (let i = 0; i < 10; i++) {
ops.push({ type: 'put', key: String(i), value: String(i) })
}
await db.batch(ops)
const promises = []
expect({ gt: '5' }, '4', undefined)
expect({ gt: '5' }, '5', undefined)
expect({ gt: '5' }, '6', '6')
expect({ gte: '5' }, '4', undefined)
expect({ gte: '5' }, '5', '5')
expect({ gte: '5' }, '6', '6')
// The gte option should take precedence over gt.
expect({ gte: '5', gt: '7' }, '4', undefined)
expect({ gte: '5', gt: '7' }, '5', '5')
expect({ gte: '5', gt: '7' }, '6', '6')
expect({ gte: '5', gt: '3' }, '4', undefined)
expect({ gte: '5', gt: '3' }, '5', '5')
expect({ gte: '5', gt: '3' }, '6', '6')
expect({ lt: '5' }, '4', '4')
expect({ lt: '5' }, '5', undefined)
expect({ lt: '5' }, '6', undefined)
expect({ lte: '5' }, '4', '4')
expect({ lte: '5' }, '5', '5')
expect({ lte: '5' }, '6', undefined)
// The lte option should take precedence over lt.
expect({ lte: '5', lt: '3' }, '4', '4')
expect({ lte: '5', lt: '3' }, '5', '5')
expect({ lte: '5', lt: '3' }, '6', undefined)
expect({ lte: '5', lt: '7' }, '4', '4')
expect({ lte: '5', lt: '7' }, '5', '5')
expect({ lte: '5', lt: '7' }, '6', undefined)
expect({ lt: '5', reverse: true }, '4', '4')
expect({ lt: '5', reverse: true }, '5', undefined)
expect({ lt: '5', reverse: true }, '6', undefined)
expect({ lte: '5', reverse: true }, '4', '4')
expect({ lte: '5', reverse: true }, '5', '5')
expect({ lte: '5', reverse: true }, '6', undefined)
expect({ gt: '5', reverse: true }, '4', undefined)
expect({ gt: '5', reverse: true }, '5', undefined)
expect({ gt: '5', reverse: true }, '6', '6')
expect({ gte: '5', reverse: true }, '4', undefined)
expect({ gte: '5', reverse: true }, '5', '5')
expect({ gte: '5', reverse: true }, '6', '6')
expect({ gt: '7', lt: '8' }, '7', undefined)
expect({ gte: '7', lt: '8' }, '7', '7')
expect({ gte: '7', lt: '8' }, '8', undefined)
expect({ gt: '7', lte: '8' }, '8', '8')
await Promise.all(promises)
return db.close()
function expect (range, target, expected) {
promises.push(async function () {
const ite = db[mode](range)
ite.seek(target)
const item = await ite.next()
const json = JSON.stringify(range)
const msg = 'seek(' + target + ') on ' + json + ' yields ' + expected
// Either a key or value depending on mode
t.is(mode === 'iterator' ? item[0] : item, expected, msg)
return ite.close()
})
}
})
// Tests the specific case where an iterator can (theoretically) tell that
// a seek() would be out of range by comparing the seek target against
// range options, before performing an actual seek. MemoryLevel works this
// way, for example. Also tests the same scenario without an explicit seek(),
// which should have the same result.
for (const reverse of [false, true]) {
for (const seek of [true, false]) {
const props = `reverse = ${reverse}, seek = ${seek}`
const name = `${mode}() seek outside of range options (${props})`
const key = 'a'
test(name, async function (t) {
const db = testCommon.factory()
await db.open()
await db.put(key, '123')
// Pick ranges that exclude the key
const ranges = [
{ gt: 'x', reverse },
{ gte: 'x', reverse },
{ lt: '0', reverse },
{ lte: '0', reverse }
]
// Test each range
for (let i = 0; i < ranges.length; i++) {
const iterator = db[mode](ranges[i])
if (seek) iterator.seek(key)
t.same(await iterator.next(), undefined, `end of iterator ${i}`)
await iterator.close()
}
return db.close()
})
}
}
}
}
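A brief sketch of seek(), mirroring the first case tested above; memory-level is assumed as the example implementation.
'use strict'
const { MemoryLevel } = require('memory-level')

async function example () {
  const db = new MemoryLevel()
  await db.batch([
    { type: 'put', key: 'one', value: '1' },
    { type: 'put', key: 'three', value: '3' },
    { type: 'put', key: 'two', value: '2' }
  ])

  const it = db.iterator()
  it.seek('two')               // jump to the first key >= 'two'
  console.log(await it.next()) // ['two', '2']
  console.log(await it.next()) // undefined, i.e. end of iterator

  await it.close()
  return db.close()
}

example().catch(console.error)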


@ -0,0 +1,56 @@
'use strict'
exports.snapshot = function (test, testCommon) {
const make = (run) => async function (t) {
const db = testCommon.factory()
await db.open()
await db.put('z', 'from snapshot')
// For this test it is important that we don't read eagerly.
// NOTE: highWaterMarkBytes is not an abstract option, but
// it is supported by classic-level and others. Also set the
// old & equivalent leveldown highWaterMark option for compat.
const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 })
await run(t, db, it)
await it.close()
return db.close()
}
test('delete key after snapshotting', make(async function (t, db, it) {
await db.del('z')
t.same(await it.next(), ['z', 'from snapshot'], 'correct entry')
}))
test('overwrite key after snapshotting', make(async function (t, db, it) {
await db.put('z', 'not from snapshot')
t.same(await it.next(), ['z', 'from snapshot'], 'correct entry')
}))
test('add key after snapshotting that sorts first', make(async function (t, db, it) {
await db.put('a', 'not from snapshot')
t.same(await it.next(), ['z', 'from snapshot'], 'correct entry')
}))
// NOTE: adapted from memdown
test('delete key after snapshotting, with more entries available', async function (t) {
const db = testCommon.factory()
await db.open()
await Promise.all([db.put('a', 'A'), db.put('b', 'B'), db.put('c', 'C')])
const iterator = db.iterator({ gte: 'a' })
t.same(await iterator.next(), ['a', 'A'])
await db.del('b')
t.same(await iterator.next(), ['b', 'B'])
t.same(await iterator.next(), ['c', 'C'])
await iterator.close()
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.snapshot(test, testCommon)
}
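The guarantee tested above, condensed into a standalone sketch. memory-level is assumed here to provide implicit snapshots; any implementation for which db.supports.implicitSnapshots is true should behave the same way.
'use strict'
const { MemoryLevel } = require('memory-level')

async function example () {
  const db = new MemoryLevel()
  await db.put('z', 'from snapshot')

  const it = db.iterator()     // implicitly snapshots the current state
  await db.del('z')            // does not affect the open iterator

  console.log(await it.next()) // ['z', 'from snapshot']
  await it.close()
  return db.close()
}

example().catch(console.error)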

test/iterator-test.js Normal file

@ -0,0 +1,621 @@
'use strict'
const { Buffer } = require('buffer')
const identity = (v) => v
let db
exports.setUp = function (test, testCommon) {
test('iterator setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
for (const mode of ['iterator', 'keys', 'values']) {
test(`${mode}() has db reference`, async function (t) {
const it = db[mode]()
// May return an iterator of an underlying db; that's okay.
t.ok(it.db === db || it.db === (db.db || db._db || db))
await it.close()
})
test(`${mode}() has limit and count properties`, async function (t) {
const iterators = [db[mode]()]
t.is(iterators[0].limit, Infinity, 'defaults to infinite')
for (const limit of [-1, 0, 1, Infinity]) {
const it = db[mode]({ limit })
iterators.push(it)
t.is(it.limit, limit === -1 ? Infinity : limit, 'has limit property')
}
t.ok(iterators.every(it => it.count === 0), 'has count property')
await Promise.all(iterators.map(it => it.close()))
})
test(`${mode}().nextv() yields error if size is invalid`, async function (t) {
t.plan(4)
const it = db[mode]()
for (const args of [[], [NaN], ['1'], [2.5]]) {
try {
await it.nextv(...args)
} catch (err) {
t.is(err.message, "The first argument 'size' must be an integer")
}
}
await it.close()
})
}
}
exports.sequence = function (test, testCommon) {
for (const mode of ['iterator', 'keys', 'values']) {
test(`${mode}().close() is idempotent`, async function (t) {
const iterator = db[mode]()
await iterator.close()
await iterator.close()
return Promise.all([iterator.close(), iterator.close()])
})
for (const method of ['next', 'nextv', 'all']) {
const requiredArgs = method === 'nextv' ? [1] : []
test(`${mode}().${method}() after close() yields error`, async function (t) {
t.plan(1)
const iterator = db[mode]()
await iterator.close()
try {
await iterator[method](...requiredArgs)
} catch (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', 'correct message')
}
})
for (const otherMethod of ['next', 'nextv', 'all']) {
const otherRequiredArgs = otherMethod === 'nextv' ? [1] : []
test(`${mode}().${method}() while busy with ${otherMethod}() yields error`, async function (t) {
t.plan(1)
const iterator = db[mode]()
const promise = iterator[otherMethod](...otherRequiredArgs)
try {
await iterator[method](...requiredArgs)
} catch (err) {
t.is(err.code, 'LEVEL_ITERATOR_BUSY')
}
await promise
return iterator.close()
})
}
for (const deferred of [false, true]) {
test(`${mode}().${method}() during close() yields error (deferred: ${deferred})`, async function (t) {
t.plan(2)
const db = testCommon.factory()
if (!deferred) await db.open()
const it = db[mode]()
// The first call *may* succeed, because it was scheduled before close(). The
// default implementations of nextv() and all() fall back to next*() and thus
// make multiple calls, so they're allowed to fail.
let promise = it[method](...requiredArgs).then(() => {
t.pass('Optionally succeeded')
}, (err) => {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
})
// The second call *must* fail, because it was scheduled after close()
promise = promise.then(() => {
return it[method](...requiredArgs).then(() => {
t.fail('Expected an error')
}, (err) => {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
})
})
await Promise.all([it.close(), promise])
return db.close()
})
}
// 1) At the moment, we can only be sure that signals are supported if the iterator is deferred
if (globalThis.AbortController) {
test(`${mode}().${method}() with aborted signal yields error (deferred)`, async function (t) {
t.plan(3)
const db = testCommon.factory()
const ac = new globalThis.AbortController()
const it = db[mode]({ signal: ac.signal })
t.is(db.status, 'opening', 'is deferred')
ac.abort()
try {
await it[method](...requiredArgs)
} catch (err) {
t.is(err.code, 'LEVEL_ABORTED')
t.is(err.name, 'AbortError')
}
await it.close()
return db.close()
})
}
// 2) Unless the implementation opts-in
if (globalThis.AbortController && testCommon.supports.signals && testCommon.supports.signals.iterators) {
test(`${mode}().${method}() with signal yields error when aborted`, async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
await db.batch().put('a', 'a').put('b', 'b').write()
const ac = new globalThis.AbortController()
const it = db[mode]({ signal: ac.signal })
const promise = it[method](...requiredArgs)
ac.abort()
try {
await promise
} catch (err) {
t.is(err.code, 'LEVEL_ABORTED')
t.is(err.name, 'AbortError')
}
await it.close()
return db.close()
})
test(`${mode}().${method}() with non-aborted signal`, async function (t) {
const db = testCommon.factory()
await db.open()
await db.batch().put('a', 'a').put('b', 'b').write()
const ac = new globalThis.AbortController()
const it = db[mode]({ signal: ac.signal })
// We're merely testing that this does not throw. And implicitly testing (through
// coverage) that abort listeners are removed. An implementation might choose to
// periodically check signal.aborted instead of using an abort listener, so we
// can't directly assert that cleanup indeed happens.
await it[method](...requiredArgs)
await it.close()
return db.close()
})
}
}
}
}
exports.iterator = function (test, testCommon) {
test('iterator data setup', function (t) {
return db.batch([
{ type: 'put', key: 'foobatch1', value: 'bar1' },
{ type: 'put', key: 'foobatch2', value: 'bar2' },
{ type: 'put', key: 'foobatch3', value: 'bar3' }
])
})
test('simple iterator().next()', async function (t) {
const iterator = db.iterator()
t.same(await iterator.next(), ['foobatch1', 'bar1'])
t.same(await iterator.next(), ['foobatch2', 'bar2'])
t.same(await iterator.next(), ['foobatch3', 'bar3'])
t.is(await iterator.next(), undefined)
return iterator.close()
})
// NOTE: adapted from leveldown
test('iterator().next() with values: false', async function (t) {
const it = db.iterator({ values: false })
t.same(await it.next(), ['foobatch1', undefined])
t.same(await it.next(), ['foobatch2', undefined])
t.same(await it.next(), ['foobatch3', undefined])
t.is(await it.next(), undefined)
return it.close()
})
// NOTE: adapted from leveldown
test('iterator().next() with keys: false', async function (t) {
const it = db.iterator({ keys: false })
t.same(await it.next(), [undefined, 'bar1'])
t.same(await it.next(), [undefined, 'bar2'])
t.same(await it.next(), [undefined, 'bar3'])
t.is(await it.next(), undefined)
return it.close()
})
test('keys().next()', async function (t) {
const it = db.keys()
t.is(await it.next(), 'foobatch1')
t.is(await it.next(), 'foobatch2')
t.is(await it.next(), 'foobatch3')
t.is(await it.next(), undefined)
return it.close()
})
test('values().next()', async function (t) {
const it = db.values()
t.is(await it.next(), 'bar1')
t.is(await it.next(), 'bar2')
t.is(await it.next(), 'bar3')
t.is(await it.next(), undefined)
return it.close()
})
for (const mode of ['iterator', 'keys', 'values']) {
const mapEntry = e => mode === 'iterator' ? e : mode === 'keys' ? e[0] : e[1]
test(`${mode}().nextv()`, async function (t) {
const it = db[mode]()
t.same(await it.nextv(1), [['foobatch1', 'bar1']].map(mapEntry))
t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch3', 'bar3']].map(mapEntry))
t.same(await it.nextv(2), [])
await it.close()
})
test(`${mode}().nextv() in reverse`, async function (t) {
const it = db[mode]({ reverse: true })
t.same(await it.nextv(1), [['foobatch3', 'bar3']].map(mapEntry))
t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch1', 'bar1']].map(mapEntry))
t.same(await it.nextv(2), [])
await it.close()
})
test(`${mode}().nextv() has soft minimum of 1`, async function (t) {
const it = db[mode]()
t.same(await it.nextv(0), [['foobatch1', 'bar1']].map(mapEntry))
t.same(await it.nextv(0), [['foobatch2', 'bar2']].map(mapEntry))
t.same(await it.nextv(0, {}), [['foobatch3', 'bar3']].map(mapEntry))
t.same(await it.nextv(0), [])
await it.close()
})
test(`${mode}().nextv() requesting more than available`, async function (t) {
const it = db[mode]()
t.same(await it.nextv(10), [
['foobatch1', 'bar1'],
['foobatch2', 'bar2'],
['foobatch3', 'bar3']
].map(mapEntry))
t.same(await it.nextv(10), [])
await it.close()
})
test(`${mode}().nextv() honors limit`, async function (t) {
const it = db[mode]({ limit: 2 })
t.same(await it.nextv(10), [['foobatch1', 'bar1'], ['foobatch2', 'bar2']].map(mapEntry))
t.same(await it.nextv(10), [])
await it.close()
})
test(`${mode}().nextv() honors limit and size`, async function (t) {
const it = db[mode]({ limit: 2 })
t.same(await it.nextv(1), [['foobatch1', 'bar1']].map(mapEntry))
t.same(await it.nextv(10), [['foobatch2', 'bar2']].map(mapEntry))
t.same(await it.nextv(10), [])
await it.close()
})
test(`${mode}().nextv() honors limit in reverse`, async function (t) {
const it = db[mode]({ limit: 2, reverse: true })
t.same(await it.nextv(10), [['foobatch3', 'bar3'], ['foobatch2', 'bar2']].map(mapEntry))
t.same(await it.nextv(10), [])
await it.close()
})
test(`${mode}().nextv() honors limit and size in reverse`, async function (t) {
const it = db[mode]({ limit: 2, reverse: true })
t.same(await it.nextv(1), [['foobatch3', 'bar3']].map(mapEntry))
t.same(await it.nextv(10), [['foobatch2', 'bar2']].map(mapEntry))
t.same(await it.nextv(10), [])
await it.close()
})
test(`${mode}().all()`, async function (t) {
t.same(await db[mode]().all(), [
['foobatch1', 'bar1'],
['foobatch2', 'bar2'],
['foobatch3', 'bar3']
].map(mapEntry))
t.same(await db[mode]().all({}), [
['foobatch1', 'bar1'],
['foobatch2', 'bar2'],
['foobatch3', 'bar3']
].map(mapEntry))
})
test(`${mode}().all() with keys: false`, async function (t) {
// keys option should be ignored on db.keys() and db.values()
t.same(await db[mode]({ keys: false }).all(), [
[mode === 'iterator' ? undefined : 'foobatch1', 'bar1'],
[mode === 'iterator' ? undefined : 'foobatch2', 'bar2'],
[mode === 'iterator' ? undefined : 'foobatch3', 'bar3']
].map(mapEntry))
})
test(`${mode}().all() with values: false`, async function (t) {
// values option should be ignored on db.keys() and db.values()
t.same(await db[mode]({ values: false }).all(), [
['foobatch1', mode === 'iterator' ? undefined : 'bar1'],
['foobatch2', mode === 'iterator' ? undefined : 'bar2'],
['foobatch3', mode === 'iterator' ? undefined : 'bar3']
].map(mapEntry))
})
test(`${mode}().all() in reverse`, async function (t) {
t.same(await db[mode]({ reverse: true }).all(), [
['foobatch3', 'bar3'],
['foobatch2', 'bar2'],
['foobatch1', 'bar1']
].map(mapEntry))
})
test(`${mode}().all() honors limit`, async function (t) {
t.same(await db[mode]({ limit: 2 }).all(), [
['foobatch1', 'bar1'],
['foobatch2', 'bar2']
].map(mapEntry))
const it = db[mode]({ limit: 2 })
t.same(await it.next(), mapEntry(['foobatch1', 'bar1']))
t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry))
})
test(`${mode}().all() honors limit in reverse`, async function (t) {
t.same(await db[mode]({ limit: 2, reverse: true }).all(), [
['foobatch3', 'bar3'],
['foobatch2', 'bar2']
].map(mapEntry))
const it = db[mode]({ limit: 2, reverse: true })
t.same(await it.next(), mapEntry(['foobatch3', 'bar3']))
t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry))
})
}
// NOTE: adapted from memdown
test('iterator() sorts lexicographically', async function (t) {
const db = testCommon.factory()
await db.open()
// Write in unsorted order with multiple operations
await db.put('f', 'F')
await db.put('a', 'A')
await db.put('~', '~')
await db.put('e', 'E')
await db.put('🐄', '🐄')
await db.batch([
{ type: 'put', key: 'd', value: 'D' },
{ type: 'put', key: 'b', value: 'B' },
{ type: 'put', key: 'ff', value: 'FF' },
{ type: 'put', key: 'a🐄', value: 'A🐄' }
])
await db.batch([
{ type: 'put', key: '', value: 'empty' },
{ type: 'put', key: '2', value: '2' },
{ type: 'put', key: '12', value: '12' },
{ type: 'put', key: '\t', value: '\t' }
])
t.same(await db.iterator().all(), [
['', 'empty'],
['\t', '\t'],
['12', '12'],
['2', '2'],
['a', 'A'],
['a🐄', 'A🐄'],
['b', 'B'],
['d', 'D'],
['e', 'E'],
['f', 'F'],
['ff', 'FF'],
['~', '~'],
['🐄', '🐄']
])
t.same(await db.iterator({ lte: '' }).all(), [
['', 'empty']
])
return db.close()
})
for (const keyEncoding of ['buffer', 'view']) {
if (!testCommon.supports.encodings[keyEncoding]) continue
test(`iterators have byte order (${keyEncoding} encoding)`, async function (t) {
const db = testCommon.factory({ keyEncoding })
await db.open()
const ctor = keyEncoding === 'buffer' ? Buffer : Uint8Array
const bytes = [2, 11, 1]
const keys = bytes.map(b => ctor.from([b]))
const values = bytes.map(b => String(b))
await db.batch(keys.map((key, i) => ({ type: 'put', key, value: values[i] })))
t.same((await db.keys().all()).map(k => k[0]), [1, 2, 11], 'order of keys() is ok')
t.same((await db.iterator().all()).map(e => e[0][0]), [1, 2, 11], 'order of iterator() is ok')
t.same(await db.values().all(), ['1', '2', '11'], 'order of values() is ok')
return db.close()
})
// NOTE: adapted from memdown and level-js
test(`iterator() with byte range (${keyEncoding} encoding)`, async function (t) {
const db = testCommon.factory({ keyEncoding })
await db.open()
await db.put(Uint8Array.from([0x0]), '0')
await db.put(Uint8Array.from([128]), '128')
await db.put(Uint8Array.from([160]), '160')
await db.put(Uint8Array.from([192]), '192')
const collect = async (range) => {
const entries = await db.iterator(range).all()
t.ok(entries.every(e => e[0] instanceof Uint8Array)) // True for both encodings
t.ok(entries.every(e => e[1] === String(e[0][0])))
return entries.map(e => e[0][0])
}
t.same(await collect({ gt: Uint8Array.from([255]) }), [])
t.same(await collect({ gt: Uint8Array.from([192]) }), [])
t.same(await collect({ gt: Uint8Array.from([160]) }), [192])
t.same(await collect({ gt: Uint8Array.from([128]) }), [160, 192])
t.same(await collect({ gt: Uint8Array.from([0x0]) }), [128, 160, 192])
t.same(await collect({ gt: Uint8Array.from([]) }), [0x0, 128, 160, 192])
t.same(await collect({ lt: Uint8Array.from([255]) }), [0x0, 128, 160, 192])
t.same(await collect({ lt: Uint8Array.from([192]) }), [0x0, 128, 160])
t.same(await collect({ lt: Uint8Array.from([160]) }), [0x0, 128])
t.same(await collect({ lt: Uint8Array.from([128]) }), [0x0])
t.same(await collect({ lt: Uint8Array.from([0x0]) }), [])
t.same(await collect({ lt: Uint8Array.from([]) }), [])
t.same(await collect({ gte: Uint8Array.from([255]) }), [])
t.same(await collect({ gte: Uint8Array.from([192]) }), [192])
t.same(await collect({ gte: Uint8Array.from([160]) }), [160, 192])
t.same(await collect({ gte: Uint8Array.from([128]) }), [128, 160, 192])
t.same(await collect({ gte: Uint8Array.from([0x0]) }), [0x0, 128, 160, 192])
t.same(await collect({ gte: Uint8Array.from([]) }), [0x0, 128, 160, 192])
t.same(await collect({ lte: Uint8Array.from([255]) }), [0x0, 128, 160, 192])
t.same(await collect({ lte: Uint8Array.from([192]) }), [0x0, 128, 160, 192])
t.same(await collect({ lte: Uint8Array.from([160]) }), [0x0, 128, 160])
t.same(await collect({ lte: Uint8Array.from([128]) }), [0x0, 128])
t.same(await collect({ lte: Uint8Array.from([0x0]) }), [0x0])
t.same(await collect({ lte: Uint8Array.from([]) }), [])
return db.close()
})
}
}
exports.decode = function (test, testCommon) {
for (const deferred of [false, true]) {
for (const mode of ['iterator', 'keys', 'values']) {
for (const method of ['next', 'nextv', 'all']) {
const requiredArgs = method === 'nextv' ? [1] : []
for (const encodingOption of ['keyEncoding', 'valueEncoding']) {
if (mode === 'keys' && encodingOption === 'valueEncoding') continue
if (mode === 'values' && encodingOption === 'keyEncoding') continue
// NOTE: adapted from encoding-down
test(`${mode}().${method}() catches decoding error from ${encodingOption} (deferred: ${deferred})`, async function (t) {
t.plan(4)
const encoding = {
format: 'utf8',
decode: function (x) {
t.is(x, encodingOption === 'keyEncoding' ? 'testKey' : 'testValue')
throw new Error('from encoding')
},
encode: identity
}
const db = testCommon.factory()
await db.put('testKey', 'testValue')
if (deferred) {
await db.close()
db.open().then(t.pass.bind(t))
} else {
t.pass('non-deferred')
}
const it = db[mode]({ [encodingOption]: encoding })
try {
await it[method](...requiredArgs)
} catch (err) {
t.is(err.code, 'LEVEL_DECODE_ERROR')
t.is(err.cause && err.cause.message, 'from encoding')
}
return db.close()
})
}
}
}
}
}
exports.tearDown = function (test, testCommon) {
test('iterator teardown', async function (t) {
return db.close()
})
}
exports.dispose = function (test, testCommon) {
// Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management)
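// A rough sketch of what this could look like once the syntax is available
// (assuming runtime support for `await using`):
//
//   await using iterator = db.iterator()
//   // iterator[Symbol.asyncDispose]() then runs automatically at block exit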
Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) {
const db = testCommon.factory()
await db.open()
const iterator = db.iterator()
await iterator[Symbol.asyncDispose]()
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.sequence(test, testCommon)
exports.iterator(test, testCommon)
exports.decode(test, testCommon)
exports.tearDown(test, testCommon)
exports.dispose(test, testCommon)
}

21
test/manifest-test.js Normal file
View File

@ -0,0 +1,21 @@
'use strict'
const suite = require('level-supports/test')
module.exports = function (test, testCommon) {
suite(test, testCommon)
test('manifest has expected properties', async function (t) {
const db = testCommon.factory()
t.is(db.supports.deferredOpen, true)
testCommon.supports = db.supports
t.ok(testCommon.supports, 'can be accessed via testCommon')
t.ok(db.supports.encodings.utf8, 'supports utf8')
t.ok(db.supports.encodings.json, 'supports json')
return db.close()
})
}

View File

@ -0,0 +1,39 @@
'use strict'
exports.createIfMissing = function (test, testCommon) {
test('open() with createIfMissing: false', async function (t) {
t.plan(2)
const db = testCommon.factory()
try {
await db.open({ createIfMissing: false })
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.ok(/does not exist/.test(err.cause.message), 'error is about dir not existing')
}
// Should be a noop
return db.close()
})
test('open() with createIfMissing: false via constructor', async function (t) {
t.plan(2)
const db = testCommon.factory({ createIfMissing: false })
try {
await db.open()
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.ok(/does not exist/.test(err.cause.message), 'error is about dir not existing')
}
// Should be a noop
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.createIfMissing(test, testCommon)
}

View File

@ -0,0 +1,25 @@
'use strict'
exports.errorIfExists = function (test, testCommon) {
test('open() with errorIfExists: true', async function (t) {
t.plan(2)
const db = testCommon.factory()
await db.open()
await db.close()
try {
await db.open({ createIfMissing: false, errorIfExists: true })
} catch (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.ok(/exists/.test(err.cause.message), 'error is about already existing')
}
// Should be a noop
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.errorIfExists(test, testCommon)
}

263
test/open-test.js Normal file
View File

@ -0,0 +1,263 @@
'use strict'
exports.open = function (test, testCommon) {
test('open() and close(), no options', async function (t) {
const db = testCommon.factory()
t.is(db.status, 'opening')
const promise1 = db.open()
t.is(db.status, 'opening')
await promise1
t.is(db.status, 'open')
const promise2 = db.close()
t.is(db.status, 'closing')
await promise2
t.is(db.status, 'closed')
})
test('open() and close(), with empty options', async function (t) {
const db = testCommon.factory()
await db.open({})
return db.close()
})
test('open(), close() and open()', async function (t) {
const db = testCommon.factory()
await db.open()
t.is(db.status, 'open')
await db.close()
t.is(db.status, 'closed')
await db.open()
t.is(db.status, 'open')
return db.close()
})
test('open() and close() in same tick', function (t) {
t.plan(5)
const db = testCommon.factory()
const order = []
db.open().then(function () {
order.push('A')
t.is(db.status, 'open', 'is open')
})
t.is(db.status, 'opening', 'is opening')
// This eventually wins from the open() call
db.close().then(function () {
order.push('B')
t.same(order, ['open event', 'A', 'closed event', 'B'], 'order is correct')
t.is(db.status, 'closed', 'is closed')
})
// But open() is still in progress
t.is(db.status, 'opening', 'is still opening')
db.on('open', () => { order.push('open event') })
db.on('closed', () => { order.push('closed event') })
})
test('open(), close() and open() in same tick', function (t) {
t.plan(8)
const db = testCommon.factory()
const order = []
db.open().then(function () {
order.push('A')
t.is(db.status, 'open', 'is open')
})
t.is(db.status, 'opening', 'is opening')
// This wins from the open() call
db.close().then(function () {
order.push('B')
t.is(db.status, 'closed', 'is closed')
})
t.is(db.status, 'opening', 'is still opening')
// This wins from the close() call
db.open().then(function () {
order.push('C')
t.same(order, ['open event', 'A', 'closed event', 'B', 'open event', 'C'], 'callback order is the same as call order')
t.is(db.status, 'open', 'is open')
db.close().then(() => t.pass('done'))
})
db.on('closed', () => { order.push('closed event') })
db.on('open', () => { order.push('open event') })
t.is(db.status, 'opening', 'is still opening')
})
test('open() if already open (sequential)', async function (t) {
t.plan(3)
const db = testCommon.factory()
await db.open()
t.is(db.status, 'open', 'is open')
const promise = db.open()
t.is(db.status, 'open', 'not reopening')
db.on('open', t.fail.bind(t))
await promise
t.is(db.status, 'open', 'is open')
return db.close()
})
test('open() if already opening (parallel)', function (t) {
t.plan(5)
const db = testCommon.factory()
let called = false
db.open().then(function () {
called = true
t.is(db.status, 'open')
})
db.open().then(function () {
t.is(db.status, 'open')
t.ok(called)
db.close().then(() => t.pass('done'))
})
t.is(db.status, 'opening')
})
test('close() if already closed', async function (t) {
t.plan(3)
const db = testCommon.factory()
await db.open()
await db.close()
t.is(db.status, 'closed', 'is closed')
const promise = db.close()
t.is(db.status, 'closed', 'not reclosing')
db.on('closed', t.fail.bind(t))
await promise
t.is(db.status, 'closed', 'still closed')
})
test('close() if new', function (t) {
t.plan(4)
const db = testCommon.factory()
t.is(db.status, 'opening', 'status ok')
db.close().then(function () {
t.is(db.status, 'closed', 'status ok')
})
// This behaves differently in abstract-level v1: status remains 'opening' because
// the db lets opening finish (or start, really) and only then closes the db.
t.is(db.status, 'closing', 'status ok')
if (!db.supports.deferredOpen) {
t.pass('skip')
db.on('closed', t.fail.bind(t, 'should not emit closed'))
} else {
db.on('closed', t.pass.bind(t, 'got closed event'))
}
})
for (const event of ['open', 'opening']) {
test(`close() on ${event} event`, function (t) {
t.plan(3)
const db = testCommon.factory()
const order = []
db.on(event, function () {
order.push(`${event} event`)
// This eventually wins from the in-progress open() call
db.close().then(function () {
order.push('B')
t.same(order, [`${event} event`, 'A', 'closed event', 'B'], 'order is correct')
t.is(db.status, 'closed', 'is closed')
}, t.fail.bind(t))
})
db.open().then(function () {
order.push('A')
t.is(db.status, 'open', 'is open')
}, t.fail.bind(t))
db.on('closed', () => { order.push('closed event') })
})
}
for (const event of ['closed', 'closing']) {
test(`open() on ${event} event`, function (t) {
t.plan(3)
const db = testCommon.factory()
const order = []
db.on(event, function () {
order.push(`${event} event`)
// This eventually wins from the in-progress close() call
db.open().then(function () {
order.push('B')
t.same(order, [`${event} event`, 'A', 'open event', 'B'], 'order is correct')
t.is(db.status, 'open', 'is open')
}, t.fail.bind(t))
})
db.close().then(function () {
order.push('A')
t.is(db.status, 'closed', 'is closed')
}, t.fail.bind(t))
db.on('open', () => { order.push('open event') })
})
}
test('passive open()', async function (t) {
t.plan(1)
const db = testCommon.factory()
await db.open({ passive: true }) // OK, already opening
await db.close()
await db.open({ passive: true }).catch(err => {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
})
await db.open()
await db.open({ passive: true }) // OK, already open
return db.close()
})
test('passive option is ignored if set in constructor options', async function (t) {
const db = testCommon.factory({ passive: true })
await new Promise((resolve) => db.once('open', resolve))
return db.close()
})
// Can't use the syntax yet (https://github.com/tc39/proposal-explicit-resource-management)
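// Sketch of the eventual usage, assuming runtime support for `await using`:
//
//   await using db = testCommon.factory()
//   // db[Symbol.asyncDispose]() (closing the db) then runs at block exit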
Symbol.asyncDispose && test('Symbol.asyncDispose', async function (t) {
const db = testCommon.factory()
await db.open()
await db[Symbol.asyncDispose]()
t.is(db.status, 'closed')
})
}
exports.all = function (test, testCommon) {
exports.open(test, testCommon)
}

86
test/put-get-del-test.js Normal file
View File

@ -0,0 +1,86 @@
'use strict'
const { Buffer } = require('buffer')
let db
function makeTest (test, type, key, value, expectedValue) {
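// The optional fifth argument overrides the value expected back from get();
// when omitted, the stringified input value is expected.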
const stringValue = arguments.length === 5 ? expectedValue : value.toString()
test('put(), get(), del() with ' + type, async function (t) {
await db.put(key, value)
t.is((await db.get(key)).toString(), stringValue)
await db.del(key)
t.is(await db.get(key), undefined, 'not found')
})
}
exports.setUp = function (test, testCommon) {
test('put(), get(), del() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.nonErrorKeys = function (test, testCommon) {
// valid falsey keys
makeTest(test, '`0` key', 0, 'foo 0')
makeTest(test, 'empty string key', '', 'foo')
// standard String key
makeTest(
test
, 'long String key'
, 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!'
, 'foo'
)
if (testCommon.supports.encodings.buffer) {
makeTest(test, 'Buffer key', Buffer.from('0080c0ff', 'hex'), 'foo')
makeTest(test, 'empty Buffer key', Buffer.alloc(0), 'foo')
}
// non-empty Array as a key
makeTest(test, 'Array key', [1, 2, 3, 4], 'foo')
}
exports.nonErrorValues = function (test, testCommon) {
// valid falsey values
makeTest(test, '`false` value', 'foo false', false)
makeTest(test, '`0` value', 'foo 0', 0)
makeTest(test, '`NaN` value', 'foo NaN', NaN)
// all of the following result in an empty-string value:
makeTest(test, 'empty String value', 'foo', '', '')
makeTest(test, 'empty Buffer value', 'foo', Buffer.alloc(0), '')
makeTest(test, 'empty Array value', 'foo', [], '')
// String value
makeTest(
test
, 'long String value'
, 'foo'
, 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!'
)
// Buffer value
if (testCommon.supports.encodings.buffer) {
makeTest(test, 'Buffer value', 'foo', Buffer.from('0080c0ff', 'hex'))
}
// non-empty Array as a value
makeTest(test, 'Array value', 'foo', [1, 2, 3, 4])
}
exports.tearDown = function (test, testCommon) {
test('put(), get(), del() teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.nonErrorKeys(test, testCommon)
exports.nonErrorValues(test, testCommon)
exports.tearDown(test, testCommon)
}

70
test/put-test.js Normal file
View File

@ -0,0 +1,70 @@
'use strict'
const { illegalKeys, illegalValues, assertPromise } = require('./util')
const traits = require('./traits')
let db
exports.setUp = function (test, testCommon) {
test('put() setup', async function (t) {
db = testCommon.factory()
return db.open()
})
}
exports.args = function (test, testCommon) {
test('put() with illegal keys', function (t) {
t.plan(illegalKeys.length * 2)
for (const { name, key } of illegalKeys) {
db.put(key, 'value').catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code')
})
}
})
test('put() with illegal values', function (t) {
t.plan(illegalValues.length * 2)
for (const { name, value } of illegalValues) {
db.put('key', value).catch(function (err) {
t.ok(err instanceof Error, name + ' - is Error')
t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code')
})
}
})
}
exports.put = function (test, testCommon) {
test('simple put()', async function (t) {
t.is(await assertPromise(db.put('foo', 'bar')), undefined, 'void promise')
t.is(await db.get('foo'), 'bar')
await db.put('foo', 'new')
t.is(await db.get('foo'), 'new', 'value was overwritten')
await db.put('bar', 'foo', {}) // same but with {}
t.is(await db.get('bar'), 'foo')
})
traits.open('put()', testCommon, async function (t, db) {
t.is(await assertPromise(db.put('foo', 'bar')), undefined, 'void promise')
t.is(await db.get('foo'), 'bar', 'value is ok')
})
traits.closed('put()', testCommon, async function (t, db) {
return db.put('foo', 'bar')
})
}
exports.tearDown = function (test, testCommon) {
test('put() teardown', async function (t) {
return db.close()
})
}
exports.all = function (test, testCommon) {
exports.setUp(test, testCommon)
exports.args(test, testCommon)
exports.put(test, testCommon)
exports.tearDown(test, testCommon)
}

905
test/self.js Normal file
View File

@ -0,0 +1,905 @@
'use strict'
const test = require('tape')
const isBuffer = require('is-buffer')
const { Buffer } = require('buffer')
const { AbstractLevel, AbstractChainedBatch } = require('..')
const { MinimalLevel, createSpy } = require('./util')
const getRangeOptions = require('../lib/range-options')
const testCommon = require('./common')({
test,
factory () {
return new AbstractLevel({ encodings: { utf8: true } })
}
})
const rangeOptions = ['gt', 'gte', 'lt', 'lte']
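// Create a subclass of the given abstract class with the provided (private)
// methods mixed into its prototype.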
function implement (ctor, methods) {
class Test extends ctor {}
for (const k in methods) {
Test.prototype[k] = methods[k]
}
return Test
}
/**
* Extensibility
*/
test('test core extensibility', function (t) {
const Test = implement(AbstractLevel)
const test = new Test({ encodings: { utf8: true } })
t.is(test.status, 'opening', 'status is opening')
t.end()
})
test('manifest is required', function (t) {
t.plan(3 * 2)
const Test = implement(AbstractLevel)
for (const args of [[], [null], [123]]) {
try {
// eslint-disable-next-line no-new
new Test(...args)
} catch (err) {
t.is(err.name, 'TypeError')
t.is(err.message, "The first argument 'manifest' must be an object")
}
}
})
test('test open() extensibility when new', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { createIfMissing: true, errorIfExists: false }
const Test = implement(AbstractLevel, { _open: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
t.is(spy.callCount, 1, 'got _open() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _open() was correct')
t.is(spy.getCall(0).args.length, 1, 'got one argument')
t.same(spy.getCall(0).args[0], expectedOptions, 'got default options argument')
const test2 = new Test({ encodings: { utf8: true } })
await test2.open({ options: 1 })
expectedOptions.options = 1
t.is(spy.callCount, 2, 'got _open() call')
t.is(spy.getCall(1).thisValue, test2, '`this` on _open() was correct')
t.is(spy.getCall(1).args.length, 1, 'got one argument')
t.same(spy.getCall(1).args[0], expectedOptions, 'got expected options argument')
})
test('test open() extensibility when open', function (t) {
t.plan(2)
const spy = createSpy(async function () {})
const Test = implement(AbstractLevel, { _open: spy })
const test = new Test({ encodings: { utf8: true } })
test.once('open', function () {
t.is(spy.callCount, 1, 'got _open() call')
test.open().then(function () {
t.is(spy.callCount, 1, 'did not get second _open() call')
})
})
})
test('test opening explicitly gives a chance to capture an error', async function (t) {
t.plan(3)
const spy = createSpy(async function (options) { throw new Error('_open error') })
const Test = implement(AbstractLevel, { _open: spy })
const test = new Test({ encodings: { utf8: true } })
try {
await test.open()
} catch (err) {
t.is(spy.callCount, 1, 'got _open() call')
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause.message, '_open error')
}
})
test('test constructor options are forwarded to open()', async function (t) {
const spy = createSpy(async function (options) { })
const Test = implement(AbstractLevel, { _open: spy })
const test = new Test({ encodings: { utf8: true } }, {
passive: true,
keyEncoding: 'json',
valueEncoding: 'json',
createIfMissing: false,
foo: 123
})
await test.open()
t.is(spy.callCount, 1, 'got _open() call')
t.same(spy.getCall(0).args[0], {
foo: 123,
createIfMissing: false,
errorIfExists: false
}, 'does not forward passive, keyEncoding and valueEncoding options')
})
test('test close() extensibility when open', async function (t) {
const spy = createSpy(async function () {})
const Test = implement(AbstractLevel, { _close: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.close()
t.is(spy.callCount, 1, 'got _close() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _close() was correct')
t.is(spy.getCall(0).args.length, 0, 'got 0 arguments')
})
test('test close() extensibility when new', async function (t) {
const spy = createSpy(async function () {})
const Test = implement(AbstractLevel, { _close: spy })
const test = new Test({ encodings: { utf8: true } })
await test.close()
t.is(spy.callCount, 0, 'not called because _open was never called')
})
test('test open(), close(), open() with twice failed open', function (t) {
t.plan(7)
const db = testCommon.factory()
const order = []
let opens = 0
db.on('open', t.fail.bind(t))
db.on('closed', t.fail.bind(t))
db._open = async function (options) {
t.pass('called')
throw new Error('test' + (++opens))
}
db._close = async function () {
t.fail('should not be called')
}
db.open().then(t.fail.bind(t), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause && err.cause.message, 'test1')
order.push('A')
})
db.close().then(function () {
order.push('B')
})
db.open().then(t.fail.bind(t), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause && err.cause.message, 'test2')
order.push('C')
t.same(order, ['A', 'B', 'C'], 'order is ok')
})
})
test('test open(), close(), open() with first failed open', function (t) {
t.plan(6)
const db = testCommon.factory()
const order = []
let opens = 0
db.on('open', () => { order.push('open event') })
db.on('closed', t.fail.bind(t, 'should not emit closed'))
db._open = async function (options) {
t.pass('called')
if (!opens++) throw new Error('test')
}
db.open().then(t.fail.bind(t, 'should not open'), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(db.status, 'closed')
order.push('A')
})
db.close().then(function () {
// Status is actually 'opening' due to the parallel open() call, which starts
// its work after close() finished but before this then() handler. Can't be helped.
// t.is(db.status, 'closed')
order.push('B')
})
db.open().then(function () {
t.is(db.status, 'open')
order.push('C')
t.same(order, ['A', 'B', 'open event', 'C'], 'order is ok')
})
})
test('test open(), close(), open() with second failed open', function (t) {
t.plan(8)
const db = testCommon.factory()
const order = []
let opens = 0
db.on('open', () => order.push('open event'))
db.on('closed', () => order.push('closed event'))
db._open = async function (options) {
t.pass('called')
if (opens++) throw new Error('test')
}
db.open().then(function () {
t.is(db.status, 'open')
order.push('A')
})
db.close().then(function () {
t.is(db.status, 'closed')
order.push('B')
})
db.open().then(t.fail.bind(t, 'should not open'), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause.message, 'test')
t.is(db.status, 'closed')
order.push('C')
t.same(order, ['open event', 'A', 'closed event', 'B', 'C'], 'order is ok')
})
})
test('open() error is combined with resource error', async function (t) {
t.plan(4)
const db = testCommon.factory()
const resource = db.iterator()
db._open = async function (options) {
throw new Error('error from open')
}
resource.close = async function () {
throw new Error('error from resource')
}
try {
await db.open()
} catch (err) {
t.is(db.status, 'closed')
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause.name, 'CombinedError')
t.is(err.cause.message, 'error from open; error from resource')
}
})
test('test get() extensibility', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { keyEncoding: 'utf8', valueEncoding: 'utf8' }
const expectedKey = 'a key'
const Test = implement(AbstractLevel, { _get: spy })
const test = new Test({ encodings: { utf8: true } }, { keyEncoding: 'utf8' })
await test.open()
await test.get(expectedKey)
t.is(spy.callCount, 1, 'got _get() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _get() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument')
t.same(spy.getCall(0).args[1], expectedOptions, 'got default options argument')
await test.get(expectedKey, { options: 1 })
expectedOptions.options = 1
t.is(spy.callCount, 2, 'got _get() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _get() was correct')
t.is(spy.getCall(1).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument')
t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument')
})
test('test getMany() extensibility', async function (t) {
const spy = createSpy(async () => ['x'])
const expectedOptions = { keyEncoding: 'utf8', valueEncoding: 'utf8' }
const expectedKey = 'a key'
const Test = implement(AbstractLevel, { _getMany: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.getMany([expectedKey])
t.is(spy.callCount, 1, 'got _getMany() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _getMany() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.same(spy.getCall(0).args[0], [expectedKey], 'got expected keys argument')
t.same(spy.getCall(0).args[1], expectedOptions, 'got default options argument')
await test.getMany([expectedKey], { options: 1 })
expectedOptions.options = 1
t.is(spy.callCount, 2, 'got _getMany() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _getMany() was correct')
t.is(spy.getCall(1).args.length, 2, 'got 2 arguments')
t.same(spy.getCall(1).args[0], [expectedKey], 'got expected key argument')
t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument')
})
test('test del() extensibility', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { options: 1, keyEncoding: 'utf8' }
const expectedKey = 'a key'
const Test = implement(AbstractLevel, { _del: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.del(expectedKey)
t.is(spy.callCount, 1, 'got _del() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _del() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument')
t.same(spy.getCall(0).args[1], { keyEncoding: 'utf8' }, 'got blank options argument')
await test.del(expectedKey, expectedOptions)
t.is(spy.callCount, 2, 'got _del() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _del() was correct')
t.is(spy.getCall(1).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument')
t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument')
})
test('test put() extensibility', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { options: 1, keyEncoding: 'utf8', valueEncoding: 'utf8' }
const expectedKey = 'a key'
const expectedValue = 'a value'
const Test = implement(AbstractLevel, { _put: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.put(expectedKey, expectedValue)
t.is(spy.callCount, 1, 'got _put() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _put() was correct')
t.is(spy.getCall(0).args.length, 3, 'got 3 arguments')
t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument')
t.is(spy.getCall(0).args[1], expectedValue, 'got expected value argument')
t.same(spy.getCall(0).args[2], { keyEncoding: 'utf8', valueEncoding: 'utf8' }, 'got default options argument')
await test.put(expectedKey, expectedValue, expectedOptions)
t.is(spy.callCount, 2, 'got _put() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _put() was correct')
t.is(spy.getCall(1).args.length, 3, 'got 3 arguments')
t.is(spy.getCall(1).args[0], expectedKey, 'got expected key argument')
t.is(spy.getCall(1).args[1], expectedValue, 'got expected value argument')
t.same(spy.getCall(1).args[2], expectedOptions, 'got expected options argument')
})
test('batch([]) extensibility', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { options: 1 }
const expectedArray = [
{ type: 'put', key: '1', value: '1', keyEncoding: 'utf8', valueEncoding: 'utf8' },
{ type: 'del', key: '2', keyEncoding: 'utf8' }
]
const Test = implement(AbstractLevel, { _batch: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.batch(expectedArray)
t.is(spy.callCount, 1, 'got _batch() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _batch() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.same(spy.getCall(0).args[0], expectedArray, 'got expected array argument')
t.same(spy.getCall(0).args[1], {}, 'got expected options argument')
await test.batch(expectedArray, expectedOptions)
t.is(spy.callCount, 2, 'got _batch() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _batch() was correct')
t.is(spy.getCall(1).args.length, 2, 'got 2 arguments')
t.same(spy.getCall(1).args[0], expectedArray.map(o => ({ ...expectedOptions, ...o })), 'got expected array argument')
t.same(spy.getCall(1).args[1], expectedOptions, 'got expected options argument')
await test.batch(expectedArray, null)
t.is(spy.callCount, 3, 'got _batch() call')
t.is(spy.getCall(2).thisValue, test, '`this` on _batch() was correct')
t.is(spy.getCall(2).args.length, 2, 'got 2 arguments')
t.same(spy.getCall(2).args[0], expectedArray, 'got expected array argument')
t.ok(spy.getCall(2).args[1], 'options should not be null')
})
test('batch([]) with empty array is a noop', function (t) {
t.plan(1)
const spy = createSpy()
const Test = implement(AbstractLevel, { _batch: spy })
const test = new Test({ encodings: { utf8: true } })
test.once('open', function () {
test.batch([]).then(function () {
t.is(spy.callCount, 0, '_batch() call was bypassed')
})
})
})
test('test chained batch() extensibility', async function (t) {
const spy = createSpy(async function () {})
const expectedOptions = { options: 1 }
const Test = implement(AbstractLevel, { _batch: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
await test.batch().put('foo', 'bar').del('bang').write()
t.is(spy.callCount, 1, 'got _batch() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _batch() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(0).args[0].length, 2, 'got expected array argument')
t.same(spy.getCall(0).args[0][0], { keyEncoding: 'utf8', valueEncoding: 'utf8', type: 'put', key: 'foo', value: 'bar' }, 'got expected array argument[0]')
t.same(spy.getCall(0).args[0][1], { keyEncoding: 'utf8', type: 'del', key: 'bang' }, 'got expected array argument[1]')
t.same(spy.getCall(0).args[1], {}, 'got expected options argument')
await test.batch().put('foo', 'bar', expectedOptions).del('bang', expectedOptions).write(expectedOptions)
t.is(spy.callCount, 2, 'got _batch() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _batch() was correct')
t.is(spy.getCall(1).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(1).args[0].length, 2, 'got expected array argument')
t.same(spy.getCall(1).args[0][0], { keyEncoding: 'utf8', valueEncoding: 'utf8', type: 'put', key: 'foo', value: 'bar', options: 1 }, 'got expected array argument[0]')
t.same(spy.getCall(1).args[0][1], { keyEncoding: 'utf8', type: 'del', key: 'bang', options: 1 }, 'got expected array argument[1]')
t.same(spy.getCall(1).args[1], { options: 1 }, 'got expected options argument')
})
test('test chained batch() with no operations is a noop', function (t) {
t.plan(1)
const spy = createSpy(async function () {})
const Test = implement(AbstractLevel, { _batch: spy })
const test = new Test({ encodings: { utf8: true } })
test.once('open', function () {
test.batch().write().then(function () {
t.is(spy.callCount, 0, '_batch() call was bypassed')
})
})
})
test('test chained batch() (custom _chainedBatch) extensibility', async function (t) {
const spy = createSpy()
const Test = implement(AbstractLevel, { _chainedBatch: spy })
const test = new Test({ encodings: { utf8: true } })
await test.open()
test.batch()
t.is(spy.callCount, 1, 'got _chainedBatch() call')
t.is(spy.getCall(0).thisValue, test, '`this` on _chainedBatch() was correct')
test.batch()
t.is(spy.callCount, 2, 'got _chainedBatch() call')
t.is(spy.getCall(1).thisValue, test, '`this` on _chainedBatch() was correct')
})
test('test AbstractChainedBatch extensibility', async function (t) {
const Batch = implement(AbstractChainedBatch)
const db = testCommon.factory()
await db.open()
const test = new Batch(db)
t.ok(test.db === db, 'instance has db reference')
})
test('test AbstractChainedBatch expects a db', function (t) {
t.plan(1)
const Test = implement(AbstractChainedBatch)
try {
// eslint-disable-next-line no-new
new Test()
} catch (err) {
t.is(err.message, 'The first argument must be an abstract-level database, received undefined')
}
})
test('test AbstractChainedBatch#write() extensibility', async function (t) {
t.plan(2)
const Test = implement(AbstractChainedBatch, {
async _write (options) {
t.same(options, {})
t.is(this, batch, 'thisArg on _write() is correct')
}
})
const db = testCommon.factory()
await db.open()
const batch = new Test(db)
// Without any operations, _write isn't called
batch.put('foo', 'bar')
return batch.write()
})
test('test AbstractChainedBatch#write() extensibility with null options', async function (t) {
t.plan(2)
const Test = implement(AbstractChainedBatch, {
async _write (options) {
t.same(options, {})
t.is(this, batch, 'thisArg on _write() is correct')
}
})
const db = testCommon.factory()
await db.open()
const batch = new Test(db)
// Without any operations, _write isn't called
batch.put('foo', 'bar')
return batch.write(null)
})
test('test AbstractChainedBatch#write() extensibility with options', async function (t) {
t.plan(2)
const Test = implement(AbstractChainedBatch, {
async _write (options) {
t.same(options, { test: true })
t.is(this, batch, 'thisArg on _write() is correct')
}
})
const db = testCommon.factory()
await db.open()
const batch = new Test(db)
// Without any operations, _write isn't called
batch.put('foo', 'bar')
return batch.write({ test: true })
})
test('test AbstractChainedBatch#put() extensibility', function (t) {
t.plan(8)
const spy = createSpy()
const expectedKey = 'key'
const expectedValue = 'value'
const Test = implement(AbstractChainedBatch, { _put: spy })
const db = testCommon.factory()
db.once('open', function () {
const test = new Test(db)
const returnValue = test.put(expectedKey, expectedValue)
t.is(spy.callCount, 1, 'got _put call')
t.is(spy.getCall(0).thisValue, test, '`this` on _put() was correct')
t.is(spy.getCall(0).args.length, 3, 'got 3 arguments')
t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument')
t.is(spy.getCall(0).args[1], expectedValue, 'got expected value argument')
// May contain more options, just because it's cheaper to not remove them
t.is(spy.getCall(0).args[2].keyEncoding, 'utf8', 'got expected keyEncoding option')
t.is(spy.getCall(0).args[2].valueEncoding, 'utf8', 'got expected valueEncoding option')
t.is(returnValue, test, 'get expected return value')
})
})
test('test AbstractChainedBatch#del() extensibility', function (t) {
t.plan(6)
const spy = createSpy()
const expectedKey = 'key'
const Test = implement(AbstractChainedBatch, { _del: spy })
const db = testCommon.factory()
db.once('open', function () {
const test = new Test(db)
const returnValue = test.del(expectedKey)
t.is(spy.callCount, 1, 'got _del call')
t.is(spy.getCall(0).thisValue, test, '`this` on _del() was correct')
t.is(spy.getCall(0).args.length, 2, 'got 2 arguments')
t.is(spy.getCall(0).args[0], expectedKey, 'got expected key argument')
// May contain more options, just because it's cheaper to not remove them
t.is(spy.getCall(0).args[1].keyEncoding, 'utf8', 'got expected keyEncoding option')
t.is(returnValue, test, 'get expected return value')
})
})
test('test AbstractChainedBatch#clear() extensibility', function (t) {
t.plan(4)
const spy = createSpy()
const Test = implement(AbstractChainedBatch, { _clear: spy })
const db = testCommon.factory()
db.once('open', function () {
const test = new Test(db)
const returnValue = test.clear()
t.is(spy.callCount, 1, 'got _clear call')
t.is(spy.getCall(0).thisValue, test, '`this` on _clear() was correct')
t.is(spy.getCall(0).args.length, 0, 'got zero arguments')
t.is(returnValue, test, 'get expected return value')
})
})
test('test clear() extensibility', async function (t) {
t.plan((7 * 4) - 3)
const spy = createSpy()
const Test = implement(AbstractLevel, { _clear: spy })
const db = new Test({ encodings: { utf8: true } })
await db.open()
call([], { keyEncoding: 'utf8', reverse: false, limit: -1 })
call([null], { keyEncoding: 'utf8', reverse: false, limit: -1 })
call([undefined], { keyEncoding: 'utf8', reverse: false, limit: -1 })
call([{ custom: 1 }], { custom: 1, keyEncoding: 'utf8', reverse: false, limit: -1 })
call([{ reverse: true, limit: 0 }], { keyEncoding: 'utf8', reverse: true, limit: 0 }, true)
call([{ reverse: 1 }], { keyEncoding: 'utf8', reverse: true, limit: -1 })
call([{ reverse: null }], { keyEncoding: 'utf8', reverse: false, limit: -1 })
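// Call db.clear() with the given arguments and verify what the _clear() spy
// received; the limit: 0 call is expected to skip _clear() entirely.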
function call (args, expectedOptions, shouldSkipCall) {
db.clear.apply(db, args).catch(t.fail.bind(t))
t.is(spy.callCount, shouldSkipCall ? 0 : 1, 'got _clear() call')
if (!shouldSkipCall) {
t.is(spy.getCall(0).thisValue, db, '`this` on _clear() was correct')
t.is(spy.getCall(0).args.length, 1, 'got 1 argument')
t.same(spy.getCall(0).args[0], expectedOptions, 'got expected options argument')
}
spy.resetHistory()
}
})
// TODO: replace with encoding test
test.skip('test serialization extensibility (batch array is not mutated)', function (t) {
t.plan(7)
const spy = createSpy()
const Test = implement(AbstractLevel, {
_batch: spy,
_serializeKey: function (key) {
t.is(key, 'no')
return 'foo'
},
_serializeValue: function (value) {
t.is(value, 'nope')
return 'bar'
}
})
const test = new Test({ encodings: { utf8: true } })
test.once('open', function () {
const op = { type: 'put', key: 'no', value: 'nope' }
test.batch([op], function () {})
t.is(spy.callCount, 1, 'got _batch() call')
t.is(spy.getCall(0).args[0][0].key, 'foo', 'got expected key')
t.is(spy.getCall(0).args[0][0].value, 'bar', 'got expected value')
t.is(op.key, 'no', 'did not mutate input key')
t.is(op.value, 'nope', 'did not mutate input value')
})
})
test('clear() does not delete empty or nullish range options', function (t) {
const rangeValues = [Uint8Array.from([]), '', null, undefined]
t.plan(rangeOptions.length * rangeValues.length)
rangeValues.forEach(function (value) {
const Test = implement(AbstractLevel, {
async _clear (options) {
rangeOptions.forEach(function (key) {
t.ok(key in options, key + ' option should not be deleted')
})
}
})
const db = new Test({ encodings: { utf8: true } })
const options = {}
rangeOptions.forEach(function (key) {
options[key] = value
})
db.once('open', function () {
db.clear(options).catch(t.fail.bind(t))
})
})
})
test('open error', function (t) {
t.plan(3)
const Test = implement(AbstractLevel, {
async _open (options) {
throw new Error('_open error')
}
})
const test = new Test({ encodings: { utf8: true } })
test.open().then(t.fail.bind(t), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
t.is(err.cause && err.cause.message, '_open error')
t.is(test.status, 'closed')
})
})
test('close error', function (t) {
t.plan(3)
const Test = implement(AbstractLevel, {
async _close () {
throw new Error('_close error')
}
})
const test = new Test({ encodings: { utf8: true } })
test.open().then(function () {
test.close().then(t.fail.bind(t), function (err) {
t.is(err.code, 'LEVEL_DATABASE_NOT_CLOSED')
t.is(err.cause && err.cause.message, '_close error')
t.is(test.status, 'open')
})
})
})
test('rangeOptions', function (t) {
const keys = rangeOptions.slice()
const db = new AbstractLevel({
encodings: {
utf8: true, buffer: true, view: true
}
})
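// Build an options object with every range option set to the value produced
// by the given factory.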
function setupOptions (create) {
const options = {}
for (const key of keys) {
options[key] = create()
}
return options
}
function verifyOptions (t, options) {
for (const key of keys) {
t.ok(key in options, key + ' option should not be deleted')
}
t.end()
}
t.plan(10)
t.test('setup', async (t) => db.open())
t.test('default options', function (t) {
t.same(getRangeOptions(undefined, db.keyEncoding('utf8')), {
reverse: false,
limit: -1
}, 'correct defaults')
t.end()
})
t.test('set options', function (t) {
t.same(getRangeOptions({ reverse: false, limit: 20 }, db.keyEncoding('utf8')), {
reverse: false,
limit: 20
}, 'options set correctly')
t.end()
})
t.test('ignores invalid limit', function (t) {
// Infinity is valid but is normalized to -1 for use in private API
for (const limit of [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, NaN, -2, 5.5]) {
t.same(getRangeOptions({ limit }, db.keyEncoding('utf8')).limit, -1)
}
t.end()
})
t.test('ignores not-own property', function (t) {
class Options {}
Options.prototype.limit = 20
const options = new Options()
t.is(options.limit, 20)
t.same(getRangeOptions(options, db.keyEncoding('utf8')), {
reverse: false,
limit: -1
})
t.end()
})
t.test('does not delete empty buffers', function (t) {
const options = setupOptions(() => Buffer.alloc(0))
keys.forEach(function (key) {
t.is(isBuffer(options[key]), true, 'should be buffer')
t.is(options[key].byteLength, 0, 'should be empty')
})
verifyOptions(t, getRangeOptions(options, db.keyEncoding('buffer')))
})
t.test('does not delete empty views', function (t) {
const options = setupOptions(() => Uint8Array.from([]))
keys.forEach(function (key) {
t.is(options[key] instanceof Uint8Array, true, 'should be Uint8Array')
t.is(options[key].byteLength, 0, 'should be empty')
})
verifyOptions(t, getRangeOptions(options, db.keyEncoding('view')))
})
t.test('does not delete empty strings', function (t) {
const options = setupOptions(() => '')
keys.forEach(function (key) {
t.is(typeof options[key], 'string', 'should be string')
t.is(options[key].length, 0, 'should be empty')
})
verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8')))
})
t.test('does not delete null', function (t) {
const options = setupOptions(() => null)
keys.forEach(function (key) {
t.is(options[key], null)
})
verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8')))
})
t.test('does not delete undefined', function (t) {
const options = setupOptions(() => undefined)
keys.forEach(function (key) {
t.is(options[key], undefined)
})
verifyOptions(t, getRangeOptions(options, db.keyEncoding('utf8')))
})
})
require('./self/deferred-queue-test')
require('./self/errors-test')
require('./self/defer-test')
require('./self/attach-resource-test')
require('./self/abstract-iterator-test')
require('./self/iterator-test')
require('./self/deferred-iterator-test')
require('./self/deferred-operations-test')
require('./self/async-iterator-test')
require('./self/encoding-test')
require('./self/sublevel-test')
// Test the abstract test suite using a minimal implementation
require('./index')({
test,
factory (options) {
return new MinimalLevel(options)
}
})

View File

@ -0,0 +1,182 @@
'use strict'
const test = require('tape')
const { AbstractLevel, AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
const testCommon = require('../common')({
test,
factory: function () {
return new AbstractLevel({ encodings: { utf8: true } })
}
})
for (const Ctor of [AbstractIterator, AbstractKeyIterator, AbstractValueIterator]) {
// Note, these tests don't create fully functional iterators, because they're not
// created via db.iterator() and therefore lack the options necessary to decode data.
// Not relevant for these tests.
test(`test ${Ctor.name} extensibility`, function (t) {
const Test = class TestIterator extends Ctor {}
const db = testCommon.factory()
const test = new Test(db, {})
t.ok(test.db === db, 'instance has db reference')
t.end()
})
test(`${Ctor.name} throws on invalid db argument`, function (t) {
t.plan(4 * 2)
for (const args of [[], [null], [undefined], 'foo']) {
const hint = args[0] === null ? 'null' : typeof args[0]
try {
// eslint-disable-next-line no-new
new Ctor(...args)
} catch (err) {
t.is(err.name, 'TypeError')
t.is(err.message, 'The first argument must be an abstract-level database, received ' + hint)
}
}
})
test(`${Ctor.name} throws on invalid options argument`, function (t) {
t.plan(4 * 2)
for (const args of [[], [null], [undefined], 'foo']) {
try {
// eslint-disable-next-line no-new
new Ctor({}, ...args)
} catch (err) {
t.is(err.name, 'TypeError')
t.is(err.message, 'The second argument must be an options object')
}
}
})
test(`${Ctor.name}.next() extensibility`, async function (t) {
t.plan(2)
class TestIterator extends Ctor {
async _next () {
t.is(this, it, 'thisArg on _next() was correct')
t.is(arguments.length, 0, 'got 0 arguments')
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.next()
await db.close()
})
test(`${Ctor.name}.nextv() extensibility`, async function (t) {
t.plan(4)
class TestIterator extends Ctor {
async _nextv (size, options) {
t.is(this, it, 'thisArg on _nextv() was correct')
t.is(arguments.length, 2, 'got 2 arguments')
t.is(size, 100)
t.same(options, {})
return []
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.nextv(100)
await db.close()
})
test(`${Ctor.name}.nextv() extensibility (options)`, async function (t) {
t.plan(2)
class TestIterator extends Ctor {
async _nextv (size, options) {
t.is(size, 100)
t.same(options, { foo: 123 }, 'got userland options')
return []
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.nextv(100, { foo: 123 })
return db.close()
})
test(`${Ctor.name}.all() extensibility`, async function (t) {
t.plan(2 * 3)
for (const args of [[], [{}]]) {
class TestIterator extends Ctor {
async _all (options) {
t.is(this, it, 'thisArg on _all() was correct')
t.is(arguments.length, 1, 'got 1 argument')
t.same(options, {}, 'got default options argument')
return []
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.all(...args)
await db.close()
}
})
test(`${Ctor.name}.all() extensibility (options)`, async function (t) {
t.plan(1)
class TestIterator extends Ctor {
async _all (options) {
t.same(options, { foo: 123 }, 'got userland options')
return []
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.all({ foo: 123 })
await db.close()
})
test(`${Ctor.name}.seek() throws if not implemented`, async function (t) {
t.plan(1)
const db = testCommon.factory()
await db.open()
const it = new Ctor(db, {})
try {
it.seek('123')
} catch (err) {
t.is(err.code, 'LEVEL_NOT_SUPPORTED')
}
return db.close()
})
test(`${Ctor.name}.close() extensibility`, async function (t) {
t.plan(2)
class TestIterator extends Ctor {
async _close () {
t.is(this, it, 'thisArg on _close() was correct')
t.is(arguments.length, 0, 'got 0 arguments')
}
}
const db = testCommon.factory()
await db.open()
const it = new TestIterator(db, {})
await it.close()
await db.close()
})
}

View File

@ -0,0 +1,242 @@
'use strict'
const test = require('tape')
const { AbstractLevel, AbstractIterator } = require('../..')
const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator')
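// Create a db whose iterators are backed by the given mock methods.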
function withIterator (methods) {
class TestIterator extends AbstractIterator { }
for (const k in methods) {
TestIterator.prototype[k] = methods[k]
}
class Test extends AbstractLevel {
_iterator (options) {
return new TestIterator(this, options)
}
}
return new Test({ encodings: { utf8: true } })
}
for (const mode of ['iterator', 'keys', 'values']) {
for (const type of ['explicit', 'deferred']) {
const verify = function (t, db, it) {
t.is(db.status, type === 'explicit' ? 'open' : 'opening')
if (type === 'explicit') {
t.is(
it.constructor.name,
mode === 'iterator' ? 'TestIterator' : mode === 'keys' ? 'DefaultKeyIterator' : 'DefaultValueIterator'
)
} else {
t.is(
it.constructor,
mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator
)
}
}
test(`for await...of ${mode}() (${type} open)`, async function (t) {
t.plan(4)
const input = [{ key: '1', value: '1' }, { key: '2', value: '2' }]
const output = []
const db = withIterator({
async _next () {
const entry = input[n++]
return entry ? [entry.key, entry.value] : undefined
},
async _close () {
// Wait a tick
await undefined
closed = true
}
})
if (type === 'explicit') await db.open()
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
verify(t, db, it)
let n = 0
let closed = false
for await (const item of it) {
output.push(item)
}
t.same(output, input.map(x => mode === 'iterator' ? [x.key, x.value] : mode === 'keys' ? x.key : x.value))
t.ok(closed, 'closed')
})
test(`for await...of ${mode}() closes on user error (${type} open)`, async function (t) {
t.plan(4)
const db = withIterator({
async _next () {
if (n++ > 10) throw new Error('Infinite loop')
return [n.toString(), n.toString()]
},
async _close () {
// Wait a tick
await undefined
closed = true
throw new Error('close error')
}
})
if (type === 'explicit') await db.open()
const it = db[mode]()
verify(t, db, it)
let n = 0
let closed = false
try {
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
for await (const kv of it) {
throw new Error('user error')
}
} catch (err) {
t.is(err.message, 'user error')
t.ok(closed, 'closed')
}
})
test(`for await...of ${mode}() closes on iterator error (${type} open)`, async function (t) {
t.plan(5)
const db = withIterator({
async _next () {
t.pass('nexted')
throw new Error('iterator error')
},
async _close () {
// Wait a tick
await undefined
closed = true
}
})
if (type === 'explicit') await db.open()
const it = db[mode]()
verify(t, db, it)
let closed = false
try {
// eslint-disable-next-line no-unused-vars
for await (const kv of it) {
t.fail('should not yield items')
}
} catch (err) {
t.is(err.message, 'iterator error')
t.ok(closed, 'closed')
}
})
test(`for await...of ${mode}() combines errors (${type} open)`, async function (t) {
t.plan(6)
const db = withIterator({
async _next () {
t.pass('nexted')
throw new Error('next error')
},
async _close () {
closed = true
throw new Error('close error')
}
})
if (type === 'explicit') await db.open()
const it = db[mode]()
verify(t, db, it)
let closed = false
try {
// eslint-disable-next-line no-unused-vars
for await (const kv of it) {
t.fail('should not yield items')
}
} catch (err) {
t.is(err.name, 'CombinedError')
t.is(err.message, 'next error; close error')
t.ok(closed, 'closed')
}
})
test(`for await...of ${mode}() closes on user break (${type} open)`, async function (t) {
t.plan(4)
const db = withIterator({
async _next () {
if (n++ > 10) throw new Error('Infinite loop')
return [n.toString(), n.toString()]
},
async _close () {
// Wait a tick
await undefined
closed = true
}
})
if (type === 'explicit') await db.open()
const it = db[mode]()
verify(t, db, it)
let n = 0
let closed = false
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
for await (const kv of it) {
t.pass('got a chance to break')
break
}
t.ok(closed, 'closed')
})
test(`for await...of ${mode}() closes on user return (${type} open)`, async function (t) {
t.plan(4)
const db = withIterator({
async _next () {
if (n++ > 10) throw new Error('Infinite loop')
return [n.toString(), n.toString()]
},
async _close () {
// Wait a tick
await undefined
closed = true
}
})
if (type === 'explicit') await db.open()
const it = db[mode]()
verify(t, db, it)
let n = 0
let closed = false
await (async () => {
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
for await (const kv of it) {
t.pass('got a chance to return')
return
}
})()
t.ok(closed, 'closed')
})
}
}

View File

@ -0,0 +1,74 @@
'use strict'
const test = require('tape')
const { mockLevel } = require('../util')
test('resource must be an object with a close() method', async function (t) {
t.plan(4)
const db = mockLevel()
for (const invalid of [null, undefined, {}, { close: 123 }]) {
try {
db.attachResource(invalid)
} catch (err) {
t.is(err && err.message, 'The first argument must be a resource object')
}
}
return db.close()
})
test('resource is closed on failed open', function (t) {
t.plan(2)
const db = mockLevel({
async _open (options) {
t.pass('opened')
throw new Error('_open error')
}
})
const resource = {
async close () {
// Note: resource shouldn't care about db.status
t.is(arguments.length, 0)
}
}
db.attachResource(resource)
})
for (const open of [true, false]) {
test(`resource is closed on db.close() (explicit open: ${open})`, async function (t) {
t.plan(1)
const db = mockLevel()
const resource = {
async close () {
// Note: resource shouldn't care about db.status
t.pass('closed')
}
}
if (open) await db.open()
db.attachResource(resource)
return db.close()
})
test(`resource is not closed on db.close() if detached (explicit open: ${open})`, async function (t) {
const db = mockLevel()
const resource = {
async close () {
t.fail('should not be called')
}
}
if (open) await db.open()
db.attachResource(resource)
db.detachResource(resource)
return db.close()
})
}

140
test/self/defer-test.js Normal file
View File

@ -0,0 +1,140 @@
'use strict'
const test = require('tape')
const { mockLevel } = require('../util')
test('defer() and deferAsync() require valid function argument', async function (t) {
t.plan(6 * 2)
const db = mockLevel()
for (const invalid of [123, true, false, null, undefined, {}]) {
try {
db.defer(invalid)
} catch (err) {
t.is(err.message, 'The first argument must be a function')
}
try {
await db.deferAsync(invalid)
} catch (err) {
t.is(err.message, 'The first argument must be a function')
}
}
return db.close()
})
test('defer() custom operation', async function (t) {
t.plan(3)
const db = mockLevel({
custom (arg) {
t.is(this.status, 'opening')
t.is(arg, 123)
this.defer(() => {
t.is(this.status, 'open')
})
}
})
db.custom(123)
await db.open()
return db.close()
})
test('deferAsync() custom operation', async function (t) {
t.plan(4)
const db = mockLevel({
async custom (arg) {
if (this.status === 'opening') {
t.is(arg, 123)
return this.deferAsync(() => this.custom(456))
} else {
t.is(db.status, 'open')
t.is(arg, 456)
return 987
}
}
})
const result = await db.custom(123)
t.is(result, 987, 'result ok')
return db.close()
})
test('deferAsync() custom operation with promise rejection', async function (t) {
t.plan(4)
const db = mockLevel({
async custom (arg) {
if (this.status === 'opening') {
t.is(arg, 123)
return this.deferAsync(() => this.custom(456))
} else {
t.is(db.status, 'open')
t.is(arg, 456)
throw new Error('test')
}
}
})
try {
await db.custom(123)
} catch (err) {
t.is(err.message, 'test', 'got error')
}
return db.close()
})
test('deferAsync() custom operation with failed open', async function (t) {
t.plan(3)
const db = mockLevel({
async _open (options) {
t.pass('opened')
throw new Error('_open error')
},
async custom (arg) {
if (this.status === 'opening') {
return this.deferAsync(() => this.custom(arg))
} else {
t.is(db.status, 'closed')
throw new Error('Database is not open (from custom)')
}
}
})
try {
await db.custom()
} catch (err) {
t.is(err.message, 'Database is not open (from custom)')
}
})
test('defer() can drop custom synchronous operation', function (t) {
t.plan(3)
const db = mockLevel({
async _open (options) {
t.pass('opened')
throw new Error('_open error')
},
custom (arg) {
if (this.status === 'opening') {
this.defer(() => this.custom(arg * 2))
} else {
// Handling other states is a userland responsibility
t.is(db.status, 'closed')
t.is(arg, 246)
}
}
})
db.custom(123)
})

View File

@ -0,0 +1,314 @@
'use strict'
const test = require('tape')
const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator')
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
const { mockLevel } = require('../util')
const noop = () => {}
const identity = (v) => v
for (const mode of ['iterator', 'keys', 'values']) {
const RealCtor = mode === 'iterator' ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
const DeferredCtor = mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator
const nextArg = mode === 'iterator' ? ['key', 'value'] : mode === 'keys' ? 'key' : 'value'
const privateMethod = '_' + mode
const publicMethod = mode
// NOTE: adapted from deferred-leveldown
test(`deferred ${mode}().next()`, async function (t) {
t.plan(5)
const keyEncoding = {
format: 'utf8',
encode (key) {
t.is(key, 'foo', 'encoding got key')
return key.toUpperCase()
},
decode: identity
}
class MockIterator extends RealCtor {
async _next () {
return nextArg
}
async _close () {}
}
const db = mockLevel({
[privateMethod]: function (options) {
t.is(options.gt, 'FOO', 'got encoded range option')
return new MockIterator(this, options)
},
async _open (options) {
t.pass('opened')
}
}, { encodings: { utf8: true } }, {
keyEncoding
})
const it = db[publicMethod]({ gt: 'foo' })
t.ok(it instanceof DeferredCtor, 'is deferred')
t.is(await it.next(), nextArg)
return it.close()
})
// NOTE: adapted from deferred-leveldown
test(`deferred ${mode}(): non-deferred operations`, async function (t) {
t.plan(3)
class MockIterator extends RealCtor {
_seek (target) {
t.is(target, '123')
}
async _next () {
return nextArg
}
}
const db = mockLevel({
[privateMethod]: function (options) {
return new MockIterator(this, options)
}
})
const it = db[publicMethod]({ gt: 'foo' })
t.ok(it instanceof DeferredCtor)
await db.open()
it.seek(123)
t.is(await it.next(), nextArg)
return it.close()
})
// NOTE: adapted from deferred-leveldown
test(`deferred ${mode}(): iterators are created in order`, function (t) {
t.plan(4)
const order1 = []
const order2 = []
class MockIterator extends RealCtor {}
function db (order) {
return mockLevel({
[privateMethod]: function (options) {
order.push('iterator created')
return new MockIterator(this, options)
},
async _put (key, value, options) {
order.push('put')
}
})
}
const db1 = db(order1)
const db2 = db(order2)
db1.open().then(function () {
t.same(order1, ['iterator created', 'put'])
})
db2.open().then(function () {
t.same(order2, ['put', 'iterator created'])
})
t.ok(db1[publicMethod]() instanceof DeferredCtor)
db1.put('key', 'value', noop)
db2.put('key', 'value', noop)
t.ok(db2[publicMethod]() instanceof DeferredCtor)
})
for (const method of ['next', 'nextv', 'all']) {
test(`deferred ${mode}(): closed upon failed open, verified by ${method}()`, async function (t) {
t.plan(5)
const db = mockLevel({
async _open (options) {
t.pass('opening')
throw new Error('_open error')
},
_iterator () {
t.fail('should not be called')
},
[privateMethod] () {
t.fail('should not be called')
}
})
const it = db[publicMethod]()
t.ok(it instanceof DeferredCtor)
const original = it._close
it._close = async function (...args) {
t.pass('closed')
return original.call(this, ...args)
}
return verifyClosed(t, it, method)
})
test(`deferred ${mode}(): deferred and real iterators are closed on db.close(), verified by ${method}()`, async function (t) {
t.plan(7)
class MockIterator extends RealCtor {
async _close () {
t.pass('closed')
}
}
const db = mockLevel({
[privateMethod] (options) {
return new MockIterator(this, options)
}
})
const it = db[publicMethod]()
t.ok(it instanceof DeferredCtor)
const original = it._close
it._close = async function (...args) {
t.pass('closed')
return original.call(this, ...args)
}
await db.open()
await db.close()
await verifyClosed(t, it, method)
await db.open()
// Should still be closed
await verifyClosed(t, it, method)
return db.close()
})
}
test(`deferred ${mode}(): deferred and real iterators are detached on db.close()`, async function (t) {
class MockIterator extends RealCtor {}
let real
const db = mockLevel({
[privateMethod] (options) {
real = new MockIterator(this, options)
return real
}
})
const it = db[publicMethod]()
t.ok(it instanceof DeferredCtor)
const calls = []
const wrap = (obj, name) => {
const original = obj.close
obj.close = async function (...args) {
calls.push(name)
return original.apply(this, args)
}
}
// First open(), to also create the real iterator.
await db.open()
wrap(it, 'deferred')
wrap(real, 'real')
await db.close()
// There may have been 2 real.close() calls: one by the db closing resources, and
// another by the deferred iterator that wraps real. Not relevant for this test.
t.same(calls.splice(0, calls.length).slice(0, 2), ['deferred', 'real'])
// Reopen. Resources should be detached at this point.
await db.open()
await db.close()
// So close() should not have been called again.
t.same(calls, [], 'no new calls')
})
test(`deferred ${mode}(): defers underlying close()`, function (t) {
t.plan(2)
class MockIterator extends RealCtor {
async _close () {
order.push('_close')
}
}
const order = []
const db = mockLevel({
async _open (options) {
order.push('_open')
},
[privateMethod] (options) {
order.push(privateMethod)
return new MockIterator(this, options)
}
})
const it = db[publicMethod]()
t.ok(it instanceof DeferredCtor)
it.close().then(function () {
t.same(order, ['_open', privateMethod, '_close'])
})
})
globalThis.AbortController && test(`deferred ${mode}(): skips real iterator if aborted`, function (t) {
t.plan(3)
const order = []
const db = mockLevel({
async _open (options) {
order.push('_open')
},
[privateMethod] (options) {
t.fail('should not be called')
}
})
const ac = new globalThis.AbortController()
const it = db[publicMethod]({ signal: ac.signal })
t.ok(it instanceof DeferredCtor)
// Test synchronous call, which should be silently skipped on abort
it.seek('foo')
// Test asynchronous call, which should be rejected
it.next().then(t.fail.bind(t, 'should not succeed'), function (err) {
t.is(err.code, 'LEVEL_ABORTED')
})
// Signal should prevent real iterator from being created.
ac.abort()
it.close().then(function () {
t.same(order, ['_open'])
})
})
const verifyClosed = async function (t, it, method) {
const requiredArgs = method === 'nextv' ? [10] : []
try {
await it[method](...requiredArgs)
t.fail('should not succeed')
} catch (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on first ${method}()`)
}
try {
await it[method](...requiredArgs)
t.fail('should not succeed')
} catch (err) {
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on second ${method}()`)
}
}
}


@ -0,0 +1,86 @@
'use strict'
const test = require('tape')
const { mockLevel, mockIterator } = require('../util')
// NOTE: copied from deferred-leveldown
test('deferred operations are called in order', function (t) {
t.plan(3)
const calls = []
const db = mockLevel({
async _put (key, value, options) {
calls.push({ type: 'put', key, value, options })
},
async _get (key, options) {
calls.push({ type: 'get', key, options })
},
async _del (key, options) {
calls.push({ type: 'del', key, options })
},
async _batch (arr, options) {
calls.push({ type: 'batch', keys: arr.map(op => op.key).join(',') })
},
async _clear (options) {
calls.push({ ...options, type: 'clear' })
},
_iterator (options) {
calls.push({ type: 'iterator' })
return mockIterator(this, options, {
async _next () {
calls.push({ type: 'iterator.next' })
}
})
},
async _open (options) {
t.is(calls.length, 0, 'not yet called')
}
}, {
encodings: {
utf8: true,
buffer: true
}
}, {
keyEncoding: 'utf8',
valueEncoding: 'utf8'
})
db.open().then(function () {
t.same(calls, [
{ type: 'put', key: '001', value: 'bar1', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
{ type: 'get', key: '002', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
{ type: 'clear', reverse: false, limit: -1, keyEncoding: 'utf8' },
{ type: 'put', key: '010', value: 'bar2', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
{ type: 'get', key: Buffer.from('011'), options: { keyEncoding: 'buffer', valueEncoding: 'utf8' } },
{ type: 'del', key: '020', options: { customOption: 123, keyEncoding: 'utf8' } },
{ type: 'del', key: '021', options: { keyEncoding: 'utf8' } },
{ type: 'batch', keys: '040,041' },
{ type: 'iterator' },
{ type: 'batch', keys: '050,051' },
{ type: 'iterator.next' },
{ type: 'clear', gt: '060', reverse: false, limit: -1, keyEncoding: 'utf8' }
], 'calls correctly behaved')
})
// We have dangling promises here, but it's a self test, so no worries.
db.put('001', 'bar1')
db.get('002')
db.clear()
db.put('010', 'bar2')
db.get('011', { keyEncoding: 'buffer' })
db.del('020', { customOption: 123 })
db.del('021')
db.batch([
{ type: 'put', key: '040', value: 'a' },
{ type: 'put', key: '041', value: 'b' }
])
const it = db.iterator()
db.batch([
{ type: 'put', key: '050', value: 'c' },
{ type: 'put', key: '051', value: 'd' }
])
it.next()
db.clear({ gt: '060' })
t.is(calls.length, 0, 'not yet called')
})


@ -0,0 +1,93 @@
'use strict'
const test = require('tape')
const { DeferredQueue } = require('../../lib/deferred-queue')
const supported = !!globalThis.AbortController
test('DeferredQueue calls operations in FIFO order', async function (t) {
const queue = new DeferredQueue()
const calls = []
queue.add(() => { calls.push(1) })
queue.add(() => { calls.push(2) })
queue.add(() => { calls.push(3) })
queue.drain()
t.same(calls, [1, 2, 3])
})
test('DeferredQueue only calls operation once', async function (t) {
const queue = new DeferredQueue()
let calls = 0
queue.add(() => { calls++ })
queue.drain()
t.same(calls, 1)
queue.drain()
t.same(calls, 1, 'no new calls')
})
supported && test('DeferredQueue does not add operation if given an aborted signal', async function (t) {
const ac = new globalThis.AbortController()
const queue = new DeferredQueue()
const calls = []
ac.abort()
queue.add((abortError) => { calls.push(abortError) }, { signal: ac.signal })
t.is(calls.length, 1)
t.is(calls[0].code, 'LEVEL_ABORTED')
queue.drain()
t.is(calls.length, 1, 'not called again')
})
supported && test('DeferredQueue aborts operation on signal abort', async function (t) {
const ac1 = new globalThis.AbortController()
const ac2 = new globalThis.AbortController()
const queue = new DeferredQueue()
const calls = []
queue.add((abortError) => { calls.push([1, abortError]) }, { signal: ac1.signal })
queue.add((abortError) => { calls.push([2, abortError]) }, { signal: ac2.signal })
t.is(calls.length, 0, 'not yet called')
ac1.abort()
t.is(calls.length, 1, 'called')
t.is(calls[0][0], 1, 'signal1')
t.is(calls[0][1].code, 'LEVEL_ABORTED')
ac2.abort()
t.is(calls.length, 2, 'called')
t.is(calls[1][0], 2, 'signal2')
t.is(calls[1][1].code, 'LEVEL_ABORTED')
queue.drain()
ac2.abort()
t.is(calls.length, 2, 'not called again')
})
supported && test('DeferredQueue calls operation if signal is not aborted', async function (t) {
const ac1 = new globalThis.AbortController()
const ac2 = new globalThis.AbortController()
const queue = new DeferredQueue()
const calls = []
queue.add((abortError) => { calls.push([1, abortError]) }, { signal: ac1.signal })
queue.add((abortError) => { calls.push([2, abortError]) }, { signal: ac2.signal })
t.is(calls.length, 0, 'not yet called')
queue.drain()
t.is(calls.length, 2, 'called')
t.is(calls[0][0], 1, 'signal1')
t.is(calls[0][1], undefined, 'no abort error')
t.is(calls[1][0], 2, 'signal2')
t.is(calls[1][1], undefined, 'no abort error')
queue.drain()
ac1.abort()
ac2.abort()
t.is(calls.length, 2, 'not called again')
})

391
test/self/encoding-test.js Normal file

@ -0,0 +1,391 @@
'use strict'
// TODO: move to per-method test files
const test = require('tape')
const { Buffer } = require('buffer')
const { mockLevel, mockChainedBatch, nullishEncoding } = require('../util')
const identity = (v) => v
const utf8Manifest = { encodings: { utf8: true } }
const dualManifest = { encodings: { utf8: true, buffer: true } }
const hasOwnProperty = Object.prototype.hasOwnProperty
for (const deferred of [false, true]) {
// NOTE: adapted from encoding-down
test(`get() encodes utf8 key (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _get (key, options) {
t.is(key, '8')
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
return 'foo'
}
}, utf8Manifest)
if (!deferred) await db.open()
t.same(await db.get(8), 'foo')
})
// NOTE: adapted from encoding-down
test(`get() takes encoding options (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _get (key, options) {
t.is(key, '[1,"2"]')
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
return '123'
}
}, utf8Manifest)
if (!deferred) await db.open()
t.same(await db.get([1, '2'], { keyEncoding: 'json', valueEncoding: 'json' }), 123)
})
// NOTE: adapted from encoding-down
test(`get() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) {
t.plan(3)
const db = mockLevel({
async _get (key, options) {
t.same(key, 'key')
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' })
return Buffer.alloc(1)
}
}, dualManifest, {
keyEncoding: 'utf8',
valueEncoding: { encode: identity, decode: identity, format: 'buffer' }
})
if (!deferred) await db.open()
t.same(await db.get('key'), Buffer.alloc(1))
})
// NOTE: adapted from encoding-down
test(`get() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) {
t.plan(3)
const db = mockLevel({
async _get (key, options) {
t.same(key, Buffer.from('key'))
t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' })
return 'x'
}
}, dualManifest, {
keyEncoding: 'buffer',
valueEncoding: { encode: identity, decode: identity, format: 'utf8' }
})
if (!deferred) await db.open()
t.same(await db.get(Buffer.from('key')), 'x')
})
// NOTE: adapted from encoding-down
test(`put() encodes utf8 key and value (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _put (key, value, options) {
t.is(key, '8')
t.is(value, '4')
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.put(8, 4)
})
// NOTE: adapted from encoding-down
test(`put() takes encoding options (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _put (key, value, options) {
t.is(key, '[1,"2"]')
t.is(value, '{"x":3}')
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.put([1, '2'], { x: 3 }, { keyEncoding: 'json', valueEncoding: 'json' })
})
// NOTE: adapted from encoding-down
test(`del() encodes utf8 key (deferred: ${deferred})`, async function (t) {
t.plan(2)
const db = mockLevel({
async _del (key, options) {
t.is(key, '2')
t.is(options.keyEncoding, 'utf8')
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.del(2)
})
// NOTE: adapted from encoding-down
test(`del() takes keyEncoding option (deferred: ${deferred})`, async function (t) {
t.plan(2)
const db = mockLevel({
async _del (key, options) {
t.is(key, '[1,"2"]')
t.is(options.keyEncoding, 'utf8')
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.del([1, '2'], { keyEncoding: 'json' })
})
test(`getMany() encodes utf8 key (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _getMany (keys, options) {
t.same(keys, ['8', '29'])
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
return ['foo', 'bar']
}
}, utf8Manifest)
if (!deferred) await db.open()
t.same(await db.getMany([8, 29]), ['foo', 'bar'])
})
test(`getMany() takes encoding options (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _getMany (keys, options) {
t.same(keys, ['[1,"2"]', '"x"'])
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
return ['123', '"hi"']
}
}, utf8Manifest)
if (!deferred) await db.open()
t.same(await db.getMany([[1, '2'], 'x'], { keyEncoding: 'json', valueEncoding: 'json' }), [123, 'hi'])
})
test(`getMany() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) {
t.plan(3)
const db = mockLevel({
async _getMany (keys, options) {
t.same(keys, ['key'])
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' })
return [Buffer.alloc(1)]
}
}, dualManifest, {
keyEncoding: 'utf8',
valueEncoding: { encode: identity, decode: identity, format: 'buffer' }
})
if (!deferred) await db.open()
t.same(await db.getMany(['key']), [Buffer.alloc(1)])
})
test(`getMany() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) {
t.plan(3)
const db = mockLevel({
async _getMany (keys, options) {
t.same(keys, [Buffer.from('key')])
t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' })
return ['x']
}
}, dualManifest, {
keyEncoding: 'buffer',
valueEncoding: { encode: identity, decode: identity, format: 'utf8' }
})
if (!deferred) await db.open()
t.same(await db.getMany([Buffer.from('key')]), ['x'])
})
// NOTE: adapted from encoding-down
deferred || test('chainedBatch.put() and del() encode utf8 key and value', async function (t) {
t.plan(5)
const db = mockLevel({
_chainedBatch () {
return mockChainedBatch(this, {
_put: function (key, value, options) {
t.same({ key, value }, { key: '1', value: '2' })
// May contain additional options just because it's cheaper to not remove them
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
},
_del: function (key, options) {
t.is(key, '3')
t.is(options.keyEncoding, 'utf8')
}
})
}
}, utf8Manifest)
await db.open()
await db.batch().put(1, 2).del(3).write()
})
// NOTE: adapted from encoding-down
deferred || test('chainedBatch.put() and del() take encoding options', async function (t) {
t.plan(5)
const putOptions = { keyEncoding: 'json', valueEncoding: 'json' }
const delOptions = { keyEncoding: 'json' }
const db = mockLevel({
_chainedBatch () {
return mockChainedBatch(this, {
_put: function (key, value, options) {
t.same({ key, value }, { key: '"1"', value: '{"x":[2]}' })
// May contain additional options just because it's cheaper to not remove them
t.is(options.keyEncoding, 'utf8')
t.is(options.valueEncoding, 'utf8')
},
_del: function (key, options) {
t.is(key, '"3"')
t.is(options.keyEncoding, 'utf8')
}
})
}
}, utf8Manifest)
await db.open()
await db.batch().put('1', { x: [2] }, putOptions).del('3', delOptions).write()
})
// NOTE: adapted from encoding-down
test(`clear() receives keyEncoding option (deferred: ${deferred})`, async function (t) {
t.plan(1)
const db = mockLevel({
async _clear (options) {
t.same(options, { keyEncoding: 'utf8', reverse: false, limit: -1 })
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.clear()
})
test(`clear() takes keyEncoding option (deferred: ${deferred})`, async function (t) {
t.plan(1)
const db = mockLevel({
async _clear (options) {
t.same(options, { keyEncoding: 'utf8', gt: '"a"', reverse: false, limit: -1 })
}
}, utf8Manifest)
if (!deferred) await db.open()
await db.clear({ keyEncoding: 'json', gt: 'a' })
})
// NOTE: adapted from encoding-down
test(`clear() encodes range options (deferred: ${deferred})`, async function (t) {
t.plan(5)
const keyEncoding = {
format: 'utf8',
encode: function (key) {
return 'encoded_' + key
},
decode: identity
}
const db = mockLevel({
async _clear (options) {
t.is(options.gt, 'encoded_1')
t.is(options.gte, 'encoded_2')
t.is(options.lt, 'encoded_3')
t.is(options.lte, 'encoded_4')
t.is(options.foo, 5)
}
}, utf8Manifest, { keyEncoding })
if (!deferred) await db.open()
await db.clear({ gt: 1, gte: 2, lt: 3, lte: 4, foo: 5 })
})
// NOTE: adapted from encoding-down
test(`clear() does not strip nullish range options (deferred: ${deferred})`, async function (t) {
t.plan(12)
const db1 = mockLevel({
async _clear (options) {
t.is(options.gt, '\x00', 'encoded null')
t.is(options.gte, '\x00', 'encoded null')
t.is(options.lt, '\x00', 'encoded null')
t.is(options.lte, '\x00', 'encoded null')
}
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
const db2 = mockLevel({
async _clear (options) {
t.is(hasOwnProperty.call(options, 'gt'), true)
t.is(hasOwnProperty.call(options, 'gte'), true)
t.is(hasOwnProperty.call(options, 'lt'), true)
t.is(hasOwnProperty.call(options, 'lte'), true)
t.is(options.gt, '\xff', 'encoded undefined')
t.is(options.gte, '\xff', 'encoded undefined')
t.is(options.lt, '\xff', 'encoded undefined')
t.is(options.lte, '\xff', 'encoded undefined')
}
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
if (!deferred) {
await Promise.all([db1.open(), db2.open()])
}
const promise1 = db1.clear({
gt: null,
gte: null,
lt: null,
lte: null
})
const promise2 = db2.clear({
gt: undefined,
gte: undefined,
lt: undefined,
lte: undefined
})
await Promise.all([promise1, promise2])
})
// NOTE: adapted from encoding-down
test(`clear() does not add nullish range options (deferred: ${deferred})`, async function (t) {
t.plan(4)
const db = mockLevel({
async _clear (options) {
t.is(hasOwnProperty.call(options, 'gt'), false)
t.is(hasOwnProperty.call(options, 'gte'), false)
t.is(hasOwnProperty.call(options, 'lt'), false)
t.is(hasOwnProperty.call(options, 'lte'), false)
}
})
if (!deferred) await db.open()
await db.clear({})
})
}

11
test/self/errors-test.js Normal file

@ -0,0 +1,11 @@
'use strict'
const test = require('tape')
const { AbortError } = require('../../lib/errors')
test('AbortError', function (t) {
const err = new AbortError()
t.is(err.code, 'LEVEL_ABORTED')
t.is(err.name, 'AbortError')
t.end()
})

1050
test/self/iterator-test.js Normal file

File diff suppressed because it is too large

1039
test/self/sublevel-test.js Normal file

File diff suppressed because it is too large

209
test/sublevel-test.js Normal file

@ -0,0 +1,209 @@
'use strict'
const { Buffer } = require('buffer')
exports.all = function (test, testCommon) {
for (const deferred of [false, true]) {
// NOTE: adapted from subleveldown
test(`sublevel.clear() (deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
const sub1 = db.sublevel('1')
const sub2 = db.sublevel('2')
if (!deferred) await sub1.open()
if (!deferred) await sub2.open()
await populate([sub1, sub2], ['a', 'b'])
await verify(['!1!a', '!1!b', '!2!a', '!2!b'])
await clear([sub1], {})
await verify(['!2!a', '!2!b'])
await populate([sub1], ['a', 'b'])
await clear([sub2], { lt: 'b' })
await verify(['!1!a', '!1!b', '!2!b'])
await db.close()
async function populate (subs, items) {
return Promise.all(subs.map(sub => {
return sub.batch(items.map(function (item) {
return { type: 'put', key: item, value: item }
}))
}))
}
async function clear (subs, opts) {
return Promise.all(subs.map(sub => {
return sub.clear(opts)
}))
}
async function verify (expected) {
const keys = await db.keys().all()
t.same(keys, expected)
}
})
}
for (const method of ['batch', 'chained batch']) {
test(`${method} with descendant sublevel option`, async function (t) {
t.plan(25)
const db = testCommon.factory()
await db.open()
const a = db.sublevel('a')
const b = a.sublevel('b')
const c = b.sublevel('c')
await Promise.all([a.open(), b.open(), c.open()])
// Note: may return a transcoder encoding
const utf8 = db.keyEncoding('utf8')
const put = method === 'batch'
? (db, key, opts) => db.batch([{ type: 'put', key, value: 'x', ...opts }])
: (db, key, opts) => db.batch().put(key, key, opts).write()
const del = method === 'batch'
? (db, key, opts) => db.batch([{ type: 'del', key, ...opts }])
: (db, key, opts) => db.batch().del(key, opts).write()
// Note: not entirely a noop. Use of sublevel option triggers data to be encoded early
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('1'), 'got put 1'))
await put(db, '1', { sublevel: db })
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!2'), 'got put 2'))
await put(db, '2', { sublevel: a })
await put(a, '2', { sublevel: a }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!3'), 'got put 3'))
await put(db, '3', { sublevel: b })
await put(a, '3', { sublevel: b }) // Same
await put(b, '3', { sublevel: b }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!!c!4'), 'got put 4'))
await put(db, '4', { sublevel: c })
await put(a, '4', { sublevel: c }) // Same
await put(b, '4', { sublevel: c }) // Same
await put(c, '4', { sublevel: c }) // Same
t.same(await db.keys().all(), ['!a!!b!!c!4', '!a!!b!3', '!a!2', '1'], 'db has entries')
t.same(await a.keys().all(), ['!b!!c!4', '!b!3', '2'], 'sublevel a has entries')
t.same(await b.keys().all(), ['!c!4', '3'], 'sublevel b has entries')
t.same(await c.keys().all(), ['4'], 'sublevel c has entries')
// Test deletes
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('1'), 'got del 1'))
await del(db, '1', { sublevel: db })
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!2'), 'got del 2'))
await del(db, '2', { sublevel: a })
await del(a, '2', { sublevel: a }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!3'), 'got del 3'))
await del(db, '3', { sublevel: b })
await del(a, '3', { sublevel: b }) // Same
await del(b, '3', { sublevel: b }) // Same
db.removeAllListeners('write')
db.on('write', (ops) => t.same(ops[0].key, utf8.encode('!a!!b!!c!4'), 'got del 4'))
await del(db, '4', { sublevel: c })
await del(a, '4', { sublevel: c }) // Same
await del(b, '4', { sublevel: c }) // Same
await del(c, '4', { sublevel: c }) // Same
t.same(await db.keys().all(), [], 'db has no entries')
return db.close()
})
// See https://github.com/Level/abstract-level/issues/80
test(`${method} with nondescendant sublevel option`, async function (t) {
const db = testCommon.factory()
await db.open()
const a = db.sublevel('a')
const b = db.sublevel('b')
await Promise.all([a.open(), b.open()])
// The b sublevel is not a descendant of a, so the sublevel option
// has to be forwarded to db so that the key gets the correct prefix.
if (method === 'batch') {
await a.batch([{ type: 'put', key: 'k', value: 'v', sublevel: b }])
} else {
await a.batch().put('k', 'v', { sublevel: b }).write()
}
t.same(await db.keys().all(), ['!b!k'], 'written to sublevel b')
})
}
for (const deferred of [false, true]) {
for (const keyEncoding of ['buffer', 'view']) {
if (!testCommon.supports.encodings[keyEncoding]) continue
// NOTE: adapted from subleveldown. See https://github.com/Level/subleveldown/issues/87
test(`iterate sublevel keys with bytes above 196 (${keyEncoding}, deferred: ${deferred})`, async function (t) {
const db = testCommon.factory()
const sub1 = db.sublevel('a', { keyEncoding })
const sub2 = db.sublevel('b', { keyEncoding })
const length = (db) => db.keys().all().then(x => x.length)
if (!deferred) await sub1.open()
if (!deferred) await sub2.open()
const batch1 = []
const batch2 = []
const keys = []
// TODO: write before creating the sublevels, to make the deferred test more meaningful
for (let i = 0; i < 256; i++) {
const key = keyEncoding === 'buffer' ? Buffer.from([i]) : new Uint8Array([i])
keys.push(key)
batch1.push({ type: 'put', key, value: 'aa' })
batch2.push({ type: 'put', key, value: 'bb' })
}
await Promise.all([sub1.batch(batch1), sub2.batch(batch2)])
const entries1 = await sub1.iterator().all()
const entries2 = await sub2.iterator().all()
t.is(entries1.length, 256, 'sub1 yielded all entries')
t.is(entries2.length, 256, 'sub2 yielded all entries')
t.ok(entries1.every(x => x[1] === 'aa'))
t.ok(entries2.every(x => x[1] === 'bb'))
const many1 = await sub1.getMany(keys)
const many2 = await sub2.getMany(keys)
t.is(many1.length, 256, 'sub1 yielded all values')
t.is(many2.length, 256, 'sub2 yielded all values')
t.ok(many1.every(x => x === 'aa'))
t.ok(many2.every(x => x === 'bb'))
const singles1 = await Promise.all(keys.map(k => sub1.get(k)))
const singles2 = await Promise.all(keys.map(k => sub2.get(k)))
t.is(singles1.length, 256, 'sub1 yielded all values')
t.is(singles2.length, 256, 'sub2 yielded all values')
t.ok(singles1.every(x => x === 'aa'))
t.ok(singles2.every(x => x === 'bb'))
await sub1.clear()
t.same(await length(sub1), 0, 'cleared sub1')
t.same(await length(sub2), 256, 'did not clear sub2')
await db.close()
})
}
}
}

42
test/traits/closed.js Normal file

@ -0,0 +1,42 @@
'use strict'
module.exports = function (name, testCommon, run) {
const test = testCommon.test
for (const deferred of [false, true]) {
test(`${name} on closed db fails (deferred open: ${deferred})`, async function (t) {
let error
const db = testCommon.factory()
if (!deferred) await db.open()
await db.close()
try {
await run(t, db)
} catch (err) {
error = err
}
t.is(error && error.code, 'LEVEL_DATABASE_NOT_OPEN')
})
test(`${name} on closing db fails (deferred open: ${deferred})`, async function (t) {
let error
const db = testCommon.factory()
if (!deferred) await db.open()
const promise = db.close()
try {
await run(t, db)
} catch (err) {
error = err
}
await promise
t.is(error && error.code, 'LEVEL_DATABASE_NOT_OPEN')
})
}
}

4
test/traits/index.js Normal file

@ -0,0 +1,4 @@
'use strict'
exports.open = require('./open')
exports.closed = require('./closed')

62
test/traits/open.js Normal file

@ -0,0 +1,62 @@
'use strict'
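// Usage sketch (hypothetical caller): wraps an arbitrary database operation so that
// it runs against open, opening, reopened and reopening databases:
//
//   const traits = require('./traits')
//   traits.open('clear()', testCommon, async (t, db) => db.clear())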
module.exports = function (name, testCommon, options, run) {
if (typeof options === 'function') {
run = options
options = {}
}
const test = testCommon.test
const deferred = options.deferred !== false
test(`${name} on open db`, async function (t) {
const db = testCommon.factory()
await db.open()
t.is(db.status, 'open')
await run(t, db)
t.is(db.status, 'open')
return db.close()
})
deferred && test(`${name} on opening db`, async function (t) {
const db = testCommon.factory()
t.is(db.status, 'opening')
await run(t, db)
t.is(db.status, 'open')
return db.close()
})
test(`${name} on reopened db`, async function (t) {
const db = testCommon.factory()
await db.close()
t.is(db.status, 'closed')
await db.open()
t.is(db.status, 'open')
await run(t, db)
t.is(db.status, 'open')
return db.close()
})
deferred && test(`${name} on reopening db`, async function (t) {
const db = testCommon.factory()
await db.close()
t.is(db.status, 'closed')
const promise = db.open()
t.is(db.status, 'opening')
await run(t, db)
t.is(db.status, 'open')
await promise
return db.close()
})
}

269
test/util.js Normal file

@ -0,0 +1,269 @@
'use strict'
const { AbstractLevel, AbstractChainedBatch, AbstractSnapshot } = require('..')
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('..')
const noop = function () {}
exports.illegalKeys = [
{ name: 'null key', key: null },
{ name: 'undefined key', key: undefined }
]
exports.illegalValues = [
{ name: 'null value', value: null },
{ name: 'undefined value', value: undefined }
]
// Utility to ensure we're not fooled by `await 123`. Instead do `await assertPromise(123)`
exports.assertPromise = function (p) {
if (typeof p !== 'object' || p === null || typeof p.then !== 'function') {
throw new TypeError('Expected a promise')
}
return p
}
exports.mockLevel = function (methods, ...args) {
class TestLevel extends AbstractLevel {}
for (const k in methods) TestLevel.prototype[k] = methods[k]
if (!args.length) args = [{ encodings: { utf8: true } }]
return new TestLevel(...args)
}
exports.mockIterator = function (db, options, methods, ...args) {
class TestIterator extends AbstractIterator {}
for (const k in methods) TestIterator.prototype[k] = methods[k]
return new TestIterator(db, options, ...args)
}
exports.mockChainedBatch = function (db, methods, ...args) {
class TestBatch extends AbstractChainedBatch {}
for (const k in methods) TestBatch.prototype[k] = methods[k]
return new TestBatch(db, ...args)
}
// Mock encoding where null and undefined are significant types
exports.nullishEncoding = {
name: 'nullish',
format: 'utf8',
encode (v) {
return v === null ? '\x00' : v === undefined ? '\xff' : String(v)
},
decode (v) {
return v === '\x00' ? null : v === '\xff' ? undefined : v
}
}
// Replacement for sinon package (which breaks too often, on features we don't use)
exports.createSpy = function (fn = noop) {
let calls = []
const spy = function (...args) {
const returnValue = fn(...args)
calls.push({ thisValue: this, args, returnValue })
spy.callCount++
return returnValue
}
spy.callCount = 0
spy.getCall = function (n) {
return calls[n]
}
spy.resetHistory = function () {
calls = []
spy.callCount = 0
}
return spy
}
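// Usage sketch:
//
//   const spy = createSpy((a, b) => a + b)
//   spy(1, 2) // Returns 3
//   spy.callCount // 1
//   spy.getCall(0) // { thisValue: undefined, args: [1, 2], returnValue: 3 }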
const kEntries = Symbol('entries')
const kPosition = Symbol('position')
const kOptions = Symbol('options')
/**
* A minimal and non-optimized implementation for use in tests. Only supports utf8.
* Don't use this as a reference implementation.
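*
* A usage sketch (relying on the deferred open of AbstractLevel):
*
*   const db = new MinimalLevel()
*   await db.put('a', '1')
*   await db.get('a') // '1'
*   await db.close()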
*/
class MinimalLevel extends AbstractLevel {
constructor (options) {
super({
encodings: { utf8: true },
seek: true,
has: true,
explicitSnapshots: true
}, options)
this[kEntries] = new Map()
}
async _put (key, value, options) {
this[kEntries].set(key, value)
}
async _get (key, options) {
const entries = (options.snapshot || this)[kEntries]
// Is undefined if not found
return entries.get(key)
}
async _getMany (keys, options) {
const entries = (options.snapshot || this)[kEntries]
return keys.map(k => entries.get(k))
}
async _has (key, options) {
const entries = (options.snapshot || this)[kEntries]
return entries.has(key)
}
async _hasMany (keys, options) {
const entries = (options.snapshot || this)[kEntries]
return keys.map(k => entries.has(k))
}
async _del (key, options) {
this[kEntries].delete(key)
}
async _clear (options) {
const entries = (options.snapshot || this)[kEntries]
for (const [k] of sliceEntries(entries, options, true)) {
this[kEntries].delete(k)
}
}
async _batch (operations, options) {
const entries = new Map(this[kEntries])
for (const op of operations) {
if (op.type === 'put') entries.set(op.key, op.value)
else entries.delete(op.key)
}
this[kEntries] = entries
}
_iterator (options) {
return new MinimalIterator(this, options)
}
_keys (options) {
return new MinimalKeyIterator(this, options)
}
_values (options) {
return new MinimalValueIterator(this, options)
}
_snapshot (options) {
return new MinimalSnapshot(this, options)
}
}
class MinimalSnapshot extends AbstractSnapshot {
constructor (db, options) {
super(options)
this[kEntries] = new Map(db[kEntries])
}
}
class MinimalIterator extends AbstractIterator {
constructor (db, options) {
super(db, options)
const entries = (options.snapshot || db)[kEntries]
this[kEntries] = sliceEntries(entries, options, false)
this[kOptions] = options
this[kPosition] = 0
}
}
class MinimalKeyIterator extends AbstractKeyIterator {
constructor (db, options) {
super(db, options)
const entries = (options.snapshot || db)[kEntries]
this[kEntries] = sliceEntries(entries, options, false)
this[kOptions] = options
this[kPosition] = 0
}
}
class MinimalValueIterator extends AbstractValueIterator {
constructor (db, options) {
super(db, options)
const entries = (options.snapshot || db)[kEntries]
this[kEntries] = sliceEntries(entries, options, false)
this[kOptions] = options
this[kPosition] = 0
}
}
for (const Ctor of [MinimalIterator, MinimalKeyIterator, MinimalValueIterator]) {
const mapEntry = Ctor === MinimalIterator ? e => e.slice() : Ctor === MinimalKeyIterator ? e => e[0] : e => e[1]
Ctor.prototype._next = async function () {
const entry = this[kEntries][this[kPosition]++]
if (entry === undefined) return undefined
return mapEntry(entry)
}
Ctor.prototype._nextv = async function (size, options) {
const entries = this[kEntries].slice(this[kPosition], this[kPosition] + size)
this[kPosition] += entries.length
return entries.map(mapEntry)
}
Ctor.prototype._all = async function (options) {
const end = this.limit - this.count + this[kPosition]
const entries = this[kEntries].slice(this[kPosition], end)
this[kPosition] = this[kEntries].length
return entries.map(mapEntry)
}
Ctor.prototype._seek = function (target, options) {
this[kPosition] = this[kEntries].length
if (!outOfRange(target, this[kOptions])) {
// Don't care about performance here
for (let i = 0; i < this[kPosition]; i++) {
const key = this[kEntries][i][0]
if (this[kOptions].reverse ? key <= target : key >= target) {
this[kPosition] = i
}
}
}
}
}
const outOfRange = function (target, options) {
if ('gte' in options) {
if (target < options.gte) return true
} else if ('gt' in options) {
if (target <= options.gt) return true
}
if ('lte' in options) {
if (target > options.lte) return true
} else if ('lt' in options) {
if (target >= options.lt) return true
}
return false
}
const sliceEntries = function (entries, options, applyLimit) {
entries = Array.from(entries)
.filter((e) => !outOfRange(e[0], options))
.sort((a, b) => a[0] > b[0] ? 1 : a[0] < b[0] ? -1 : 0)
if (options.reverse) entries.reverse()
if (applyLimit && options.limit !== -1) entries = entries.slice(0, options.limit)
return entries
}
exports.MinimalLevel = MinimalLevel

7
tsconfig.json Normal file

@ -0,0 +1,7 @@
{
"extends": "@voxpelli/tsconfig/node16.json",
"compilerOptions": {
"checkJs": false
},
"include": ["*.ts", "types/*.ts"]
}

126
types/abstract-chained-batch.d.ts vendored Normal file

@ -0,0 +1,126 @@
import * as Transcoder from 'level-transcoder'
import { AbstractSublevel } from './abstract-sublevel'
import { AbstractResource } from './interfaces'
export class AbstractChainedBatch<TDatabase, KDefault, VDefault>
implements AbstractResource {
constructor (db: TDatabase)
/**
* A reference to the database that created this chained batch.
*/
db: TDatabase
/**
* The number of queued operations on the current batch.
*/
get length (): number
/**
* Queue a _put_ operation on this batch, not committed until {@link write} is
* called.
*/
put (key: KDefault, value: VDefault): this
put<K = KDefault, V = VDefault> (
key: K,
value: V,
options: AbstractChainedBatchPutOptions<TDatabase, K, V>
): this
/**
* Queue a _del_ operation on this batch, not committed until {@link write} is
* called.
*/
del (key: KDefault): this
del<K = KDefault> (key: K, options: AbstractChainedBatchDelOptions<TDatabase, K>): this
/**
* Clear all queued operations on this batch.
*/
clear (): this
/**
* Commit the queued operations for this batch. All operations will be written
* atomically, that is, they will either all succeed or fail with no partial
* commits.
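*
* @example
* A usage sketch, assuming `db` is an abstract-level database:
* ```js
* await db.batch().put('a', '1').put('b', '2').del('c').write()
* ```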
*/
write (): Promise<void>
write (options: AbstractChainedBatchWriteOptions): Promise<void>
/**
* Free up underlying resources. This should be done even if the chained batch has
* zero queued operations. Automatically called by {@link write} so normally not
* necessary to call, unless the intent is to discard a chained batch without
* committing it.
*/
close (): Promise<void>
/**
* Close the batch.
*/
[Symbol.asyncDispose](): Promise<void>
}
/**
* Options for the {@link AbstractChainedBatch.put} method.
*/
export interface AbstractChainedBatchPutOptions<TDatabase, K, V> {
/**
* Custom key encoding for this _put_ operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Custom value encoding for this _put_ operation, used to encode the `value`.
*/
valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined
/**
* Act as though the _put_ operation is performed on the given sublevel, to similar
* effect as:
*
* ```js
* await sublevel.batch().put(key, value).write()
* ```
*
* This allows atomically committing data to multiple sublevels. The `key` will be
* prefixed with the `prefix` of the sublevel, and the `key` and `value` will be
* encoded by the sublevel (using the default encodings of the sublevel unless
* {@link keyEncoding} and / or {@link valueEncoding} are provided).
*/
sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}
/**
* Options for the {@link AbstractChainedBatch.del} method.
*/
export interface AbstractChainedBatchDelOptions<TDatabase, K> {
/**
* Custom key encoding for this _del_ operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Act as though the _del_ operation is performed on the given sublevel, to similar
* effect as:
*
* ```js
* await sublevel.batch().del(key).write()
* ```
*
* This allows atomically committing data to multiple sublevels. The `key` will be
* prefixed with the `prefix` of the sublevel, and the `key` will be encoded by the
* sublevel (using the default key encoding of the sublevel unless {@link keyEncoding}
* is provided).
*/
sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}
/**
* Options for the {@link AbstractChainedBatch.write} method.
*/
// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface AbstractChainedBatchWriteOptions {
// There are no abstract options but implementations may add theirs.
}

241
types/abstract-iterator.d.ts vendored Normal file

@ -0,0 +1,241 @@
import * as Transcoder from 'level-transcoder'
import { AbstractReadOptions, AbstractResource, RangeOptions } from './interfaces'
declare interface CommonIteratorOptions extends AbstractReadOptions {
/**
* An [`AbortSignal`][1] to abort read operations on the iterator.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
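*
* @example
* A sketch of aborting reads:
* ```js
* const ac = new AbortController()
* const iterator = db.iterator({ signal: ac.signal })
* ac.abort() // Pending reads reject with a LEVEL_ABORTED error
* ```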
*/
signal?: AbortSignal | undefined
}
export interface AbstractIteratorOptions<K, V> extends RangeOptions<K>, CommonIteratorOptions {
/**
* Whether to return the key of each entry. Defaults to `true`. If set to `false`,
* the iterator will yield keys that are `undefined`.
*/
keys?: boolean | undefined
/**
* Whether to return the value of each entry. Defaults to `true`. If set to
* `false`, the iterator will yield values that are `undefined`.
*/
values?: boolean | undefined
/**
* Custom key encoding for this iterator, used to encode range options, to encode
* {@link AbstractIterator.seek} targets and to decode keys.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Custom value encoding for this iterator, used to decode values.
*/
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}
export interface AbstractKeyIteratorOptions<K> extends RangeOptions<K>, CommonIteratorOptions {
/**
* Custom key encoding for this iterator, used to encode range options, to encode
* {@link AbstractKeyIterator.seek} targets and to decode keys.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
}
export interface AbstractValueIteratorOptions<K, V> extends RangeOptions<K>, CommonIteratorOptions {
/**
* Custom key encoding for this iterator, used to encode range options and
* {@link AbstractValueIterator.seek} targets.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Custom value encoding for this iterator, used to decode values.
*/
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}
/**
* @template TDatabase Type of the database that created this iterator.
* @template T Type of items yielded. Items can be entries, keys or values.
*/
declare class CommonIterator<TDatabase, T> implements AbstractResource {
/**
* A reference to the database that created this iterator.
*/
db: TDatabase
/**
* Read-only getter that indicates how many items have been yielded so far (by any
* method) excluding calls that errored or yielded `undefined`.
*/
get count (): number
/**
* Read-only getter that reflects the `limit` that was set in options. Greater than or
* equal to zero. Equals {@link Infinity} if no limit.
*/
get limit (): number
[Symbol.asyncIterator] (): AsyncGenerator<T, void, unknown>
/**
* Free up underlying resources. Not necessary to call if [`for await...of`][1] or
* `all()` is used.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*/
close (): Promise<void>
/**
* Close the iterator.
*/
[Symbol.asyncDispose](): Promise<void>
}
export class AbstractIterator<TDatabase, K, V> extends CommonIterator<TDatabase, [K, V]> {
constructor (db: TDatabase, options: AbstractIteratorOptions<K, V>)
/**
* Advance to the next entry and yield that entry. When possible, prefer to use
* [`for await...of`][1] instead.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*/
next (): Promise<[K, V] | undefined>
/**
* Advance repeatedly and get at most {@link size} entries in a single call.
* Can be faster than repeated {@link next()} calls. The natural end of the iterator
* will be signaled by yielding an empty array.
*
* @param size Get at most this many entries. Has a soft minimum of 1.
* @param options Options (none at the moment, reserved for future use).
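*
* @example
* A usage sketch:
* ```js
* const iterator = db.iterator()
* const entries = await iterator.nextv(10)
* // E.g. [['a', '1'], ['b', '2']]; an empty array signals the natural end
* await iterator.close()
* ```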
*/
nextv (size: number, options: {}): Promise<Array<[K, V]>>
nextv (size: number): Promise<Array<[K, V]>>
/**
* Advance repeatedly and get all (remaining) entries as an array, automatically
* closing the iterator. Assumes that those entries fit in memory. If that's not the
* case, instead use {@link next()}, {@link nextv()} or [`for await...of`][1].
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*
* @param options Options (none at the moment, reserved for future use).
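*
* @example
* A usage sketch, reading at most 100 entries:
* ```js
* const entries = await db.iterator({ limit: 100 }).all()
* ```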
*/
all (options: {}): Promise<Array<[K, V]>>
all (): Promise<Array<[K, V]>>
/**
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
* loop) will yield entries with keys equal to or larger than {@link target}, or equal
* to or smaller than {@link target} if the {@link AbstractIteratorOptions.reverse}
* option was true.
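*
* @example
* A usage sketch:
* ```js
* const iterator = db.iterator()
* iterator.seek('foo')
* const entry = await iterator.next() // First entry with a key >= 'foo', if any
* await iterator.close()
* ```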
*/
seek (target: K): void
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
}
export class AbstractKeyIterator<TDatabase, K> extends CommonIterator<TDatabase, K> {
constructor (db: TDatabase, options: AbstractKeyIteratorOptions<K>)
/**
* Advance to the next key and yield that key. When possible, prefer to use
* [`for await...of`][1] instead.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*/
next (): Promise<K | undefined>
/**
* Advance repeatedly and get at most {@link size} keys in a single call. Can
* be faster than repeated {@link next()} calls. The natural end of the iterator will
* be signaled by yielding an empty array.
*
* @param size Get at most this many keys. Has a soft minimum of 1.
* @param options Options (none at the moment, reserved for future use).
*/
nextv (size: number, options: {}): Promise<K[]>
nextv (size: number): Promise<K[]>
/**
* Advance repeatedly and get all (remaining) keys as an array, automatically closing
* the iterator. Assumes that those keys fit in memory. If that's not the case, instead
* use {@link next()}, {@link nextv()} or [`for await...of`][1].
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*
* @param options Options (none at the moment, reserved for future use).
*/
all (options: {}): Promise<K[]>
all (): Promise<K[]>
/**
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
* loop) will yield keys equal to or larger than {@link target}, or equal to or smaller
* than {@link target} if the {@link AbstractKeyIteratorOptions.reverse} option was
* true.
*/
seek (target: K): void
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
}
export class AbstractValueIterator<TDatabase, K, V> extends CommonIterator<TDatabase, V> {
constructor (db: TDatabase, options: AbstractValueIteratorOptions<K, V>)
/**
* Advance to the next value and yield that value. When possible, prefer
* to use [`for await...of`][1] instead.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*/
next (): Promise<V | undefined>
/**
* Advance repeatedly and get at most {@link size} values in a single call.
* Can be faster than repeated {@link next()} calls. The natural end of the iterator
* will be signaled by yielding an empty array.
*
* @param size Get at most this many values. Has a soft minimum of 1.
* @param options Options (none at the moment, reserved for future use).
*/
nextv (size: number, options: {}): Promise<V[]>
nextv (size: number): Promise<V[]>
/**
* Advance repeatedly and get all (remaining) values as an array, automatically closing
* the iterator. Assumes that those values fit in memory. If that's not the case,
* instead use {@link next()}, {@link nextv()} or [`for await...of`][1].
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
*
* @param options Options (none at the moment, reserved for future use).
*/
all (options: {}): Promise<V[]>
all (): Promise<V[]>
/**
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
* loop) will yield the values of keys equal to or larger than {@link target}, or equal
* to or smaller than {@link target} if the {@link AbstractValueIteratorOptions.reverse}
* option was true.
*/
seek (target: K): void
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
}
/**
* Options for the {@link AbstractIterator.seek} method.
*/
export interface AbstractSeekOptions<K> {
/**
* Custom key encoding, used to encode the `target`. By default the keyEncoding option
* of the iterator is used, or (if that wasn't set) the keyEncoding of the database.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
}

634
types/abstract-level.d.ts vendored Normal file

@ -0,0 +1,634 @@
import { IManifest } from 'level-supports'
import * as Transcoder from 'level-transcoder'
import { EventEmitter } from 'events'
import { AbstractChainedBatch } from './abstract-chained-batch'
import { AbstractSublevel, AbstractSublevelOptions } from './abstract-sublevel'
import { AbstractSnapshot } from './abstract-snapshot'
import {
AbstractIterator,
AbstractIteratorOptions,
AbstractKeyIterator,
AbstractKeyIteratorOptions,
AbstractValueIterator,
AbstractValueIteratorOptions
} from './abstract-iterator'
import { AbstractReadOptions, AbstractResource, RangeOptions } from './interfaces'
/**
* Abstract class for a lexicographically sorted key-value database.
*
* @template TFormat The type used internally by the database to store data.
* @template KDefault The default type of keys if not overridden on operations.
* @template VDefault The default type of values if not overridden on operations.
*/
declare class AbstractLevel<TFormat, KDefault = string, VDefault = string>
extends EventEmitter implements AbstractResource {
/**
* Private database constructor.
*
* @param manifest A [manifest](https://github.com/Level/supports) describing the
* features supported by (the private API of) this database.
* @param options Options, of which some will be forwarded to {@link open}.
*/
constructor (
manifest: Partial<IManifest>,
options?: AbstractDatabaseOptions<KDefault, VDefault> | undefined
)
/**
* A [manifest](https://github.com/Level/supports) describing the features
* supported by this database.
*/
supports: IManifest
/**
* Allows userland _hook functions_ to customize behavior of the database.
*/
hooks: AbstractDatabaseHooks<typeof this>
/**
* Read-only getter that returns a string reflecting the current state of the database:
*
* - `'opening'` - waiting for the database to be opened
* - `'open'` - successfully opened the database
* - `'closing'` - waiting for the database to be closed
* - `'closed'` - database is closed.
*/
get status (): 'opening' | 'open' | 'closing' | 'closed'
/**
* Open the database.
*/
open (): Promise<void>
open (options: AbstractOpenOptions): Promise<void>
/**
* Close the database.
*/
close (): Promise<void>
/**
* Close the database.
*/
[Symbol.asyncDispose](): Promise<void>
/**
* Get a value from the database by {@link key}.
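*
* @example
* A usage sketch:
* ```js
* const value = await db.get('example')
* // Yields the value, or undefined if the entry does not exist
* ```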
*/
get (key: KDefault): Promise<VDefault | undefined>
get<K = KDefault, V = VDefault> (
key: K,
options: AbstractGetOptions<K, V>
): Promise<V | undefined>
/**
* Get multiple values from the database by an array of {@link keys}.
*/
getMany (keys: KDefault[]): Promise<(VDefault | undefined)[]>
getMany<K = KDefault, V = VDefault> (
keys: K[],
options: AbstractGetManyOptions<K, V>
): Promise<(V | undefined)[]>
/**
* Check if the database has an entry with the given {@link key}.
*
* @returns A promise for a boolean that will be true if the entry exists.
*
* @example
* ```js
* if (await db.has('fruit')) {
* console.log('We have fruit')
* }
* ```
*/
has (key: KDefault): Promise<boolean>
has<K = KDefault> (key: K, options: AbstractHasOptions<K>): Promise<boolean>
/**
* Check if the database has entries with the given {@link keys}.
*
* @returns A promise for an array of booleans with the same order as {@link keys}.
*
* @example
* ```js
* await db.put('a', '123')
* await db.hasMany(['a', 'b']) // [true, false]
* ```
*/
hasMany (keys: KDefault[]): Promise<boolean[]>
hasMany<K = KDefault> (keys: K[], options: AbstractHasManyOptions<K>): Promise<boolean[]>
/**
* Add a new entry or overwrite an existing entry.
*/
put (key: KDefault, value: VDefault): Promise<void>
put<K = KDefault, V = VDefault> (
key: K,
value: V,
options: AbstractPutOptions<K, V>
): Promise<void>
/**
* Delete an entry by {@link key}.
*/
del (key: KDefault): Promise<void>
del<K = KDefault> (
key: K,
options: AbstractDelOptions<K>
): Promise<void>
/**
* Perform multiple _put_ and/or _del_ operations in bulk.
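*
* @example
* A usage sketch of the array form:
* ```js
* await db.batch([
*   { type: 'put', key: 'a', value: '1' },
*   { type: 'del', key: 'b' }
* ])
* ```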
*/
batch (
operations: Array<AbstractBatchOperation<typeof this, KDefault, VDefault>>
): Promise<void>
batch<K = KDefault, V = VDefault> (
operations: Array<AbstractBatchOperation<typeof this, K, V>>,
options: AbstractBatchOptions<K, V>
): Promise<void>
batch (): AbstractChainedBatch<typeof this, KDefault, VDefault>
/**
* Create an iterator. For example:
*
* ```js
* for await (const [key, value] of db.iterator({ gte: 'a' })) {
* console.log([key, value])
* }
* ```
*/
iterator (): AbstractIterator<typeof this, KDefault, VDefault>
iterator<K = KDefault, V = VDefault> (
options: AbstractIteratorOptions<K, V>
): AbstractIterator<typeof this, K, V>
/**
* Create a key iterator. For example:
*
* ```js
* for await (const key of db.keys({ gte: 'a' })) {
* console.log(key)
* }
* ```
*/
keys (): AbstractKeyIterator<typeof this, KDefault>
keys<K = KDefault> (
options: AbstractKeyIteratorOptions<K>
): AbstractKeyIterator<typeof this, K>
/**
* Create a value iterator. For example:
*
* ```js
* for await (const value of db.values({ gte: 'a' })) {
* console.log(value)
* }
* ```
*/
values (): AbstractValueIterator<typeof this, KDefault, VDefault>
values<K = KDefault, V = VDefault> (
options: AbstractValueIteratorOptions<K, V>
): AbstractValueIterator<typeof this, K, V>
/**
* Delete all entries or a range.
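*
* @example
* A usage sketch, deleting entries with keys >= 'a' and < 'b':
* ```js
* await db.clear({ gte: 'a', lt: 'b' })
* ```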
*/
clear (): Promise<void>
clear<K = KDefault> (options: AbstractClearOptions<K>): Promise<void>
/**
* Create a sublevel.
* @param name Name of the sublevel, used to prefix keys.
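*
* @example
* A usage sketch; keys of the sublevel are stored in the parent database under a
* prefix like '!example!':
* ```js
* const example = db.sublevel('example')
* await example.put('hello', 'world') // Stored under key '!example!hello'
* ```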
*/
sublevel (name: string | string[]): AbstractSublevel<typeof this, TFormat, string, string>
sublevel<K = string, V = string> (
name: string | string[],
options: AbstractSublevelOptions<K, V>
): AbstractSublevel<typeof this, TFormat, K, V>
/**
* Add sublevel prefix to the given {@link key}, which must already be encoded. If this
* database is not a sublevel, the given {@link key} is returned as-is.
*
* @param key Key to add prefix to.
* @param keyFormat Format of {@link key}. One of `'utf8'`, `'buffer'`, `'view'`.
* If `'utf8'` then {@link key} must be a string and the return value will be a string.
* If `'buffer'` then Buffer, if `'view'` then Uint8Array.
* @param local If true, add prefix for parent database, else for root database (default).
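*
* @example
* A sketch, assuming a sublevel of the root database named 'example':
* ```js
* const example = db.sublevel('example')
* example.prefixKey('a', 'utf8') // '!example!a'
* ```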
*/
prefixKey (key: string, keyFormat: 'utf8', local?: boolean | undefined): string
prefixKey (key: Buffer, keyFormat: 'buffer', local?: boolean | undefined): Buffer
prefixKey (key: Uint8Array, keyFormat: 'view', local?: boolean | undefined): Uint8Array
/**
* Returns the given {@link encoding} argument as a normalized encoding object
* that follows the [`level-transcoder`](https://github.com/Level/transcoder)
* encoding interface.
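*
* @example
* A usage sketch:
* ```js
* const utf8 = db.keyEncoding('utf8')
* const data = utf8.encode('example') // Data in the storage format of the database
* ```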
*/
keyEncoding<N extends Transcoder.KnownEncodingName> (
encoding: N
): Transcoder.KnownEncoding<N, TFormat>
keyEncoding<TIn, TOut> (
encoding: Transcoder.MixedEncoding<TIn, any, TOut>
): Transcoder.Encoding<TIn, TFormat, TOut>
/**
* Returns the default key encoding of the database as a normalized encoding
* object that follows the [`level-transcoder`](https://github.com/Level/transcoder)
* encoding interface.
*/
keyEncoding (): Transcoder.Encoding<KDefault, TFormat, KDefault>
/**
* Returns the given {@link encoding} argument as a normalized encoding object
* that follows the [`level-transcoder`](https://github.com/Level/transcoder)
* encoding interface.
*/
valueEncoding<N extends Transcoder.KnownEncodingName> (
encoding: N
): Transcoder.KnownEncoding<N, TFormat>
valueEncoding<TIn, TOut> (
encoding: Transcoder.MixedEncoding<TIn, any, TOut>
): Transcoder.Encoding<TIn, TFormat, TOut>
/**
* Returns the default value encoding of the database as a normalized encoding
* object that follows the [`level-transcoder`](https://github.com/Level/transcoder)
* encoding interface.
*/
valueEncoding (): Transcoder.Encoding<VDefault, TFormat, VDefault>
/**
* Create an explicit snapshot. Throws a `LEVEL_NOT_SUPPORTED` error if
* `db.supports.explicitSnapshots` is false ([Level/community#118][1]).
*
* @param options There are currently no options but specific implementations
* may add their own.
*
* @example
* ```ts
* await db.put('example', 'before')
* await using snapshot = db.snapshot()
* await db.put('example', 'after')
* await db.get('example', { snapshot }) // Returns 'before'
* ```
*
* [1]: https://github.com/Level/community/issues/118
*/
snapshot (options?: any | undefined): AbstractSnapshot
/**
* Call the function {@link fn} at a later time when {@link status} changes to
* `'open'` or `'closed'`. Known as a _deferred operation_.
*
* @param fn Synchronous function to (eventually) call.
* @param options Options for the deferred operation.
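*
* @example
* A sketch (mirroring the self-tests) of deferring a custom synchronous operation
* in a class that extends `AbstractLevel`:
* ```js
* custom (arg) {
*   if (this.status === 'opening') {
*     this.defer(() => this.custom(arg))
*   } else {
*     // Perform the operation
*   }
* }
* ```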
*/
defer (fn: Function, options?: AbstractDeferOptions | undefined): void
/**
* Call the function {@link fn} at a later time when {@link status} changes to
* `'open'` or `'closed'`. Known as a _deferred operation_.
*
* @param fn Asynchronous function to (eventually) call.
* @param options Options for the deferred operation.
* @returns A promise for the result of {@link fn}.
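*
* @example
* A sketch (mirroring the self-tests) of deferring a custom asynchronous operation
* in a class that extends `AbstractLevel`:
* ```js
* async custom (arg) {
*   if (this.status === 'opening') {
*     return this.deferAsync(() => this.custom(arg))
*   }
*   // Perform the operation
* }
* ```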
*/
deferAsync<T> (fn: () => Promise<T>, options?: AbstractDeferOptions | undefined): Promise<T>
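/*
* A sketch of deferring work until the database has opened, assuming `db` is still
* opening and `controller` is an AbortController created by the caller:
*
* ```ts
* db.defer(() => console.log('status is now', db.status))
*
* const value = await db.deferAsync(async () => db.get('example'), {
*   signal: controller.signal // optionally abort the deferred operation
* })
* ```
*/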
/**
* Keep track of the given {@link resource} in order to call its `close()` method when
* the database is closed. Once successfully closed, the resource will no longer be
* tracked, to the same effect as manually calling {@link detachResource}. When given
* multiple resources, the database will close them in parallel. Resources are kept in
* a {@link Set} so that the same object will not be attached (and closed) twice.
*
* Intended for objects that rely on an open database. Used internally for built-in
* resources like iterators and sublevels, and is publicly exposed for custom
* resources.
*/
attachResource(resource: AbstractResource): void
/**
* Stop tracking the given {@link resource}.
*/
detachResource(resource: AbstractResource): void
}
export { AbstractLevel }
/**
* Options for the database constructor.
*/
export interface AbstractDatabaseOptions<K, V>
extends Omit<AbstractOpenOptions, 'passive'> {
/**
* Encoding to use for keys.
* @defaultValue `'utf8'`
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Encoding to use for values.
* @defaultValue `'utf8'`
*/
valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined
}
/**
* Options for the {@link AbstractLevel.open} method.
*/
export interface AbstractOpenOptions {
/**
* If `true`, create an empty database if one doesn't already exist. If `false`
* and the database doesn't exist, opening will fail.
*
* @defaultValue `true`
*/
createIfMissing?: boolean | undefined
/**
* If `true` and the database already exists, opening will fail.
*
* @defaultValue `false`
*/
errorIfExists?: boolean | undefined
/**
* Wait for, but do not initiate, opening of the database.
*
* @defaultValue `false`
*/
passive?: boolean | undefined
}
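/*
* A sketch of the open options, assuming a concrete implementation such as
* `classic-level` (the './db' location is hypothetical):
*
* ```ts
* import { ClassicLevel } from 'classic-level'
*
* // Fail instead of creating a new database if './db' does not exist yet
* const db = new ClassicLevel('./db', { createIfMissing: false })
* await db.open()
*
* // Elsewhere: wait for the database to be opened without initiating it
* await db.open({ passive: true })
* ```
*/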
/**
* Options for the {@link AbstractLevel.get} method.
*/
export interface AbstractGetOptions<K, V> extends AbstractReadOptions {
/**
* Custom key encoding for this operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Custom value encoding for this operation, used to decode the value.
*/
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}
/**
* Options for the {@link AbstractLevel.getMany} method.
*/
export interface AbstractGetManyOptions<K, V> extends AbstractReadOptions {
/**
* Custom key encoding for this operation, used to encode the `keys`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Custom value encoding for this operation, used to decode values.
*/
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}
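/*
* A sketch of per-operation encodings on reads, assuming `db` is an opened database
* with `'utf8'` default encodings:
*
* ```ts
* // Decode a single value as JSON without changing the database defaults
* const profile = await db.get('profile', { valueEncoding: 'json' })
*
* // Same for multiple keys; entries that are not found come back as undefined
* const profiles = await db.getMany(['alice', 'bob'], { valueEncoding: 'json' })
* ```
*/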
/**
* Options for the {@link AbstractLevel.has} method.
*/
export interface AbstractHasOptions<K> extends AbstractReadOptions {
/**
* Custom key encoding for this operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
}
/**
* Options for the {@link AbstractLevel.hasMany} method.
*/
export interface AbstractHasManyOptions<K> extends AbstractReadOptions {
/**
* Custom key encoding for this operation, used to encode the `keys`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
}
/**
* Options for the {@link AbstractLevel.put} method.
*/
export interface AbstractPutOptions<K, V> {
/**
* Custom key encoding for this operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Custom value encoding for this operation, used to encode the `value`.
*/
valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined
}
/**
* Options for the {@link AbstractLevel.del} method.
*/
export interface AbstractDelOptions<K> {
/**
* Custom key encoding for this operation, used to encode the `key`.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
}
/**
* Options for the {@link AbstractLevel.batch} method.
*/
export interface AbstractBatchOptions<K, V> {
/**
* Custom key encoding for this batch, used to encode keys.
*/
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
/**
* Custom value encoding for this batch, used to encode values.
*/
valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined
}
/**
* A _put_ or _del_ operation to be committed with the {@link AbstractLevel.batch}
* method.
*/
export type AbstractBatchOperation<TDatabase, K, V> =
AbstractBatchPutOperation<TDatabase, K, V> | AbstractBatchDelOperation<TDatabase, K>
/**
* A _put_ operation to be committed with the {@link AbstractLevel.batch} method.
*/
export interface AbstractBatchPutOperation<TDatabase, K, V> {
type: 'put'
key: K
value: V
/**
* Custom key encoding for this _put_ operation, used to encode the {@link key}.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Custom value encoding for this _put_ operation, used to encode the {@link value}.
*/
valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined
/**
* Act as though the _put_ operation is performed on the given sublevel, to similar
* effect as:
*
* ```js
* await sublevel.batch([{ type: 'put', key, value }])
* ```
*
* This allows atomically committing data to multiple sublevels. The {@link key} will
* be prefixed with the `prefix` of the sublevel, and the {@link key} and {@link value}
* will be encoded by the sublevel (using the default encodings of the sublevel unless
* {@link keyEncoding} and / or {@link valueEncoding} are provided).
*/
sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}
/**
* A _del_ operation to be committed with the {@link AbstractLevel.batch} method.
*/
export interface AbstractBatchDelOperation<TDatabase, K> {
type: 'del'
key: K
/**
* Custom key encoding for this _del_ operation, used to encode the {@link key}.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Act as though the _del_ operation is performed on the given sublevel, to similar
* effect as:
*
* ```js
* await sublevel.batch([{ type: 'del', key }])
* ```
*
* This allows atomically committing data to multiple sublevels. The {@link key} will
* be prefixed with the `prefix` of the sublevel, and the {@link key} will be encoded
* by the sublevel (using the default key encoding of the sublevel unless
* {@link keyEncoding} is provided).
*/
sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}
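/*
* A sketch of a batch that spans two sublevels, assuming `db` is an opened database
* and the sublevel names are hypothetical:
*
* ```ts
* const books = db.sublevel('books', { valueEncoding: 'json' })
* const index = db.sublevel('index')
*
* await db.batch([
*   { type: 'put', key: '42', value: { title: 'Dune' }, sublevel: books },
*   { type: 'put', key: 'Dune', value: '42', sublevel: index },
*   { type: 'del', key: '41', sublevel: books }
* ])
* ```
*/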
/**
* Options for the {@link AbstractLevel.clear} method.
*/
export interface AbstractClearOptions<K> extends RangeOptions<K> {
/**
* Custom key encoding for this operation, used to encode range options.
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Explicit snapshot to read from, such that entries not present in the snapshot will
* not be deleted.
*/
snapshot?: AbstractSnapshot | undefined
}
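/*
* A sketch of clearing a range, optionally pinned to a snapshot, assuming `db`
* supports explicit snapshots:
*
* ```ts
* const snapshot = db.snapshot()
* await db.put('b', '2') // written after the snapshot, so it will not be deleted
* await db.clear({ gte: 'a', lt: 'c', snapshot })
* await snapshot.close()
* ```
*/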
/**
* Allows userland _hook functions_ to customize behavior of the database.
*
* @template TDatabase Type of database.
*/
export interface AbstractDatabaseHooks<
TDatabase,
TOpenOptions = AbstractOpenOptions,
TBatchOperation = AbstractBatchOperation<TDatabase, any, any>> {
/**
* An asynchronous hook that runs after the database has successfully opened, but before
* deferred operations are executed and before events are emitted. Example:
*
* ```js
* db.hooks.postopen.add(async function () {
* // Initialize data
* })
* ```
*/
postopen: AbstractHook<(options: TOpenOptions) => Promise<void>>
/**
* A synchronous hook for modifying or adding operations. Example:
*
* ```js
* db.hooks.prewrite.add(function (op, batch) {
* op.key = op.key.toUpperCase()
* })
* ```
*
* @todo Define type of `op`.
*/
prewrite: AbstractHook<(op: any, batch: AbstractPrewriteBatch<TBatchOperation>) => void>
/**
* A synchronous hook that runs when an {@link AbstractSublevel} instance has been
* created by {@link AbstractLevel.sublevel()}.
*/
newsub: AbstractHook<(
sublevel: AbstractSublevel<TDatabase, any, any, any>,
options: AbstractSublevelOptions<any, any>
) => void>
}
/**
* An interface for prewrite hook functions to add operations, to be committed in the
* same batch as the input operation(s).
*/
export interface AbstractPrewriteBatch<TBatchOperation> {
/**
* Add a batch operation.
*/
add: (op: TBatchOperation) => this
}
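/*
* A sketch of a prewrite hook that maintains a secondary index, assuming `db` is an
* opened database and `index` is a sublevel created for that purpose:
*
* ```ts
* const index = db.sublevel('index')
*
* db.hooks.prewrite.add(function (op, batch) {
*   if (op.type === 'put') {
*     // Add a derived operation that commits in the same atomic batch
*     batch.add({ type: 'put', key: op.value, value: op.key, sublevel: index })
*   }
* })
* ```
*/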
/**
* @template TFn The hook-specific function signature.
*/
export interface AbstractHook<TFn extends Function> {
/**
* Add the given {@link fn} function to this hook, if it wasn't already added.
* @param fn Hook function.
*/
add: (fn: TFn) => void
/**
* Remove the given {@link fn} function from this hook.
* @param fn Hook function.
*/
delete: (fn: TFn) => void
}
/**
* Options for {@link AbstractLevel.defer()} and {@link AbstractLevel.deferAsync()}.
*/
export interface AbstractDeferOptions {
/**
* An [`AbortSignal`][1] to abort the deferred operation.
*
* [1]: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
*/
signal?: AbortSignal | undefined
}

30
types/abstract-snapshot.d.ts vendored Normal file
View File

@ -0,0 +1,30 @@
import { AbstractResource } from './interfaces'
/**
* A lightweight token that represents a version of a database at a particular point in
* time.
*/
export class AbstractSnapshot implements AbstractResource {
/**
* Increment reference count, to register work that should delay closing until
* {@link unref} has been called an equal number of times. The promise that will be returned
* by {@link close} will not resolve until the reference count returns to 0. This
* prevents prematurely closing underlying resources while the snapshot is in use.
*/
ref (): void
/**
* Decrement reference count, to indicate that the work has finished.
*/
unref (): void
/**
* Close the snapshot.
*/
close (): Promise<void>
/**
* Close the snapshot.
*/
[Symbol.asyncDispose](): Promise<void>
}
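/*
* A sketch of reading from a fixed point in time, assuming `db.supports.explicitSnapshots`
* is true. With `await using` the snapshot is disposed automatically; otherwise close it
* in a finally block:
*
* ```ts
* const snapshot = db.snapshot()
* try {
*   for await (const [key, value] of db.iterator({ snapshot })) {
*     // Sees the state of the database as it was when db.snapshot() was called
*   }
* } finally {
*   await snapshot.close()
* }
* ```
*/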

72
types/abstract-sublevel.d.ts vendored Normal file
View File

@ -0,0 +1,72 @@
import * as Transcoder from 'level-transcoder'
import { AbstractLevel } from './abstract-level'
/**
* @template TDatabase Type of parent database.
* @template TFormat The type used internally by the parent database to store data.
* @template KDefault The default type of keys if not overridden on operations.
* @template VDefault The default type of values if not overridden on operations.
*/
declare class AbstractSublevel<TDatabase, TFormat, KDefault, VDefault>
extends AbstractLevel<TFormat, KDefault, VDefault> {
/**
* Sublevel constructor.
*
* @param db Parent database.
* @param name Name of the sublevel, used to prefix keys.
*/
constructor (
db: TDatabase,
name: string,
options?: AbstractSublevelOptions<KDefault, VDefault> | undefined
)
/**
* Prefix of the sublevel. A read-only string property.
*/
get prefix (): string
/**
* Get the path of the sublevel, which is its prefix without separators.
*
* @param local If true, exclude path of parent database. If false (the default) then
* recurse to form a fully-qualified path that travels from the root database to this
* sublevel.
*/
path (local?: boolean | undefined): string[]
/**
* Parent database. A read-only property.
*/
get parent (): TDatabase
/**
* Root database. A read-only property.
*/
get db (): AbstractLevel<any, any, any>
}
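/*
* A sketch of the sublevel accessors, assuming `db` is an opened root database and
* the names are hypothetical:
*
* ```ts
* const books = db.sublevel('books')
* const fiction = books.sublevel('fiction')
*
* fiction.prefix           // '!books!!fiction!'
* fiction.path()           // ['books', 'fiction']
* fiction.path(true)       // ['fiction']
* fiction.parent === books // true
* fiction.db === db        // true
* ```
*/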
/**
* Options for the {@link AbstractLevel.sublevel} method.
*/
export interface AbstractSublevelOptions<K, V> {
/**
* Character for separating sublevel names from user keys and each other. Must sort
* before characters used in `name`. An error will be thrown if that's not the case.
*
* @defaultValue `'!'`
*/
separator?: string | undefined
/**
* Encoding to use for keys.
* @defaultValue `'utf8'`
*/
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
/**
* Encoding to use for values.
* @defaultValue `'utf8'`
*/
valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined
}
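/*
* A sketch of the sublevel options, assuming `db` is an opened database. The separator
* must sort before the characters of the name ('%' sorts before ASCII letters):
*
* ```ts
* const people = db.sublevel('people', {
*   separator: '%',
*   valueEncoding: 'json'
* })
* await people.put('alice', { age: 32 }) // stored under '%people%alice'
* ```
*/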

42
types/interfaces.d.ts vendored Normal file
View File

@ -0,0 +1,42 @@
import { AbstractLevel } from './abstract-level'
import { AbstractSnapshot } from './abstract-snapshot'
export interface RangeOptions<K> {
gt?: K
gte?: K
lt?: K
lte?: K
reverse?: boolean | undefined
limit?: number | undefined
}
/**
* Common options for read methods like {@link AbstractLevel.get} and
* {@link AbstractLevel.iterator}.
*/
export interface AbstractReadOptions {
/**
* Explicit snapshot to read from.
*/
snapshot?: AbstractSnapshot | undefined
}
/**
* Represents a stateful resource that can be closed.
*/
export interface AbstractResource extends AsyncDisposable {
/**
* Close the resource.
*
* Note for implementors: if the resource is exposed to the user and can also be closed
* in an automated fashion (through `db.attachResource()` or similar), then the `close()`
* method should be idempotent, such that calling it twice makes no difference.
*/
close (): Promise<void>
/**
* Close the resource. Identical in functionality to {@link close}.
*/
[Symbol.asyncDispose](): Promise<void>
}
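/*
* A sketch of a custom resource tied to the lifetime of a database, assuming `db` is an
* opened database and the Cache class is hypothetical:
*
* ```ts
* class Cache implements AbstractResource {
*   #closed = false
*
*   async close (): Promise<void> {
*     if (this.#closed) return // Idempotent: closing twice makes no difference
*     this.#closed = true
*     // Release whatever the resource holds here
*   }
*
*   async [Symbol.asyncDispose] (): Promise<void> {
*     return this.close()
*   }
* }
*
* const cache = new Cache()
* db.attachResource(cache) // db.close() will now also close the cache
* ```
*/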