Compare commits: master...moz-pr/798 (4 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 85f0497cf5 | |
| | 131398758b | |
| | 0fa466dd6b | |
| | b838259a8b | |

160 changed files with 5474 additions and 3291 deletions
.github/FUNDING.yml (vendored, 3 lines removed)

```diff
@@ -1,3 +0,0 @@
-liberapay: svartalf
-patreon: svartalf
-custom: ["https://svartalf.info/donate/", "https://www.buymeacoffee.com/svartalf"]
```
.github/dependabot.yml (vendored, 11 lines removed)

```diff
@@ -1,11 +0,0 @@
-# To get started with Dependabot version updates, you'll need to specify which
-# package ecosystems to update and where the package manifests are located.
-# Please see the documentation for all configuration options:
-# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
-
-version: 2
-updates:
-  - package-ecosystem: "cargo" # See documentation for possible values
-    directory: "/" # Location of package manifests
-    schedule:
-      interval: "daily"
```
.github/workflows/audit.yml (vendored, 20 lines removed)

```diff
@@ -1,20 +0,0 @@
-name: Security audit
-
-on:
-  schedule:
-    - cron: '0 0 1 * *'
-  push:
-    paths:
-      - '**/Cargo.toml'
-      - '**/Cargo.lock'
-  pull_request:
-
-jobs:
-  audit:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/audit-check@issue-104
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-
```
.github/workflows/clippy-ng.yml (vendored, 13 lines removed)

```diff
@@ -1,13 +0,0 @@
-on: [push, pull_request]
-name: Clippy (new version test, don't use it!)
-jobs:
-  clippy_check_ng:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          components: clippy
-          override: true
-      - uses: actions-rs/clippy@master
```
.github/workflows/clippy_check.yml (vendored, 16 lines removed)

```diff
@@ -1,16 +0,0 @@
-on: [push, pull_request]
-name: Clippy check
-jobs:
-  clippy_check:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          components: clippy
-          override: true
-      - uses: actions-rs/clippy-check@v1
-        with:
-          args: --all-targets --all-features -- -D warnings
-          token: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/cross_compile.yml (vendored, 28 lines removed)

```diff
@@ -1,28 +0,0 @@
-# We could use `@actions-rs/cargo` Action ability to automatically install `cross` tool
-# in order to compile our application for some unusual targets.
-
-on: [push, pull_request]
-
-name: Cross-compile
-
-jobs:
-  build:
-    name: Build
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        target:
-          - armv7-unknown-linux-gnueabihf
-          - powerpc64-unknown-linux-gnu
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          target: ${{ matrix.target }}
-          override: true
-      - uses: actions-rs/cargo@v1
-        with:
-          use-cross: true
-          command: build
-          args: --release --target=${{ matrix.target }}
```
.github/workflows/grcov.yml (vendored, 66 lines removed)

```diff
@@ -1,66 +0,0 @@
-on: [push, pull_request]
-
-name: Code coverage with grcov
-
-jobs:
-  grcov:
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os:
-          - ubuntu-latest
-          - macOS-latest
-          # - windows-latest
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          override: true
-          profile: minimal
-
-      - name: Execute tests
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --all
-        env:
-          CARGO_INCREMENTAL: 0
-          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests"
-
-      # Note that `actions-rs/grcov` Action can install `grcov` too,
-      # but can't use faster installation methods yet.
-      # As a temporary experiment `actions-rs/install` Action plugged in here.
-      # Consider **NOT** to copy that into your workflow,
-      # but use `actions-rs/grcov` only
-      - name: Pre-installing grcov
-        uses: actions-rs/install@v0.1
-        with:
-          crate: grcov
-          use-tool-cache: true
-
-      - name: Gather coverage data
-        id: coverage
-        uses: actions-rs/grcov@v0.1
-        with:
-          coveralls-token: ${{ secrets.COVERALLS_TOKEN }}
-
-      - name: Coveralls upload
-        uses: coverallsapp/github-action@master
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          parallel: true
-          path-to-lcov: ${{ steps.coverage.outputs.report }}
-
-  grcov_finalize:
-    runs-on: ubuntu-latest
-    needs: grcov
-    steps:
-      - name: Coveralls finalization
-        uses: coverallsapp/github-action@master
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          parallel-finished: true
```
.github/workflows/msrv.yml (vendored, 110 lines removed)

```diff
@@ -1,110 +0,0 @@
-# Based on https://github.com/actions-rs/meta/blob/master/recipes/msrv.md
-
-on: [push, pull_request]
-
-name: MSRV
-
-jobs:
-  check:
-    name: Check
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Run cargo check
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: check
-
-  test:
-    name: Test Suite
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: test
-
-  fmt:
-    name: Rustfmt
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Install rustfmt
-        run: rustup component add rustfmt
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-  clippy:
-    name: Clippy
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Install clippy
-        run: rustup component add clippy
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
```
.github/workflows/nightly_lints.yml (vendored, 78 lines removed)

```diff
@@ -1,78 +0,0 @@
-on: [push, pull_request]
-
-name: Nightly lints
-
-jobs:
-  clippy:
-    name: Clippy
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain with clippy available
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: clippy
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
-
-  rustfmt:
-    name: Format
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain with rustfmt available
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: rustfmt
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-  combo:
-    name: Clippy + rustfmt
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: rustfmt, clippy
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
-
```
.github/workflows/quickstart.yml (vendored, 79 lines removed)

```diff
@@ -1,79 +0,0 @@
-# Based on https://github.com/actions-rs/meta/blob/master/recipes/quickstart.md
-#
-# While our "example" application has the platform-specific code,
-# for simplicity we are compiling and testing everything on the Ubuntu environment only.
-# For multi-OS testing see the `cross.yml` workflow.
-
-on: [push, pull_request]
-
-name: Quickstart
-
-jobs:
-  check:
-    name: Check
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run cargo check
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: check
-
-  test:
-    name: Test Suite
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: test
-
-  lints:
-    name: Lints
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-          components: rustfmt, clippy
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
```
.gitignore (vendored, 6 changes)

```diff
@@ -3,7 +3,7 @@
 *.jar
 *jar
 *~
-**/*.rs.bk
+*.rs.bk
 .s*
 .*.sw*
 *.rs.bak
@@ -15,8 +15,6 @@
 .lein-plugins/
 .lein-repl-history
 .nrepl-port
-.bundle/
-docs/vendor/
 /.lein-*
 /.nrepl-port
 Cargo.lock
@@ -24,7 +22,7 @@ Cargo.lock
 /classes/
 /node_modules/
 /out/
-/target
+target/
 pom.xml
 pom.xml.asc
 /.cljs_node_repl/
```
.travis.yml (55 changes)

```diff
@@ -1,53 +1,11 @@
 language: rust
-env:
-  - CARGO_INCREMENTAL=0
-# https://bheisler.github.io/post/efficient-use-of-travis-ci-cache-for-rust/
-before_cache:
-  # Delete loose files in the debug directory
-  - find ./target/debug -maxdepth 1 -type f -delete
-  # Delete the test and benchmark executables. Finding these all might take some
-  # experimentation.
-  - rm -rf ./target/debug/deps/criterion*
-  - rm -rf ./target/debug/deps/bench*
-  # Delete the associated metadata files for those executables
-  - rm -rf ./target/debug/.fingerprint/criterion*
-  - rm -rf ./target/debug/.fingerprint/bench*
-  # Note that all of the above need to be repeated for `release/` instead of
-  # `debug/` if your build script builds artifacts in release mode.
-  # This is just more metadata
-  - rm -f ./target/.rustc_info.json
-  # Also delete the saved benchmark data from the test benchmarks. If you
-  # have Criterion.rs benchmarks, you'll probably want to do this as well, or set
-  # the CRITERION_HOME environment variable to move that data out of the
-  # `target/` directory.
-  - rm -rf ./target/criterion
-  # Also delete cargo's registry index. This is updated on every build, but it's
-  # way cheaper to re-download than the whole cache is.
-  - rm -rf "$TRAVIS_HOME/.cargo/registry/index/"
-  - rm -rf "$TRAVIS_HOME/.cargo/registry/src"
-cache:
-  directories:
-    - ./target
-    - $TRAVIS_HOME/.cache/sccache
-    - $TRAVIS_HOME/.cargo/
-    - $TRAVIS_HOME/.rustup/
-before_script:
-  - cargo install --force cargo-audit
-  - cargo generate-lockfile
-  - rustup component add clippy-preview
-script:
-  - cargo audit
 # We use OSX so that we can get a reasonably up to date version of SQLCipher.
 # (The version in Travis's default Ubuntu Trusty is much too old).
 os: osx
 before_install:
-  - brew install sqlcipher
+  - brew install sqlcipher --with-fts
 rust:
-  - 1.43.0
-  - 1.44.0
-  - 1.45.0
-  - 1.46.0
-  - 1.47.0
+  - 1.25.0
   - stable
   - beta
   - nightly
@@ -58,21 +16,20 @@ matrix:
 jobs:
   include:
     - stage: "Test iOS"
-      rust: 1.47.0
+      rust: 1.25.0
      script: ./scripts/test-ios.sh
     - stage: "Docs"
-      rust: 1.47.0
+      rust: 1.25.0
      script: ./scripts/cargo-doc.sh
 script:
-  - cargo build --verbose --all
-  - cargo clippy --all-targets --all-features -- -D warnings -A clippy::comparison-chain -A clippy::many-single-char-names # Check tests and non-default crate features.
   - cargo test --verbose --all
   - cargo test --features edn/serde_support --verbose --all
   # We can't pick individual features out with `cargo test --all` (At the time of this writing, this
   # works but does the wrong thing because of a bug in cargo, but its fix will be to disallow doing
   # this all-together, see https://github.com/rust-lang/cargo/issues/5364 for more information). To
-  # work around this, we run tests individually for sub-crates that rely on `rusqlite`.
+  # work around this, we run tests individually for subcrates that rely on `rusqlite`.
   - |
     for crate in "" "db" "db-traits" "ffi" "public-traits" "query-projector" "query-projector-traits" "query-pull" "sql" "tolstoy" "tolstoy-traits" "transaction" "tools/cli"; do
       cargo test --manifest-path ./$crate/Cargo.toml --verbose --no-default-features --features sqlcipher
     done
+cache: cargo
```
Cargo.toml (42 changes)

```diff
@@ -1,5 +1,5 @@
 [package]
-edition = "2021"
+edition = "2018"
 authors = [
     "Richard Newman <rnewman@twinql.com>",
     "Nicholas Alexander <nalexander@mozilla.com>",
@@ -11,10 +11,9 @@ authors = [
     "Kit Cambridge <kit@yakshaving.ninja>",
     "Edouard Oger <eoger@fastmail.com>",
     "Thom Chiovoloni <tchiovoloni@mozilla.com>",
-    "Gregory Burd <greg@burd.me>",
 ]
 name = "mentat"
-version = "0.14.0"
+version = "0.11.2"
 build = "build/version.rs"
 
 [features]
@@ -24,37 +23,24 @@ sqlcipher = ["rusqlite/sqlcipher", "mentat_db/sqlcipher"]
 syncable = ["mentat_tolstoy", "tolstoy_traits", "mentat_db/syncable"]
 
 [workspace]
-members = [
-    "tools/cli",
-    "ffi", "core", "core-traits","db", "db-traits", "edn", "public-traits", "query-algebrizer",
-    "query-algebrizer-traits", "query-projector", "query-projector-traits","query-pull",
-    "query-sql", "sql", "sql-traits", "tolstoy-traits", "tolstoy", "transaction"
-]
+members = ["tools/cli", "ffi"]
 
 [build-dependencies]
-rustc_version = "~0.4"
+rustc_version = "0.2"
 
-[dev-dependencies]
-assert_approx_eq = "~1.1"
-
-#[dev-dependencies.cargo-husky]
-#version = "1"
-#default-features = false # Disable features which are enabled by default
-#features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
-#cargo audit
-#cargo outdated
 
 [dependencies]
-chrono = "~0.4"
-failure = "~0.1"
-lazy_static = "~1.4"
-time = "0.3.1"
-log = "~0.4"
-uuid = { version = "~1", features = ["v4", "serde"] }
+chrono = "0.4"
+failure = "0.1.6"
+lazy_static = "1.4.0"
+time = "0.2"
+log = "0.4"
+uuid = { version = "0.8", features = ["v4", "serde"] }
 
 
 [dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+version = "0.21.0"
+# System sqlite might be very old.
+features = ["limits"]
 
 [dependencies.edn]
 path = "edn"
```
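The dependency lines on the older side of this hunk use tilde requirements, while the other side uses the plain (caret) form. As a rough illustration of the semantics, not taken from this repository (the crate names here are just examples reused from the hunk above):

```toml
[dependencies]
chrono = "0.4"        # caret (default): >=0.4.0, <0.5.0
lazy_static = "~1.4"  # tilde: >=1.4.0, <1.5.0, i.e. only patch-level updates
uuid = "~1"           # tilde on a bare major: >=1.0.0, <2.0.0
```

For 0.x versions the two forms behave the same; the practical difference shows up for 1.x and above, where a tilde requirement blocks minor-version upgrades that a caret requirement would accept.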
Makefile (11 lines removed)

```diff
@@ -1,11 +0,0 @@
-.PHONY: outdated fix
-
-outdated:
-	for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo outdated -R); done
-
-
-fix:
-	$(for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo fix --allow-dirty --broken-code --edition-idioms); done)
-
-upgrades:
-	cargo upgrades
```
NOTES (29 lines removed)

```diff
@@ -1,29 +0,0 @@
-* sqlite -> monetdb-lite-c + fts5 + bayesdb
-* fts5 + regex + tre/fuzzy + codesearch/trigram filters, streaming bloom filters https://arxiv.org/abs/2001.03147
-* datalog to "goblin relational engine" (gtk)
-* branching distributed wal (chain replication) and CRDTs
-* alf:fn query language
-* datatypes via bit syntax+some code?
-* pure lang?
-
-* https://github.com/dahjelle/pouch-datalog
-* https://github.com/edn-query-language/eql
-* https://github.com/borkdude/jet
-* https://github.com/walmartlabs/dyn-edn
-* https://github.com/go-edn/edn
-* https://github.com/smothers/cause
-* https://github.com/oscaro/eq
-* https://github.com/clojure-emacs/parseedn
-* https://github.com/exoscale/seql
-* https://github.com/axboe/liburing
-
-* (EAVtf) - entity attribute value type flags
-
-* distributed, replicated WAL
-* https://github.com/mirage/irmin
-
-* What if facts had "confidence" [0-1)?
-* entity attribute value type flags
-* https://github.com/probcomp/BayesDB
-* https://github.com/probcomp/bayeslite
-* http://probcomp.csail.mit.edu/software/bayesdb/
```
README.md (22 changes)

```diff
@@ -1,13 +1,17 @@
 # Project Mentat
-[![Build Status](https://travis-ci.org/qpdb/mentat.svg?branch=master)](https://travis-ci.org/qpdb/mentat)
+[![Build Status](https://travis-ci.org/mozilla/mentat.svg?branch=master)](https://travis-ci.org/mozilla/mentat)
 
+**Project Mentat is [no longer being developed or actively maintained by Mozilla](https://mail.mozilla.org/pipermail/firefox-dev/2018-September/006780.html).** This repository will be marked read-only in the near future. You are, of course, welcome to fork the repository and use the existing code.
+
 Project Mentat is a persistent, embedded knowledge base. It draws heavily on [DataScript](https://github.com/tonsky/datascript) and [Datomic](http://datomic.com).
 
-This project was started by Mozilla, but [is no longer being developed or actively maintained by them](https://mail.mozilla.org/pipermail/firefox-dev/2018-September/006780.html). [Their repository](https://github.com/mozilla/mentat) was marked read-only, [this fork](https://github.com/qpdb/mentat) is an attempt to revive and continue that interesting work. We owe the team at Mozilla more than words can express for inspiring us all and for this project in particular.
+Mentat is implemented in Rust.
 
-*Thank you*.
+The first version of Project Mentat, named Datomish, [was written in ClojureScript](https://github.com/mozilla/mentat/tree/clojure), targeting both Node (on top of `promise_sqlite`) and Firefox (on top of `Sqlite.jsm`). It also worked in pure Clojure on the JVM on top of `jdbc-sqlite`. The name was changed to avoid confusion with [Datomic](http://datomic.com).
 
-[Documentation](https://docs.rs/mentat)
+The Rust implementation gives us a smaller compiled output, better performance, more type safety, better tooling, and easier deployment into Firefox and mobile platforms.
+
+[Documentation](https://mozilla.github.io/mentat)
 
 ---
 
@@ -73,11 +77,9 @@ We've observed that data storage is a particular area of difficulty for software
 
 DataScript asks the question: "What if creating a database were as cheap as creating a Hashmap?"
 
-Mentat is not interested in that. Instead, it's focused on persistence and performance, with very little interest in immutable databases/databases as values or throwaway use.
+Mentat is not interested in that. Instead, it's strongly interested in persistence and performance, with very little interest in immutable databases/databases as values or throwaway use.
 
-One might say that Mentat's question is: "What if a database could store arbitrary relations, for arbitrary consumers, without them having to coordinate an up-front storage-level schema?"
+One might say that Mentat's question is: "What if an SQLite database could store arbitrary relations, for arbitrary consumers, without them having to coordinate an up-front storage-level schema?"
 
-Consider this a practical approach to facts, to knowledge its storage and access, much like SQLite is a practical RDBMS.
-
 (Note that [domain-level schemas are very valuable](http://martinfowler.com/articles/schemaless/).)
 
@@ -87,7 +89,7 @@ Some thought has been given to how databases as values — long-term references
 
 Just like DataScript, Mentat speaks Datalog for querying and takes additions and retractions as input to a transaction.
 
-Unlike DataScript, Mentat exposes free-text indexing, thanks to SQLite/FTS.
+Unlike DataScript, Mentat exposes free-text indexing, thanks to SQLite.
 
 
 ## Comparison to Datomic
 
@@ -96,6 +98,8 @@ Datomic is a server-side, enterprise-grade data storage system. Datomic has a be
 
 Many of these design decisions are inapplicable to deployed desktop software; indeed, the use of multiple JVM processes makes Datomic's use in a small desktop app, or a mobile device, prohibitive.
 
+Mentat was designed for embedding, initially in an experimental Electron app ([Tofino](https://github.com/mozilla/tofino)). It is less concerned with exposing consistent database states outside transaction boundaries, because that's less important here, and dropping some of these requirements allows us to leverage SQLite itself.
+
 
 ## Comparison to SQLite
 
```
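The README text above mentions that Mentat, like DataScript, speaks Datalog for querying. As a purely illustrative sketch of that query style (DataScript/Datomic-flavoured EDN; the attribute names are hypothetical and the exact syntax may not match Mentat's documentation):

```edn
;; Find the names of all entities with a (hypothetical) :person/city of "Toronto".
[:find ?name
 :where [?p :person/name ?name]
        [?p :person/city "Toronto"]]
```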
```diff
@@ -14,7 +14,7 @@ use std::process::exit;
 
 /// MIN_VERSION should be changed when there's a new minimum version of rustc required
 /// to build the project.
-static MIN_VERSION: &str = "1.69.0";
+static MIN_VERSION: &'static str = "1.41.0";
 
 fn main() {
     let ver = version().unwrap();
```
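The hunk only shows the changed constant of this build script. As a rough, hypothetical sketch of what a minimal MSRV gate of this shape can look like (assuming the `rustc_version` crate from the `[build-dependencies]` section above; this is not the project's actual file):

```rust
// Hypothetical build.rs-style sketch: refuse to build on a rustc older than MIN_VERSION.
use std::process::exit;

use rustc_version::{version, Version};

static MIN_VERSION: &str = "1.41.0";

fn main() {
    // Version of the rustc currently driving the build.
    let ver = version().unwrap();
    let min = Version::parse(MIN_VERSION).unwrap();
    if ver < min {
        eprintln!("building requires rustc {} or newer (found {})", MIN_VERSION, ver);
        exit(1);
    }
}
```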
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "core_traits"
-version = "0.0.2"
+version = "0.0.1"
 workspace = ".."
 
 [lib]
@@ -8,15 +8,14 @@ name = "core_traits"
 path = "lib.rs"
 
 [dependencies]
-chrono = { version = "~0.4", features = ["serde"] }
-enum-set = "~0.0.8"
-lazy_static = "~1.4"
-indexmap = "~1.9"
-ordered-float = { version = "~2.8", features = ["serde"] }
-uuid = { version = "~1", features = ["v4", "serde"] }
-serde = { version = "~1.0", features = ["rc"] }
-serde_derive = "~1.0"
-bytes = { version = "1.0.1", features = ["serde"] }
+chrono = { version = "0.4", features = ["serde"] }
+enum-set = "0.0.8"
+lazy_static = "1.4.0"
+indexmap = "1.3.1"
+ordered-float = { version = "1.0.2", features = ["serde"] }
+uuid = { version = "0.8", features = ["v4", "serde"] }
+serde = { version = "1.0", features = ["rc"] }
+serde_derive = "1.0"
 
 [dependencies.edn]
 path = "../edn"
```
```diff
@@ -14,7 +14,6 @@ extern crate indexmap;
 extern crate ordered_float;
 #[macro_use]
 extern crate serde_derive;
-extern crate bytes;
 extern crate edn;
 extern crate uuid;
 #[macro_use]
@@ -34,7 +33,6 @@ use std::sync::Arc;
 
 use std::collections::BTreeMap;
 
-use bytes::Bytes;
 use indexmap::IndexMap;
 
 use enum_set::EnumSet;
@@ -54,7 +52,7 @@ use edn::entities::{
 mod value_type_set;
 pub mod values;
 
-pub use crate::value_type_set::ValueTypeSet;
+pub use value_type_set::ValueTypeSet;
 
 #[macro_export]
 macro_rules! bail {
@@ -104,14 +102,14 @@ impl<V: TransactableValueMarker> Into<ValuePlace<V>> for KnownEntid {
 /// When moving to a more concrete table, such as `datoms`, they are expanded out
 /// via these flags and put into their own column rather than a bit field.
 pub enum AttributeBitFlags {
-    IndexAVET = 1,
+    IndexAVET = 1 << 0,
     IndexVAET = 1 << 1,
     IndexFulltext = 1 << 2,
     UniqueValue = 1 << 3,
 }
 
 pub mod attribute {
-    use crate::TypedValue;
+    use TypedValue;
 
     #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
     pub enum Unique {
@@ -282,7 +280,6 @@ pub enum ValueType {
     String,
     Keyword,
     Uuid,
-    Bytes,
 }
 
 impl ValueType {
@@ -297,7 +294,6 @@ impl ValueType {
         s.insert(ValueType::String);
         s.insert(ValueType::Keyword);
         s.insert(ValueType::Uuid);
-        s.insert(ValueType::Bytes);
         s
     }
 }
@@ -325,28 +321,26 @@ impl ValueType {
                 ValueType::String => "string",
                 ValueType::Keyword => "keyword",
                 ValueType::Uuid => "uuid",
-                ValueType::Bytes => "bytes",
             },
         )
     }
 
     pub fn from_keyword(keyword: &Keyword) -> Option<Self> {
         if keyword.namespace() != Some("db.type") {
-            None
-        } else {
-            match keyword.name() {
-                "ref" => Some(ValueType::Ref),
-                "boolean" => Some(ValueType::Boolean),
-                "instant" => Some(ValueType::Instant),
-                "long" => Some(ValueType::Long),
-                "double" => Some(ValueType::Double),
-                "string" => Some(ValueType::String),
-                "keyword" => Some(ValueType::Keyword),
-                "uuid" => Some(ValueType::Uuid),
-                "bytes" => Some(ValueType::Bytes),
-                _ => None,
-            }
+            return None;
         }
+
+        return match keyword.name() {
+            "ref" => Some(ValueType::Ref),
+            "boolean" => Some(ValueType::Boolean),
+            "instant" => Some(ValueType::Instant),
+            "long" => Some(ValueType::Long),
+            "double" => Some(ValueType::Double),
+            "string" => Some(ValueType::String),
+            "keyword" => Some(ValueType::Keyword),
+            "uuid" => Some(ValueType::Uuid),
+            _ => None,
+        };
     }
 
     pub fn into_typed_value(self) -> TypedValue {
@@ -361,7 +355,6 @@ impl ValueType {
                 ValueType::String => "string",
                 ValueType::Keyword => "keyword",
                 ValueType::Uuid => "uuid",
-                ValueType::Bytes => "bytes",
             },
         )
     }
@@ -376,12 +369,14 @@ impl ValueType {
             ValueType::String => values::DB_TYPE_STRING.clone(),
             ValueType::Keyword => values::DB_TYPE_KEYWORD.clone(),
             ValueType::Uuid => values::DB_TYPE_UUID.clone(),
-            ValueType::Bytes => values::DB_TYPE_BYTES.clone(),
         }
     }
 
-    pub fn is_numeric(self) -> bool {
-        matches!(self, ValueType::Long | ValueType::Double)
+    pub fn is_numeric(&self) -> bool {
+        match self {
+            &ValueType::Long | &ValueType::Double => true,
+            _ => false,
+        }
     }
 }
 
@@ -399,7 +394,6 @@ impl fmt::Display for ValueType {
                 ValueType::String => ":db.type/string",
                 ValueType::Keyword => ":db.type/keyword",
                 ValueType::Uuid => ":db.type/uuid",
-                ValueType::Bytes => ":db.type/bytes",
             }
         )
     }
@@ -423,7 +417,6 @@ pub enum TypedValue {
     String(ValueRc<String>),
     Keyword(ValueRc<Keyword>),
     Uuid(Uuid), // It's only 128 bits, so this should be acceptable to clone.
-    Bytes(Bytes),
 }
 
 impl From<KnownEntid> for TypedValue {
@@ -447,15 +440,14 @@ impl TypedValue {
 
     pub fn value_type(&self) -> ValueType {
         match self {
-            TypedValue::Ref(_) => ValueType::Ref,
-            TypedValue::Boolean(_) => ValueType::Boolean,
-            TypedValue::Long(_) => ValueType::Long,
-            TypedValue::Instant(_) => ValueType::Instant,
-            TypedValue::Double(_) => ValueType::Double,
-            TypedValue::String(_) => ValueType::String,
-            TypedValue::Keyword(_) => ValueType::Keyword,
-            TypedValue::Uuid(_) => ValueType::Uuid,
-            TypedValue::Bytes(_) => ValueType::Bytes,
+            &TypedValue::Ref(_) => ValueType::Ref,
+            &TypedValue::Boolean(_) => ValueType::Boolean,
+            &TypedValue::Long(_) => ValueType::Long,
+            &TypedValue::Instant(_) => ValueType::Instant,
+            &TypedValue::Double(_) => ValueType::Double,
+            &TypedValue::String(_) => ValueType::String,
+            &TypedValue::Keyword(_) => ValueType::Keyword,
+            &TypedValue::Uuid(_) => ValueType::Uuid,
         }
     }
 
@@ -582,7 +574,7 @@ impl TypedValue {
         match self {
             TypedValue::Uuid(v) => {
                 // Get an independent copy of the string.
-                let s: String = v.hyphenated().to_string();
+                let s: String = v.to_hyphenated().to_string();
 
                 // Make a CString out of the new bytes.
                 let c: CString = CString::new(s).expect("String conversion failed!");
@@ -603,14 +595,7 @@ impl TypedValue {
 
     pub fn into_uuid_string(self) -> Option<String> {
         match self {
-            TypedValue::Uuid(v) => Some(v.hyphenated().to_string()),
-            _ => None,
-        }
-    }
-
-    pub fn into_bytes(self) -> Option<Bytes> {
-        match self {
-            TypedValue::Bytes(b) => Some(b),
+            TypedValue::Uuid(v) => Some(v.to_hyphenated().to_string()),
             _ => None,
         }
     }
@@ -704,12 +689,6 @@ impl From<f64> for TypedValue {
     }
 }
 
-impl From<&[u8]> for TypedValue {
-    fn from(bslice: &[u8]) -> Self {
-        TypedValue::Bytes(Bytes::copy_from_slice(bslice))
-    }
-}
-
 trait MicrosecondPrecision {
     /// Truncate the provided `DateTime` to microsecond precision.
     fn microsecond_precision(self) -> Self;
@@ -791,21 +770,21 @@ impl Binding {
 
     pub fn as_scalar(&self) -> Option<&TypedValue> {
         match self {
-            Binding::Scalar(ref v) => Some(v),
+            &Binding::Scalar(ref v) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_vec(&self) -> Option<&Vec<Binding>> {
         match self {
-            Binding::Vec(ref v) => Some(v),
+            &Binding::Vec(ref v) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_map(&self) -> Option<&StructuredMap> {
         match self {
-            Binding::Map(ref v) => Some(v),
+            &Binding::Map(ref v) => Some(v),
             _ => None,
         }
     }
@@ -877,10 +856,10 @@ impl Binding {
 
     pub fn value_type(&self) -> Option<ValueType> {
         match self {
-            Binding::Scalar(ref v) => Some(v.value_type()),
+            &Binding::Scalar(ref v) => Some(v.value_type()),
 
-            Binding::Map(_) => None,
-            Binding::Vec(_) => None,
+            &Binding::Map(_) => None,
+            &Binding::Vec(_) => None,
         }
     }
 }
@@ -963,7 +942,7 @@ impl Binding {
 
     pub fn into_uuid_string(self) -> Option<String> {
         match self {
-            Binding::Scalar(TypedValue::Uuid(v)) => Some(v.hyphenated().to_string()),
+            Binding::Scalar(TypedValue::Uuid(v)) => Some(v.to_hyphenated().to_string()),
             _ => None,
         }
     }
@@ -991,56 +970,56 @@ impl Binding {
 
     pub fn as_entid(&self) -> Option<&Entid> {
         match self {
-            Binding::Scalar(TypedValue::Ref(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Ref(ref v)) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_kw(&self) -> Option<&ValueRc<Keyword>> {
         match self {
-            Binding::Scalar(TypedValue::Keyword(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Keyword(ref v)) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_boolean(&self) -> Option<&bool> {
         match self {
-            Binding::Scalar(TypedValue::Boolean(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Boolean(ref v)) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_long(&self) -> Option<&i64> {
         match self {
-            Binding::Scalar(TypedValue::Long(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Long(ref v)) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_double(&self) -> Option<&f64> {
         match self {
-            Binding::Scalar(TypedValue::Double(ref v)) => Some(&v.0),
+            &Binding::Scalar(TypedValue::Double(ref v)) => Some(&v.0),
             _ => None,
         }
     }
 
     pub fn as_instant(&self) -> Option<&DateTime<Utc>> {
         match self {
-            Binding::Scalar(TypedValue::Instant(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Instant(ref v)) => Some(v),
            _ => None,
         }
     }
 
     pub fn as_string(&self) -> Option<&ValueRc<String>> {
         match self {
-            Binding::Scalar(TypedValue::String(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::String(ref v)) => Some(v),
             _ => None,
         }
     }
 
     pub fn as_uuid(&self) -> Option<&Uuid> {
         match self {
-            Binding::Scalar(TypedValue::Uuid(ref v)) => Some(v),
+            &Binding::Scalar(TypedValue::Uuid(ref v)) => Some(v),
             _ => None,
         }
    }
```
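Several hunks in this file swap between reference patterns (`&TypedValue::Ref(_)`) with explicit boolean `match` arms and the newer `matches!`/match-ergonomics style. A standalone illustration of the two idioms being exchanged, using a made-up enum rather than this crate's types:

```rust
#[derive(Clone, Copy)]
enum Kind {
    Long,
    Double,
    Text,
}

// Older style: explicit match with reference patterns and boolean arms.
fn is_numeric_old(k: &Kind) -> bool {
    match k {
        &Kind::Long | &Kind::Double => true,
        _ => false,
    }
}

// Newer style: the `matches!` macro (stable since Rust 1.42).
fn is_numeric_new(k: Kind) -> bool {
    matches!(k, Kind::Long | Kind::Double)
}

fn main() {
    assert!(is_numeric_new(Kind::Long));
    assert!(is_numeric_old(&Kind::Double));
    assert!(!is_numeric_old(&Kind::Text));
}
```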
```diff
@@ -10,7 +10,7 @@
 
 use enum_set::EnumSet;
 
-use crate::ValueType;
+use ValueType;
 
 trait EnumSetExtensions<T: ::enum_set::CLike + Clone> {
     /// Return a set containing both `x` and `y`.
@@ -92,53 +92,53 @@ impl ValueTypeSet {
         self.0.insert(vt)
     }
 
-    pub fn len(self) -> usize {
+    pub fn len(&self) -> usize {
         self.0.len()
     }
 
     /// Returns a set containing all the types in this set and `other`.
-    pub fn union(self, other: ValueTypeSet) -> ValueTypeSet {
+    pub fn union(&self, other: &ValueTypeSet) -> ValueTypeSet {
         ValueTypeSet(self.0.union(other.0))
     }
 
-    pub fn intersection(self, other: ValueTypeSet) -> ValueTypeSet {
+    pub fn intersection(&self, other: &ValueTypeSet) -> ValueTypeSet {
         ValueTypeSet(self.0.intersection(other.0))
     }
 
     /// Returns the set difference between `self` and `other`, which is the
     /// set of items in `self` that are not in `other`.
-    pub fn difference(self, other: ValueTypeSet) -> ValueTypeSet {
+    pub fn difference(&self, other: &ValueTypeSet) -> ValueTypeSet {
         ValueTypeSet(self.0 - other.0)
     }
 
     /// Return an arbitrary type that's part of this set.
     /// For a set containing a single type, this will be that type.
-    pub fn exemplar(self) -> Option<ValueType> {
+    pub fn exemplar(&self) -> Option<ValueType> {
         self.0.iter().next()
     }
 
-    pub fn is_subset(self, other: ValueTypeSet) -> bool {
+    pub fn is_subset(&self, other: &ValueTypeSet) -> bool {
         self.0.is_subset(&other.0)
     }
 
     /// Returns true if `self` and `other` contain no items in common.
-    pub fn is_disjoint(self, other: ValueTypeSet) -> bool {
+    pub fn is_disjoint(&self, other: &ValueTypeSet) -> bool {
         self.0.is_disjoint(&other.0)
     }
 
-    pub fn contains(self, vt: ValueType) -> bool {
+    pub fn contains(&self, vt: ValueType) -> bool {
         self.0.contains(&vt)
     }
 
-    pub fn is_empty(self) -> bool {
+    pub fn is_empty(&self) -> bool {
         self.0.is_empty()
     }
 
-    pub fn is_unit(self) -> bool {
+    pub fn is_unit(&self) -> bool {
         self.0.len() == 1
     }
 
-    pub fn iter(self) -> ::enum_set::Iter<ValueType> {
+    pub fn iter(&self) -> ::enum_set::Iter<ValueType> {
         self.0.iter()
     }
 }
@@ -150,8 +150,8 @@ impl From<ValueType> for ValueTypeSet {
 }
 
 impl ValueTypeSet {
-    pub fn is_only_numeric(self) -> bool {
-        self.is_subset(ValueTypeSet::of_numeric_types())
+    pub fn is_only_numeric(&self) -> bool {
+        self.is_subset(&ValueTypeSet::of_numeric_types())
     }
 }
 
```
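The `ValueTypeSet` hunk above flips method receivers between `self` and `&self`. For a small `Copy` wrapper type the by-value form costs nothing and removes `&` noise at call sites; a minimal sketch of that style, using a hypothetical wrapper rather than this crate's type:

```rust
#[derive(Clone, Copy)]
struct SmallSet(u32); // hypothetical bit-set wrapper, one bit per member

impl SmallSet {
    // Taking `self` by value is free for a Copy type.
    fn contains(self, bit: u32) -> bool {
        self.0 & (1 << bit) != 0
    }

    fn union(self, other: SmallSet) -> SmallSet {
        SmallSet(self.0 | other.0)
    }
}

fn main() {
    let a = SmallSet(0b0011);
    let b = SmallSet(0b0100);
    assert!(a.contains(0));
    assert!(a.union(b).contains(2)); // `a` is still usable afterwards because it is Copy
}
```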
```diff
@@ -58,7 +58,6 @@ lazy_static_namespaced_keyword_value!(DB_TYPE_REF, "db.type", "ref");
 lazy_static_namespaced_keyword_value!(DB_TYPE_STRING, "db.type", "string");
 lazy_static_namespaced_keyword_value!(DB_TYPE_URI, "db.type", "uri");
 lazy_static_namespaced_keyword_value!(DB_TYPE_UUID, "db.type", "uuid");
-lazy_static_namespaced_keyword_value!(DB_TYPE_BYTES, "db.type", "bytes");
 lazy_static_namespaced_keyword_value!(DB_UNIQUE, "db", "unique");
 lazy_static_namespaced_keyword_value!(DB_UNIQUE_IDENTITY, "db.unique", "identity");
 lazy_static_namespaced_keyword_value!(DB_UNIQUE_VALUE, "db.unique", "value");
```
```diff
@@ -1,15 +1,15 @@
 [package]
 name = "mentat_core"
-version = "0.0.2"
+version = "0.0.1"
 workspace = ".."
 
 [dependencies]
-chrono = { version = "~0.4", features = ["serde"] }
-enum-set = "~0.0"
-failure = "~0.1"
-indexmap = "~1.9"
-ordered-float = { version = "~2.8", features = ["serde"] }
-uuid = { version = "~1", features = ["v4", "serde"] }
+chrono = { version = "0.4", features = ["serde"] }
+enum-set = "0.0"
+failure = "0.1"
+indexmap = "1.3"
+ordered-float = { version = "1.0", features = ["serde"] }
+uuid = { version = "0.8", features = ["v4", "serde"] }
 
 [dependencies.core_traits]
 path = "../core-traits"
```
```diff
@@ -13,7 +13,7 @@ use std::collections::BTreeSet;
 
 use core_traits::{Entid, TypedValue};
 
-use crate::Schema;
+use Schema;
 
 pub trait CachedAttributes {
     fn is_attribute_cached_reverse(&self, entid: Entid) -> bool;
```
|
||||||
use std::cell::Cell;
|
use std::cell::Cell;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone)]
|
||||||
pub struct RcCounter {
|
pub struct RcCounter {
|
||||||
c: Rc<Cell<usize>>,
|
c: Rc<Cell<usize>>,
|
||||||
}
|
}
|
||||||
|
|
|
```diff
@@ -35,18 +35,18 @@ pub use chrono::{
 pub use edn::parse::parse_query;
 pub use edn::{Cloned, FromMicros, FromRc, Keyword, ToMicros, Utc, ValueRc};
 
-pub use crate::cache::{CachedAttributes, UpdateableCache};
+pub use cache::{CachedAttributes, UpdateableCache};
 
 mod sql_types;
 mod tx_report;
 /// Core types defining a Mentat knowledge base.
 mod types;
 
-pub use crate::tx_report::TxReport;
+pub use tx_report::TxReport;
 
-pub use crate::types::ValueTypeTag;
+pub use types::ValueTypeTag;
 
-pub use crate::sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};
+pub use sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};
 
 /// Map `Keyword` idents (`:db/ident`) to positive integer entids (`1`).
 pub type IdentMap = BTreeMap<Keyword, Entid>;
@@ -135,7 +135,7 @@ impl Schema {
     }
 
     fn get_raw_entid(&self, x: &Keyword) -> Option<Entid> {
-        self.ident_map.get(x).copied()
+        self.ident_map.get(x).map(|x| *x)
     }
 
     pub fn update_component_attributes(&mut self) {
```
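A recurring change in these hunks rewrites crate-relative imports such as `pub use crate::tx_report::TxReport` back to the 2015-edition form `pub use tx_report::TxReport`. A minimal sketch of the difference, with a made-up module name rather than this project's layout:

```rust
// Hypothetical crate root illustrating the two import styles.
mod report {
    pub struct TxReport;
}

// Rust 2018 and later: crate-relative paths are spelled with `crate::`.
pub use crate::report::TxReport;

// The Rust 2015 spelling at the crate root would instead be:
// pub use report::TxReport;

fn main() {
    let _r = TxReport; // resolves through the re-export above
}
```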
```diff
@@ -12,7 +12,7 @@ use std::collections::BTreeSet;
 
 use core_traits::{ValueType, ValueTypeSet};
 
-use crate::types::ValueTypeTag;
+use types::ValueTypeTag;
 
 /// Type safe representation of the possible return values from SQLite's `typeof`
 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -51,7 +51,6 @@ impl SQLValueType for ValueType {
             ValueType::String => (10, None),
             ValueType::Uuid => (11, None),
             ValueType::Keyword => (13, None),
-            ValueType::Bytes => (15, Some(SQLTypeAffinity::Blob)),
         }
     }
 
@@ -63,7 +62,7 @@ impl SQLValueType for ValueType {
     /// Returns true if the provided integer is in the SQLite value space of this type. For
     /// example, `1` is how we encode `true`.
     fn accommodates_integer(&self, int: i64) -> bool {
-        use crate::ValueType::*;
+        use ValueType::*;
         match *self {
             Instant => false, // Always use #inst.
             Long | Double => true,
@@ -72,7 +71,6 @@ impl SQLValueType for ValueType {
             ValueType::String => false,
             Keyword => false,
             Uuid => false,
-            Bytes => false,
         }
     }
 }
@@ -125,8 +123,8 @@ impl SQLValueTypeSet for ValueTypeSet {
 
 #[cfg(test)]
 mod tests {
-    use crate::sql_types::SQLValueType;
     use core_traits::ValueType;
+    use sql_types::SQLValueType;
 
     #[test]
     fn test_accommodates_integer() {
```
@@ -14,7 +14,7 @@ use std::collections::BTreeMap;

use core_traits::Entid;

-use crate::{DateTime, Utc};
+use {DateTime, Utc};

/// A transaction report summarizes an applied transaction.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
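A rough sketch of the shape a transaction report takes, per the doc comment above. The field names and the string stand-in for `DateTime<Utc>` are assumptions made only to keep the example self-contained; the real struct lives in this file.

```rust
use std::collections::BTreeMap;

type Entid = i64;

#[derive(Clone, Debug)]
struct TxReport {
    // The entid of the transaction that was applied.
    tx_id: Entid,
    // Stand-in for DateTime<Utc>, to keep the sketch dependency-free.
    tx_instant: String,
    // Resolved tempids, keyed by the user-supplied tempid string.
    tempids: BTreeMap<String, Entid>,
}

fn main() {
    let report = TxReport {
        tx_id: 0x1000_0000,
        tx_instant: "2020-01-01T00:00:00Z".to_string(),
        tempids: BTreeMap::new(),
    };
    println!("transacted tx {} at {}", report.tx_id, report.tx_instant);
}
```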
@@ -1,6 +1,6 @@
[package]
name = "db_traits"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[lib]
@@ -11,8 +11,8 @@ path = "lib.rs"
sqlcipher = ["rusqlite/sqlcipher"]

[dependencies]
-failure = "~0.1"
+failure = "0.1"
-failure_derive = "~0.1"
+failure_derive = "0.1"

[dependencies.edn]
path = "../edn"
@@ -21,5 +21,5 @@ path = "../edn"
path = "../core-traits"

[dependencies.rusqlite]
-version = "~0.29"
+version = "0.21"
-features = ["limits", "bundled"]
+features = ["limits"]
@@ -69,7 +69,7 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        use self::SchemaConstraintViolation::*;
        match self {
-            ConflictingUpserts {
+            &ConflictingUpserts {
                ref conflicting_upserts,
            } => {
                writeln!(f, "conflicting upserts:")?;
@@ -78,7 +78,7 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
                }
                Ok(())
            }
-            TypeDisagreements {
+            &TypeDisagreements {
                ref conflicting_datoms,
            } => {
                writeln!(f, "type disagreements:")?;
@@ -91,9 +91,9 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
                }
                Ok(())
            }
-            CardinalityConflicts { ref conflicts } => {
+            &CardinalityConflicts { ref conflicts } => {
                writeln!(f, "cardinality conflicts:")?;
-                for conflict in conflicts {
+                for ref conflict in conflicts {
                    writeln!(f, " {:?}", conflict)?;
                }
                Ok(())
@@ -116,12 +116,12 @@ impl ::std::fmt::Display for InputError {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        use self::InputError::*;
        match self {
-            BadDbId => {
+            &BadDbId => {
                writeln!(f, ":db/id in map notation must either not be present or be an entid, an ident, or a tempid")
-            }
+            },
-            BadEntityPlace => {
+            &BadEntityPlace => {
                writeln!(f, "cannot convert value place into entity place")
-            }
+            },
        }
    }
}
@@ -163,7 +163,7 @@ impl From<DbErrorKind> for DbError {

impl From<Context<DbErrorKind>> for DbError {
    fn from(inner: Context<DbErrorKind>) -> Self {
-        DbError { inner }
+        DbError { inner: inner }
    }
}

@@ -177,7 +177,8 @@ impl From<rusqlite::Error> for DbError {

#[derive(Clone, PartialEq, Debug, Fail)]
pub enum DbErrorKind {
-    /// We're just not done yet. Recognized a feature that is not yet implemented.
+    /// We're just not done yet. Message that the feature is recognized but not yet
+    /// implemented.
    #[fail(display = "not yet implemented: {}", _0)]
    NotYetImplemented(String),
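The arm-by-arm churn above is the 2018 match-ergonomics cleanup: when matching on a reference, modern Rust lets the leading `&` and explicit `ref` bindings be dropped. A minimal sketch of the same idea, with a trimmed error enum used only for illustration:

```rust
enum InputError {
    BadDbId,
    BadEntityPlace,
}

// `e` is a reference, but the arms need no `&` or `ref`: the compiler
// binds by reference automatically (match ergonomics, Rust 2018+).
fn describe(e: &InputError) -> &'static str {
    match e {
        InputError::BadDbId => ":db/id must be an entid, an ident, or a tempid",
        InputError::BadEntityPlace => "cannot convert value place into entity place",
    }
}

fn main() {
    println!("{}", describe(&InputError::BadDbId));
}
```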
@@ -1,6 +1,6 @@
[package]
name = "mentat_db"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[features]
@@ -9,21 +9,21 @@ sqlcipher = ["rusqlite/sqlcipher"]
syncable = ["serde", "serde_json", "serde_derive"]

[dependencies]
-failure = "~0.1"
+failure = "0.1.6"
-indexmap = "~1.9"
+indexmap = "1.3.1"
-itertools = "~0.10"
+itertools = "0.8"
-lazy_static = "~1.4"
+lazy_static = "1.4.0"
-log = "~0.4"
+log = "0.4"
-ordered-float = "~2.8"
+ordered-float = "1.0.2"
-time = "~0.3"
+time = "0.2"
-petgraph = "~0.6"
+petgraph = "0.5"
-serde = { version = "~1.0", optional = true }
+serde = { version = "1.0", optional = true }
-serde_json = { version = "~1.0", optional = true }
+serde_json = { version = "1.0", optional = true }
-serde_derive = { version = "~1.0", optional = true }
+serde_derive = { version = "1.0", optional = true }

[dependencies.rusqlite]
-version = "~0.29"
+version = "0.21"
-features = ["limits", "bundled"]
+features = ["limits"]

[dependencies.edn]
path = "../edn"
@@ -40,10 +40,9 @@ path = "../db-traits"
[dependencies.mentat_sql]
path = "../sql"

-# TODO: This should be in dev-dependencies.
+# Should be dev-dependencies.
[dependencies.tabwriter]
-version = "~1.2"
+version = "1.2.1"

[dev-dependencies]
-env_logger = "0.9"
+env_logger = "0.7"
-#tabwriter = { version = "1.2.1" }
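The manifest hunks above swap tilde requirements like `~1.9` for bare (caret) requirements like `1.3.1`. For anyone unsure how those ranges differ, here is a hedged sketch using the external `semver` crate (an assumption made only for the demo; add `semver = "1"` to a scratch project to run it — Cargo itself uses the same requirement grammar):

```rust
use semver::{Version, VersionReq};

fn main() {
    // A tilde requirement pins the minor version: "~1.9" accepts 1.9.x only.
    let tilde = VersionReq::parse("~1.9").unwrap();
    assert!(tilde.matches(&Version::parse("1.9.3").unwrap()));
    assert!(!tilde.matches(&Version::parse("1.10.0").unwrap()));

    // A bare "1.3.1" is a caret requirement: anything >= 1.3.1 and < 2.0.0.
    let caret = VersionReq::parse("1.3.1").unwrap();
    assert!(caret.matches(&Version::parse("1.9.3").unwrap()));
    assert!(!caret.matches(&Version::parse("2.0.0").unwrap()));
}
```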
@@ -48,10 +48,12 @@ where
            } else {
                self.asserted.insert(key, value);
            }
-        } else if let Some(asserted_value) = self.asserted.remove(&key) {
-            self.altered.insert(key, (value, asserted_value));
        } else {
-            self.retracted.insert(key, value);
+            if let Some(asserted_value) = self.asserted.remove(&key) {
+                self.altered.insert(key, (value, asserted_value));
+            } else {
+                self.retracted.insert(key, value);
+            }
        }
    }
}
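Both sides of the hunk above implement the same bookkeeping; the change is purely between `else if let` and a nested `if let`. A self-contained sketch of that logic, with simplified names and an assumed assert-side branch (the part of the function not shown in the hunk), so it should be read as an illustration rather than the project's exact code:

```rust
use std::collections::BTreeMap;

#[derive(Default)]
struct AddRetractAlterSet<K: Ord, V> {
    asserted: BTreeMap<K, V>,
    retracted: BTreeMap<K, V>,
    altered: BTreeMap<K, (V, V)>,
}

impl<K: Ord, V> AddRetractAlterSet<K, V> {
    fn witness(&mut self, key: K, value: V, added: bool) {
        if added {
            // Assumed branch: an assert that matches a pending retract becomes an alter.
            if let Some(retracted_value) = self.retracted.remove(&key) {
                self.altered.insert(key, (retracted_value, value));
            } else {
                self.asserted.insert(key, value);
            }
        } else if let Some(asserted_value) = self.asserted.remove(&key) {
            // A retract that matches a pending assert becomes an alter, as in the hunk.
            self.altered.insert(key, (value, asserted_value));
        } else {
            self.retracted.insert(key, value);
        }
    }
}

fn main() {
    let mut set = AddRetractAlterSet::default();
    set.witness(1, "old", false);
    set.witness(1, "new", true);
    assert_eq!(set.altered.get(&1), Some(&("old", "new")));
}
```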
@ -10,24 +10,24 @@
|
||||||
|
|
||||||
#![allow(dead_code)]
|
#![allow(dead_code)]
|
||||||
|
|
||||||
use crate::db::TypedSQLValue;
|
use db::TypedSQLValue;
|
||||||
use crate::entids;
|
|
||||||
use db_traits::errors::{DbErrorKind, Result};
|
use db_traits::errors::{DbErrorKind, Result};
|
||||||
use edn;
|
use edn;
|
||||||
use edn::entities::Entity;
|
use edn::entities::Entity;
|
||||||
use edn::symbols;
|
use edn::symbols;
|
||||||
use edn::types::Value;
|
use edn::types::Value;
|
||||||
|
use entids;
|
||||||
|
|
||||||
use core_traits::{values, TypedValue};
|
use core_traits::{values, TypedValue};
|
||||||
|
|
||||||
use crate::schema::SchemaBuilding;
|
|
||||||
use crate::types::{Partition, PartitionMap};
|
|
||||||
use mentat_core::{IdentMap, Schema};
|
use mentat_core::{IdentMap, Schema};
|
||||||
|
use schema::SchemaBuilding;
|
||||||
|
use types::{Partition, PartitionMap};
|
||||||
|
|
||||||
/// The first transaction ID applied to the knowledge base.
|
/// The first transaction ID applied to the knowledge base.
|
||||||
///
|
///
|
||||||
/// This is the start of the :db.part/tx partition.
|
/// This is the start of the :db.part/tx partition.
|
||||||
pub const TX0: i64 = 0x1000_0000;
|
pub const TX0: i64 = 0x10000000;
|
||||||
|
|
||||||
/// This is the start of the :db.part/user partition.
|
/// This is the start of the :db.part/user partition.
|
||||||
pub const USER0: i64 = 0x10000;
|
pub const USER0: i64 = 0x10000;
|
||||||
|
@ -206,14 +206,14 @@ lazy_static! {
|
||||||
/// Convert (ident, entid) pairs into [:db/add IDENT :db/ident IDENT] `Value` instances.
|
/// Convert (ident, entid) pairs into [:db/add IDENT :db/ident IDENT] `Value` instances.
|
||||||
fn idents_to_assertions(idents: &[(symbols::Keyword, i64)]) -> Vec<Value> {
|
fn idents_to_assertions(idents: &[(symbols::Keyword, i64)]) -> Vec<Value> {
|
||||||
idents
|
idents
|
||||||
.iter()
|
.into_iter()
|
||||||
.map(|&(ref ident, _)| {
|
.map(|&(ref ident, _)| {
|
||||||
let value = Value::Keyword(ident.clone());
|
let value = Value::Keyword(ident.clone());
|
||||||
Value::Vector(vec![
|
Value::Vector(vec![
|
||||||
values::DB_ADD.clone(),
|
values::DB_ADD.clone(),
|
||||||
value.clone(),
|
value.clone(),
|
||||||
values::DB_IDENT.clone(),
|
values::DB_IDENT.clone(),
|
||||||
value,
|
value.clone(),
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
|
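The `idents_to_assertions` hunk just above only toggles `.iter()` / `.into_iter()` and a redundant `clone()`, but the transformation it performs is worth spelling out: each `(ident, entid)` pair becomes a `[:db/add IDENT :db/ident IDENT]` vector. A dependency-free sketch, with strings standing in for `edn::Value` and a made-up ident, purely for illustration:

```rust
fn idents_to_assertions(idents: &[(&str, i64)]) -> Vec<Vec<String>> {
    idents
        .iter()
        .map(|&(ident, _)| {
            // [:db/add IDENT :db/ident IDENT]
            vec![
                ":db/add".to_string(),
                ident.to_string(),
                ":db/ident".to_string(),
                ident.to_string(),
            ]
        })
        .collect()
}

fn main() {
    let out = idents_to_assertions(&[(":person/name", 65536)]);
    assert_eq!(out[0], vec![":db/add", ":person/name", ":db/ident", ":person/name"]);
}
```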
@ -225,7 +225,7 @@ fn schema_attrs_to_assertions(version: u32, idents: &[symbols::Keyword]) -> Vec<
|
||||||
let schema_attr = Value::Keyword(ns_keyword!("db.schema", "attribute"));
|
let schema_attr = Value::Keyword(ns_keyword!("db.schema", "attribute"));
|
||||||
let schema_version = Value::Keyword(ns_keyword!("db.schema", "version"));
|
let schema_version = Value::Keyword(ns_keyword!("db.schema", "version"));
|
||||||
idents
|
idents
|
||||||
.iter()
|
.into_iter()
|
||||||
.map(|ident| {
|
.map(|ident| {
|
||||||
let value = Value::Keyword(ident.clone());
|
let value = Value::Keyword(ident.clone());
|
||||||
Value::Vector(vec![
|
Value::Vector(vec![
|
||||||
|
@ -260,7 +260,7 @@ fn symbolic_schema_to_triples(
|
||||||
Value::Map(ref m) => {
|
Value::Map(ref m) => {
|
||||||
for (ident, mp) in m {
|
for (ident, mp) in m {
|
||||||
let ident = match ident {
|
let ident = match ident {
|
||||||
Value::Keyword(ref ident) => ident,
|
&Value::Keyword(ref ident) => ident,
|
||||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||||
"Expected namespaced keyword for ident but got '{:?}'",
|
"Expected namespaced keyword for ident but got '{:?}'",
|
||||||
ident
|
ident
|
||||||
|
@ -270,7 +270,7 @@ fn symbolic_schema_to_triples(
|
||||||
Value::Map(ref mpp) => {
|
Value::Map(ref mpp) => {
|
||||||
for (attr, value) in mpp {
|
for (attr, value) in mpp {
|
||||||
let attr = match attr {
|
let attr = match attr {
|
||||||
Value::Keyword(ref attr) => attr,
|
&Value::Keyword(ref attr) => attr,
|
||||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||||
"Expected namespaced keyword for attr but got '{:?}'",
|
"Expected namespaced keyword for attr but got '{:?}'",
|
||||||
attr
|
attr
|
||||||
|
@ -289,7 +289,7 @@ fn symbolic_schema_to_triples(
|
||||||
Some(TypedValue::Keyword(ref k)) => ident_map
|
Some(TypedValue::Keyword(ref k)) => ident_map
|
||||||
.get(k)
|
.get(k)
|
||||||
.map(|entid| TypedValue::Ref(*entid))
|
.map(|entid| TypedValue::Ref(*entid))
|
||||||
.ok_or_else(|| DbErrorKind::UnrecognizedIdent(k.to_string()))?,
|
.ok_or(DbErrorKind::UnrecognizedIdent(k.to_string()))?,
|
||||||
Some(v) => v,
|
Some(v) => v,
|
||||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||||
"Expected Mentat typed value for value but got '{:?}'",
|
"Expected Mentat typed value for value but got '{:?}'",
|
||||||
|
@ -377,6 +377,8 @@ pub(crate) fn bootstrap_entities() -> Vec<Entity<edn::ValueAndSpan>> {
|
||||||
);
|
);
|
||||||
|
|
||||||
// Failure here is a coding error (since the inputs are fixed), not a runtime error.
|
// Failure here is a coding error (since the inputs are fixed), not a runtime error.
|
||||||
// TODO: represent these bootstrap entity data errors rather than just panicing.
|
// TODO: represent these bootstrap data errors rather than just panicing.
|
||||||
edn::parse::entities(&bootstrap_assertions.to_string()).expect("bootstrap assertions")
|
let bootstrap_entities: Vec<Entity<edn::ValueAndSpan>> =
|
||||||
|
edn::parse::entities(&bootstrap_assertions.to_string()).expect("bootstrap assertions");
|
||||||
|
return bootstrap_entities;
|
||||||
}
|
}
|
||||||
|
|
|
@ -54,6 +54,8 @@ use std::collections::btree_map::Entry::{Occupied, Vacant};
|
||||||
|
|
||||||
use std::iter::once;
|
use std::iter::once;
|
||||||
|
|
||||||
|
use std::mem;
|
||||||
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use std::iter::Peekable;
|
use std::iter::Peekable;
|
||||||
|
@ -61,7 +63,6 @@ use std::iter::Peekable;
|
||||||
use failure::ResultExt;
|
use failure::ResultExt;
|
||||||
|
|
||||||
use rusqlite;
|
use rusqlite;
|
||||||
use rusqlite::params_from_iter;
|
|
||||||
|
|
||||||
use core_traits::{Binding, Entid, TypedValue};
|
use core_traits::{Binding, Entid, TypedValue};
|
||||||
|
|
||||||
|
@ -73,11 +74,11 @@ use mentat_sql::{QueryBuilder, SQLQuery, SQLiteQueryBuilder};
|
||||||
|
|
||||||
use edn::entities::OpType;
|
use edn::entities::OpType;
|
||||||
|
|
||||||
use crate::db::TypedSQLValue;
|
use db::TypedSQLValue;
|
||||||
|
|
||||||
use db_traits::errors::{DbError, DbErrorKind, Result};
|
use db_traits::errors::{DbError, DbErrorKind, Result};
|
||||||
|
|
||||||
use crate::watcher::TransactWatcher;
|
use watcher::TransactWatcher;
|
||||||
|
|
||||||
// Right now we use BTreeMap, because we expect few cached attributes.
|
// Right now we use BTreeMap, because we expect few cached attributes.
|
||||||
pub type CacheMap<K, V> = BTreeMap<K, V>;
|
pub type CacheMap<K, V> = BTreeMap<K, V>;
|
||||||
|
@ -189,7 +190,7 @@ impl AevFactory {
|
||||||
return existing;
|
return existing;
|
||||||
}
|
}
|
||||||
self.strings.insert(rc.clone());
|
self.strings.insert(rc.clone());
|
||||||
TypedValue::String(rc)
|
return TypedValue::String(rc);
|
||||||
}
|
}
|
||||||
t => t,
|
t => t,
|
||||||
}
|
}
|
||||||
|
@ -199,7 +200,9 @@ impl AevFactory {
|
||||||
let a: Entid = row.get_unwrap(0);
|
let a: Entid = row.get_unwrap(0);
|
||||||
let e: Entid = row.get_unwrap(1);
|
let e: Entid = row.get_unwrap(1);
|
||||||
let value_type_tag: i32 = row.get_unwrap(3);
|
let value_type_tag: i32 = row.get_unwrap(3);
|
||||||
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag).unwrap();
|
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag)
|
||||||
|
.map(|x| x)
|
||||||
|
.unwrap();
|
||||||
(a, e, self.intern(v))
|
(a, e, self.intern(v))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -374,7 +377,7 @@ impl RemoveFromCache for MultiValAttributeCache {
|
||||||
|
|
||||||
impl CardinalityManyCache for MultiValAttributeCache {
|
impl CardinalityManyCache for MultiValAttributeCache {
|
||||||
fn acc(&mut self, e: Entid, v: TypedValue) {
|
fn acc(&mut self, e: Entid, v: TypedValue) {
|
||||||
self.e_vs.entry(e).or_insert_with(Vec::new).push(v)
|
self.e_vs.entry(e).or_insert(vec![]).push(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set(&mut self, e: Entid, vs: Vec<TypedValue>) {
|
fn set(&mut self, e: Entid, vs: Vec<TypedValue>) {
|
||||||
|
@ -436,7 +439,7 @@ impl UniqueReverseAttributeCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_e(&self, v: &TypedValue) -> Option<Entid> {
|
fn get_e(&self, v: &TypedValue) -> Option<Entid> {
|
||||||
self.v_e.get(v).and_then(|o| *o)
|
self.v_e.get(v).and_then(|o| o.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lookup(&self, v: &TypedValue) -> Option<Option<Entid>> {
|
fn lookup(&self, v: &TypedValue) -> Option<Option<Entid>> {
|
||||||
|
@ -491,7 +494,7 @@ impl RemoveFromCache for NonUniqueReverseAttributeCache {
|
||||||
|
|
||||||
impl NonUniqueReverseAttributeCache {
|
impl NonUniqueReverseAttributeCache {
|
||||||
fn acc(&mut self, e: Entid, v: TypedValue) {
|
fn acc(&mut self, e: Entid, v: TypedValue) {
|
||||||
self.v_es.entry(v).or_insert_with(BTreeSet::new).insert(e);
|
self.v_es.entry(v).or_insert(BTreeSet::new()).insert(e);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_es(&self, v: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
fn get_es(&self, v: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
||||||
|
@ -640,9 +643,9 @@ enum AccumulationBehavior {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AccumulationBehavior {
|
impl AccumulationBehavior {
|
||||||
fn is_replacing(self) -> bool {
|
fn is_replacing(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
AccumulationBehavior::Add { replacing } => replacing,
|
&AccumulationBehavior::Add { replacing } => replacing,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1003,7 +1006,7 @@ impl AttributeCaches {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// We need this block for fall-back.
|
// We need this block for fallback.
|
||||||
impl AttributeCaches {
|
impl AttributeCaches {
|
||||||
fn get_entid_for_value_if_present(
|
fn get_entid_for_value_if_present(
|
||||||
&self,
|
&self,
|
||||||
|
@ -1072,10 +1075,8 @@ impl AttributeCaches {
|
||||||
replacing: bool,
|
replacing: bool,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let mut aev_factory = AevFactory::new();
|
let mut aev_factory = AevFactory::new();
|
||||||
let rows = statement.query_map(params_from_iter(&args), |row| {
|
let rows = statement.query_map(&args, |row| Ok(aev_factory.row_to_aev(row)))?;
|
||||||
Ok(aev_factory.row_to_aev(row))
|
let aevs = AevRows { rows: rows };
|
||||||
})?;
|
|
||||||
let aevs = AevRows { rows };
|
|
||||||
self.accumulate_into_cache(
|
self.accumulate_into_cache(
|
||||||
None,
|
None,
|
||||||
schema,
|
schema,
|
||||||
|
@ -1131,7 +1132,7 @@ impl AttributeCaches {
|
||||||
schema: &'s Schema,
|
schema: &'s Schema,
|
||||||
sqlite: &'c rusqlite::Connection,
|
sqlite: &'c rusqlite::Connection,
|
||||||
attrs: AttributeSpec,
|
attrs: AttributeSpec,
|
||||||
entities: &[Entid],
|
entities: &Vec<Entid>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
// Mark the attributes as cached as we go. We do this because we're going in through the
|
// Mark the attributes as cached as we go. We do this because we're going in through the
|
||||||
// back door here, and the usual caching API won't have taken care of this for us.
|
// back door here, and the usual caching API won't have taken care of this for us.
|
||||||
|
@ -1228,17 +1229,17 @@ impl AttributeCaches {
|
||||||
schema: &'s Schema,
|
schema: &'s Schema,
|
||||||
sqlite: &'c rusqlite::Connection,
|
sqlite: &'c rusqlite::Connection,
|
||||||
mut attrs: AttributeSpec,
|
mut attrs: AttributeSpec,
|
||||||
entities: &[Entid],
|
entities: &Vec<Entid>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
// TODO: Exclude any entities for which every attribute is known.
|
// TODO: Exclude any entities for which every attribute is known.
|
||||||
// TODO: initialize from an existing (complete) AttributeCache.
|
// TODO: initialize from an existing (complete) AttributeCache.
|
||||||
|
|
||||||
// Exclude any attributes for which every entity's value is already known.
|
// Exclude any attributes for which every entity's value is already known.
|
||||||
match &mut attrs {
|
match &mut attrs {
|
||||||
AttributeSpec::All => {
|
&mut AttributeSpec::All => {
|
||||||
// If we're caching all attributes, there's nothing we can exclude.
|
// If we're caching all attributes, there's nothing we can exclude.
|
||||||
}
|
}
|
||||||
AttributeSpec::Specified {
|
&mut AttributeSpec::Specified {
|
||||||
ref mut non_fts,
|
ref mut non_fts,
|
||||||
ref mut fts,
|
ref mut fts,
|
||||||
} => {
|
} => {
|
||||||
|
@ -1284,7 +1285,7 @@ impl AttributeCaches {
|
||||||
schema: &'s Schema,
|
schema: &'s Schema,
|
||||||
sqlite: &'c rusqlite::Connection,
|
sqlite: &'c rusqlite::Connection,
|
||||||
attrs: AttributeSpec,
|
attrs: AttributeSpec,
|
||||||
entities: &[Entid],
|
entities: &Vec<Entid>,
|
||||||
) -> Result<AttributeCaches> {
|
) -> Result<AttributeCaches> {
|
||||||
let mut cache = AttributeCaches::default();
|
let mut cache = AttributeCaches::default();
|
||||||
cache.populate_cache_for_entities_and_attributes(schema, sqlite, attrs, entities)?;
|
cache.populate_cache_for_entities_and_attributes(schema, sqlite, attrs, entities)?;
|
||||||
|
@ -1449,7 +1450,7 @@ pub struct SQLiteAttributeCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SQLiteAttributeCache {
|
impl SQLiteAttributeCache {
|
||||||
fn make_mut(&mut self) -> &mut AttributeCaches {
|
fn make_mut<'s>(&'s mut self) -> &'s mut AttributeCaches {
|
||||||
Arc::make_mut(&mut self.inner)
|
Arc::make_mut(&mut self.inner)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1627,7 +1628,7 @@ impl InProgressSQLiteAttributeCache {
|
||||||
let overlay = inner.make_override();
|
let overlay = inner.make_override();
|
||||||
InProgressSQLiteAttributeCache {
|
InProgressSQLiteAttributeCache {
|
||||||
inner: inner.inner,
|
inner: inner.inner,
|
||||||
overlay,
|
overlay: overlay,
|
||||||
unregistered_forward: Default::default(),
|
unregistered_forward: Default::default(),
|
||||||
unregistered_reverse: Default::default(),
|
unregistered_reverse: Default::default(),
|
||||||
}
|
}
|
||||||
|
@ -1817,7 +1818,9 @@ impl CachedAttributes for InProgressSQLiteAttributeCache {
|
||||||
.inner
|
.inner
|
||||||
.forward_cached_attributes
|
.forward_cached_attributes
|
||||||
.iter()
|
.iter()
|
||||||
.any(|a| !self.unregistered_forward.contains(a))
|
.filter(|a| !self.unregistered_forward.contains(a))
|
||||||
|
.next()
|
||||||
|
.is_some()
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -1825,7 +1828,9 @@ impl CachedAttributes for InProgressSQLiteAttributeCache {
|
||||||
self.inner
|
self.inner
|
||||||
.reverse_cached_attributes
|
.reverse_cached_attributes
|
||||||
.iter()
|
.iter()
|
||||||
.any(|a| !self.unregistered_reverse.contains(a))
|
.filter(|a| !self.unregistered_reverse.contains(a))
|
||||||
|
.next()
|
||||||
|
.is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_entids_for_value(
|
fn get_entids_for_value(
|
||||||
|
@ -1939,7 +1944,7 @@ impl<'a> InProgressCacheTransactWatcher<'a> {
|
||||||
let mut w = InProgressCacheTransactWatcher {
|
let mut w = InProgressCacheTransactWatcher {
|
||||||
collected_assertions: Default::default(),
|
collected_assertions: Default::default(),
|
||||||
collected_retractions: Default::default(),
|
collected_retractions: Default::default(),
|
||||||
cache,
|
cache: cache,
|
||||||
active: true,
|
active: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1972,10 +1977,10 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
||||||
}
|
}
|
||||||
Entry::Occupied(mut entry) => {
|
Entry::Occupied(mut entry) => {
|
||||||
match entry.get_mut() {
|
match entry.get_mut() {
|
||||||
Either::Left(_) => {
|
&mut Either::Left(_) => {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
}
|
}
|
||||||
Either::Right(ref mut vec) => {
|
&mut Either::Right(ref mut vec) => {
|
||||||
vec.push((e, v.clone()));
|
vec.push((e, v.clone()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1984,12 +1989,14 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
    fn done(&mut self, _t: &Entid, schema: &Schema) -> Result<()> {
-        // Oh, how I wish we had `impl trait`. Without it we have a six-line type signature if we
+        // Oh, I wish we had impl trait. Without it we have a six-line type signature if we
        // try to break this out as a helper function.
-        let collected_retractions = std::mem::take(&mut self.collected_retractions);
-        let collected_assertions = std::mem::take(&mut self.collected_assertions);
+        let collected_retractions =
+            mem::replace(&mut self.collected_retractions, Default::default());
+        let collected_assertions = mem::replace(&mut self.collected_assertions, Default::default());
        let mut intermediate_expansion = once(collected_retractions)
            .chain(once(collected_assertions))
+            .into_iter()
            .map(move |tree| {
                tree.into_iter()
                    .filter_map(move |(a, evs)| {
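The two sides of that hunk are equivalent: `std::mem::take(x)` is shorthand for `mem::replace(x, Default::default())`, moving the accumulated collections out of `&mut self` without cloning and leaving empty ones behind. A small runnable sketch of the pattern:

```rust
use std::collections::BTreeMap;

// Drain an accumulator in place: the caller keeps the map, we get its contents.
fn drain(collected: &mut BTreeMap<i64, Vec<&'static str>>) -> BTreeMap<i64, Vec<&'static str>> {
    // Equivalent to mem::replace(collected, Default::default()).
    std::mem::take(collected)
}

fn main() {
    let mut collected = BTreeMap::new();
    collected.insert(1, vec!["assertion"]);
    let drained = drain(&mut collected);
    assert!(collected.is_empty());
    assert_eq!(drained.len(), 1);
}
```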
|
@ -2011,7 +2018,7 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InProgressSQLiteAttributeCache {
|
impl InProgressSQLiteAttributeCache {
|
||||||
pub fn transact_watcher(&mut self) -> InProgressCacheTransactWatcher {
|
pub fn transact_watcher<'a>(&'a mut self) -> InProgressCacheTransactWatcher<'a> {
|
||||||
InProgressCacheTransactWatcher::new(self)
|
InProgressCacheTransactWatcher::new(self)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
151 db/src/db.rs

@@ -22,16 +22,15 @@ use itertools;
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use rusqlite;
|
use rusqlite;
|
||||||
use rusqlite::limits::Limit;
|
use rusqlite::limits::Limit;
|
||||||
use rusqlite::params_from_iter;
|
|
||||||
use rusqlite::types::{ToSql, ToSqlOutput};
|
use rusqlite::types::{ToSql, ToSqlOutput};
|
||||||
use rusqlite::TransactionBehavior;
|
use rusqlite::TransactionBehavior;
|
||||||
|
|
||||||
use crate::bootstrap;
|
use bootstrap;
|
||||||
use crate::{repeat_values, to_namespaced_keyword};
|
use {repeat_values, to_namespaced_keyword};
|
||||||
|
|
||||||
use edn::{DateTime, Utc, Uuid, Value};
|
use edn::{DateTime, Utc, Uuid, Value};
|
||||||
|
|
||||||
use crate::entids;
|
use entids;
|
||||||
|
|
||||||
use core_traits::{attribute, Attribute, AttributeBitFlags, Entid, TypedValue, ValueType};
|
use core_traits::{attribute, Attribute, AttributeBitFlags, Entid, TypedValue, ValueType};
|
||||||
|
|
||||||
|
@ -39,13 +38,13 @@ use mentat_core::{AttributeMap, FromMicros, IdentMap, Schema, ToMicros, ValueRc}
|
||||||
|
|
||||||
use db_traits::errors::{DbErrorKind, Result};
|
use db_traits::errors::{DbErrorKind, Result};
|
||||||
|
|
||||||
use crate::metadata;
|
use metadata;
|
||||||
use crate::schema::SchemaBuilding;
|
use schema::SchemaBuilding;
|
||||||
use crate::tx::transact;
|
use tx::transact;
|
||||||
use crate::types::{AVMap, AVPair, Partition, PartitionMap, DB};
|
use types::{AVMap, AVPair, Partition, PartitionMap, DB};
|
||||||
|
|
||||||
use crate::watcher::NullWatcher;
|
|
||||||
use std::convert::TryInto;
|
use std::convert::TryInto;
|
||||||
|
use watcher::NullWatcher;
|
||||||
|
|
||||||
// In PRAGMA foo='bar', `'bar'` must be a constant string (it cannot be a
|
// In PRAGMA foo='bar', `'bar'` must be a constant string (it cannot be a
|
||||||
// bound parameter), so we need to escape manually. According to
|
// bound parameter), so we need to escape manually. According to
|
||||||
|
@ -67,9 +66,10 @@ fn make_connection(
|
||||||
let page_size = 32768;
|
let page_size = 32768;
|
||||||
|
|
||||||
    let initial_pragmas = if let Some(encryption_key) = maybe_encryption_key {
-        if !cfg!(feature = "sqlcipher") {
-            panic!("This function shouldn't be called with a key unless we have sqlcipher support");
-        }
+        assert!(
+            cfg!(feature = "sqlcipher"),
+            "This function shouldn't be called with a key unless we have sqlcipher support"
+        );
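Both forms abort when a key is supplied without sqlcipher support; the `assert!` version simply folds the `cfg!` check and the message into one statement. A hedged, standalone sketch of the pattern (the function name is made up, and on newer toolchains an undeclared `sqlcipher` feature may raise an `unexpected_cfgs` warning):

```rust
fn require_sqlcipher(maybe_encryption_key: Option<&str>) {
    if let Some(_key) = maybe_encryption_key {
        // cfg!() evaluates at compile time to true/false; assert! panics with the
        // message if it is false and a key was nonetheless provided.
        assert!(
            cfg!(feature = "sqlcipher"),
            "This function shouldn't be called with a key unless we have sqlcipher support"
        );
    }
}

fn main() {
    // Fine: no key supplied, so the feature check is never reached.
    require_sqlcipher(None);
}
```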
// Important: The `cipher_page_size` cannot be changed without breaking
|
// Important: The `cipher_page_size` cannot be changed without breaking
|
||||||
// the ability to open databases that were written when using a
|
// the ability to open databases that were written when using a
|
||||||
// different `cipher_page_size`. Additionally, it (AFAICT) must be a
|
// different `cipher_page_size`. Additionally, it (AFAICT) must be a
|
||||||
|
@ -147,10 +147,10 @@ pub const CURRENT_VERSION: i32 = 1;
|
||||||
|
|
||||||
/// MIN_SQLITE_VERSION should be changed when there's a new minimum version of sqlite required
|
/// MIN_SQLITE_VERSION should be changed when there's a new minimum version of sqlite required
|
||||||
/// for the project to work.
|
/// for the project to work.
|
||||||
const MIN_SQLITE_VERSION: i32 = 3_008_000;
|
const MIN_SQLITE_VERSION: i32 = 3008000;
|
||||||
|
|
||||||
const TRUE: &bool = &true;
|
const TRUE: &'static bool = &true;
|
||||||
const FALSE: &bool = &false;
|
const FALSE: &'static bool = &false;
|
||||||
|
|
||||||
/// Turn an owned bool into a static reference to a bool.
|
/// Turn an owned bool into a static reference to a bool.
|
||||||
///
|
///
|
||||||
|
@ -315,7 +315,7 @@ fn create_current_partition_view(conn: &rusqlite::Connection) -> Result<()> {
|
||||||
max(e) + 1 AS idx
|
max(e) + 1 AS idx
|
||||||
FROM timelined_transactions WHERE timeline = {} GROUP BY part",
|
FROM timelined_transactions WHERE timeline = {} GROUP BY part",
|
||||||
case.join(" "),
|
case.join(" "),
|
||||||
crate::TIMELINE_MAIN
|
::TIMELINE_MAIN
|
||||||
);
|
);
|
||||||
|
|
||||||
conn.execute(&view_stmt, rusqlite::params![])?;
|
conn.execute(&view_stmt, rusqlite::params![])?;
|
||||||
|
@ -360,10 +360,9 @@ pub fn create_current_version(conn: &mut rusqlite::Connection) -> Result<DB> {
|
||||||
// TODO: validate metadata mutations that aren't schema related, like additional partitions.
|
// TODO: validate metadata mutations that aren't schema related, like additional partitions.
|
||||||
if let Some(next_schema) = next_schema {
|
if let Some(next_schema) = next_schema {
|
||||||
if next_schema != db.schema {
|
if next_schema != db.schema {
|
||||||
bail!(DbErrorKind::NotYetImplemented(
|
bail!(DbErrorKind::NotYetImplemented(format!(
|
||||||
"Initial bootstrap transaction did not produce expected bootstrap schema"
|
"Initial bootstrap transaction did not produce expected bootstrap schema"
|
||||||
.to_string()
|
)));
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -397,7 +396,7 @@ pub trait TypedSQLValue {
|
||||||
value: rusqlite::types::Value,
|
value: rusqlite::types::Value,
|
||||||
value_type_tag: i32,
|
value_type_tag: i32,
|
||||||
) -> Result<TypedValue>;
|
) -> Result<TypedValue>;
|
||||||
fn to_sql_value_pair(&self) -> (ToSqlOutput, i32);
|
fn to_sql_value_pair<'a>(&'a self) -> (ToSqlOutput<'a>, i32);
|
||||||
fn from_edn_value(value: &Value) -> Option<TypedValue>;
|
fn from_edn_value(value: &Value) -> Option<TypedValue>;
|
||||||
fn to_edn_value_pair(&self) -> (Value, ValueType);
|
fn to_edn_value_pair(&self) -> (Value, ValueType);
|
||||||
}
|
}
|
||||||
|
@ -434,7 +433,6 @@ impl TypedSQLValue for TypedValue {
|
||||||
Ok(TypedValue::Uuid(u))
|
Ok(TypedValue::Uuid(u))
|
||||||
}
|
}
|
||||||
(13, rusqlite::types::Value::Text(x)) => to_namespaced_keyword(&x).map(|k| k.into()),
|
(13, rusqlite::types::Value::Text(x)) => to_namespaced_keyword(&x).map(|k| k.into()),
|
||||||
(15, rusqlite::types::Value::Blob(x)) => Ok(TypedValue::Bytes(x.into())),
|
|
||||||
(_, value) => bail!(DbErrorKind::BadSQLValuePair(value, value_type_tag)),
|
(_, value) => bail!(DbErrorKind::BadSQLValuePair(value, value_type_tag)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -448,46 +446,43 @@ impl TypedSQLValue for TypedValue {
|
||||||
/// This function is deterministic.
|
/// This function is deterministic.
|
||||||
fn from_edn_value(value: &Value) -> Option<TypedValue> {
|
fn from_edn_value(value: &Value) -> Option<TypedValue> {
|
||||||
match value {
|
match value {
|
||||||
Value::Boolean(x) => Some(TypedValue::Boolean(*x)),
|
&Value::Boolean(x) => Some(TypedValue::Boolean(x)),
|
||||||
Value::Instant(x) => Some(TypedValue::Instant(*x)),
|
&Value::Instant(x) => Some(TypedValue::Instant(x)),
|
||||||
Value::Integer(x) => Some(TypedValue::Long(*x)),
|
&Value::Integer(x) => Some(TypedValue::Long(x)),
|
||||||
Value::Uuid(x) => Some(TypedValue::Uuid(*x)),
|
&Value::Uuid(x) => Some(TypedValue::Uuid(x)),
|
||||||
Value::Float(ref x) => Some(TypedValue::Double(*x)),
|
&Value::Float(ref x) => Some(TypedValue::Double(x.clone())),
|
||||||
Value::Text(ref x) => Some(x.clone().into()),
|
&Value::Text(ref x) => Some(x.clone().into()),
|
||||||
Value::Keyword(ref x) => Some(x.clone().into()),
|
&Value::Keyword(ref x) => Some(x.clone().into()),
|
||||||
Value::Bytes(b) => Some(TypedValue::Bytes(b.clone())),
|
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the corresponding SQLite `value` and `value_type_tag` pair.
|
/// Return the corresponding SQLite `value` and `value_type_tag` pair.
|
||||||
fn to_sql_value_pair(&self) -> (ToSqlOutput, i32) {
|
fn to_sql_value_pair<'a>(&'a self) -> (ToSqlOutput<'a>, i32) {
|
||||||
match self {
|
match self {
|
||||||
TypedValue::Ref(x) => ((*x).into(), 0),
|
&TypedValue::Ref(x) => (x.into(), 0),
|
||||||
TypedValue::Boolean(x) => ((if *x { 1 } else { 0 }).into(), 1),
|
&TypedValue::Boolean(x) => ((if x { 1 } else { 0 }).into(), 1),
|
||||||
TypedValue::Instant(x) => (x.to_micros().into(), 4),
|
&TypedValue::Instant(x) => (x.to_micros().into(), 4),
|
||||||
// SQLite distinguishes integral from decimal types, allowing long and double to share a tag.
|
// SQLite distinguishes integral from decimal types, allowing long and double to share a tag.
|
||||||
TypedValue::Long(x) => ((*x).into(), 5),
|
&TypedValue::Long(x) => (x.into(), 5),
|
||||||
TypedValue::Double(x) => (x.into_inner().into(), 5),
|
&TypedValue::Double(x) => (x.into_inner().into(), 5),
|
||||||
TypedValue::String(ref x) => (x.as_str().into(), 10),
|
&TypedValue::String(ref x) => (x.as_str().into(), 10),
|
||||||
TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
|
&TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
|
||||||
TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
|
&TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
|
||||||
TypedValue::Bytes(b) => (b.to_vec().into(), 15),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the corresponding EDN `value` and `value_type` pair.
|
/// Return the corresponding EDN `value` and `value_type` pair.
|
||||||
fn to_edn_value_pair(&self) -> (Value, ValueType) {
|
fn to_edn_value_pair(&self) -> (Value, ValueType) {
|
||||||
match self {
|
match self {
|
||||||
TypedValue::Ref(x) => (Value::Integer(*x), ValueType::Ref),
|
&TypedValue::Ref(x) => (Value::Integer(x), ValueType::Ref),
|
||||||
TypedValue::Boolean(x) => (Value::Boolean(*x), ValueType::Boolean),
|
&TypedValue::Boolean(x) => (Value::Boolean(x), ValueType::Boolean),
|
||||||
TypedValue::Instant(x) => (Value::Instant(*x), ValueType::Instant),
|
&TypedValue::Instant(x) => (Value::Instant(x), ValueType::Instant),
|
||||||
TypedValue::Long(x) => (Value::Integer(*x), ValueType::Long),
|
&TypedValue::Long(x) => (Value::Integer(x), ValueType::Long),
|
||||||
TypedValue::Double(x) => (Value::Float(*x), ValueType::Double),
|
&TypedValue::Double(x) => (Value::Float(x), ValueType::Double),
|
||||||
TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
|
&TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
|
||||||
TypedValue::Uuid(ref u) => (Value::Uuid(*u), ValueType::Uuid),
|
&TypedValue::Uuid(ref u) => (Value::Uuid(u.clone()), ValueType::Uuid),
|
||||||
TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
|
&TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
|
||||||
TypedValue::Bytes(b) => (Value::Bytes(b.clone()), ValueType::Bytes),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
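The hunks above rewrite every arm of `to_sql_value_pair` and `to_edn_value_pair` from `&TypedValue::…` patterns to plain ones, but the underlying idea is the `(value, value_type_tag)` round trip. Here is a dependency-free sketch of that round trip; `SqlValue` stands in for `rusqlite::types::Value`, and only tags visible in this diff (0, 1, 5, 10) are kept.

```rust
#[derive(Debug, PartialEq)]
enum SqlValue {
    Integer(i64),
    Text(String),
}

#[derive(Debug, PartialEq)]
enum TypedValue {
    Ref(i64),
    Boolean(bool),
    Long(i64),
    String(String),
}

// Encode a typed value as a raw SQLite value plus a type tag.
fn to_sql_value_pair(v: &TypedValue) -> (SqlValue, i32) {
    match v {
        TypedValue::Ref(x) => (SqlValue::Integer(*x), 0),
        TypedValue::Boolean(x) => (SqlValue::Integer(if *x { 1 } else { 0 }), 1),
        TypedValue::Long(x) => (SqlValue::Integer(*x), 5),
        TypedValue::String(x) => (SqlValue::Text(x.clone()), 10),
    }
}

// Decode: the tag disambiguates values that share a SQLite storage class.
fn from_sql_value_pair(value: SqlValue, tag: i32) -> Option<TypedValue> {
    match (tag, value) {
        (0, SqlValue::Integer(x)) => Some(TypedValue::Ref(x)),
        (1, SqlValue::Integer(x)) => Some(TypedValue::Boolean(x != 0)),
        (5, SqlValue::Integer(x)) => Some(TypedValue::Long(x)),
        (10, SqlValue::Text(x)) => Some(TypedValue::String(x)),
        _ => None,
    }
}

fn main() {
    let (v, tag) = to_sql_value_pair(&TypedValue::Boolean(true));
    assert_eq!(from_sql_value_pair(v, tag), Some(TypedValue::Boolean(true)));
}
```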
@ -515,7 +510,7 @@ pub fn read_partition_map(conn: &rusqlite::Connection) -> Result<PartitionMap> {
|
||||||
// First part of the union sprinkles 'allow_excision' into the 'parts' view.
|
// First part of the union sprinkles 'allow_excision' into the 'parts' view.
|
||||||
// Second part of the union takes care of partitions which are known
|
// Second part of the union takes care of partitions which are known
|
||||||
// but don't have any transactions.
|
// but don't have any transactions.
|
||||||
conn.prepare(
|
let mut stmt: rusqlite::Statement = conn.prepare(
|
||||||
"
|
"
|
||||||
SELECT
|
SELECT
|
||||||
known_parts.part,
|
known_parts.part,
|
||||||
|
@ -541,14 +536,16 @@ pub fn read_partition_map(conn: &rusqlite::Connection) -> Result<PartitionMap> {
|
||||||
known_parts
|
known_parts
|
||||||
WHERE
|
WHERE
|
||||||
part NOT IN (SELECT part FROM parts)",
|
part NOT IN (SELECT part FROM parts)",
|
||||||
)?
|
)?;
|
||||||
.query_and_then(rusqlite::params![], |row| -> Result<(String, Partition)> {
|
let m = stmt
|
||||||
Ok((
|
.query_and_then(rusqlite::params![], |row| -> Result<(String, Partition)> {
|
||||||
row.get(0)?,
|
Ok((
|
||||||
Partition::new(row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?),
|
row.get(0)?,
|
||||||
))
|
Partition::new(row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?),
|
||||||
})?
|
))
|
||||||
.collect()
|
})?
|
||||||
|
.collect();
|
||||||
|
m
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the ident map materialized view from the given SQL store.
|
/// Read the ident map materialized view from the given SQL store.
|
||||||
|
@ -770,7 +767,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
//
|
//
|
||||||
// TODO: `collect` into a HashSet so that any (a, v) is resolved at most once.
|
// TODO: `collect` into a HashSet so that any (a, v) is resolved at most once.
|
||||||
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
||||||
let chunks: itertools::IntoChunks<_> = avs.iter().enumerate().chunks(max_vars / 4);
|
let chunks: itertools::IntoChunks<_> = avs.into_iter().enumerate().chunks(max_vars / 4);
|
||||||
|
|
||||||
// We'd like to `flat_map` here, but it's not obvious how to `flat_map` across `Result`.
|
// We'd like to `flat_map` here, but it's not obvious how to `flat_map` across `Result`.
|
||||||
// Alternatively, this is a `fold`, and it might be wise to express it as such.
|
// Alternatively, this is a `fold`, and it might be wise to express it as such.
|
||||||
|
@ -809,7 +806,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
values);
|
values);
|
||||||
let mut stmt: rusqlite::Statement = self.prepare(s.as_str())?;
|
let mut stmt: rusqlite::Statement = self.prepare(s.as_str())?;
|
||||||
|
|
||||||
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(params_from_iter(¶ms), |row| -> Result<(i64, Entid)> {
|
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(¶ms, |row| -> Result<(i64, Entid)> {
|
||||||
Ok((row.get(0)?, row.get(1)?))
|
Ok((row.get(0)?, row.get(1)?))
|
||||||
})?.collect();
|
})?.collect();
|
||||||
m
|
m
|
||||||
|
@ -903,8 +900,9 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
let bindings_per_statement = 6;
|
let bindings_per_statement = 6;
|
||||||
|
|
||||||
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
||||||
let chunks: itertools::IntoChunks<_> =
|
let chunks: itertools::IntoChunks<_> = entities
|
||||||
entities.iter().chunks(max_vars / bindings_per_statement);
|
.into_iter()
|
||||||
|
.chunks(max_vars / bindings_per_statement);
|
||||||
|
|
||||||
// We'd like to flat_map here, but it's not obvious how to flat_map across Result.
|
// We'd like to flat_map here, but it's not obvious how to flat_map across Result.
|
||||||
let results: Result<Vec<()>> = chunks.into_iter().map(|chunk| -> Result<()> {
|
let results: Result<Vec<()>> = chunks.into_iter().map(|chunk| -> Result<()> {
|
||||||
|
@ -913,7 +911,6 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
// We must keep these computed values somewhere to reference them later, so we can't
|
// We must keep these computed values somewhere to reference them later, so we can't
|
||||||
// combine this map and the subsequent flat_map.
|
// combine this map and the subsequent flat_map.
|
||||||
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
let block: Result<Vec<(i64 /* e */,
|
let block: Result<Vec<(i64 /* e */,
|
||||||
i64 /* a */,
|
i64 /* a */,
|
||||||
ToSqlOutput<'a> /* value */,
|
ToSqlOutput<'a> /* value */,
|
||||||
|
@ -953,7 +950,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
|
|
||||||
// TODO: consider ensuring we inserted the expected number of rows.
|
// TODO: consider ensuring we inserted the expected number of rows.
|
||||||
let mut stmt = self.prepare_cached(s.as_str())?;
|
let mut stmt = self.prepare_cached(s.as_str())?;
|
||||||
stmt.execute(params_from_iter(¶ms))
|
stmt.execute(¶ms)
|
||||||
.context(DbErrorKind::NonFtsInsertionIntoTempSearchTableFailed)
|
.context(DbErrorKind::NonFtsInsertionIntoTempSearchTableFailed)
|
||||||
.map_err(|e| e.into())
|
.map_err(|e| e.into())
|
||||||
.map(|_c| ())
|
.map(|_c| ())
|
||||||
|
@ -976,8 +973,9 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
|
|
||||||
let mut outer_searchid = 2000;
|
let mut outer_searchid = 2000;
|
||||||
|
|
||||||
let chunks: itertools::IntoChunks<_> =
|
let chunks: itertools::IntoChunks<_> = entities
|
||||||
entities.iter().chunks(max_vars / bindings_per_statement);
|
.into_iter()
|
||||||
|
.chunks(max_vars / bindings_per_statement);
|
||||||
|
|
||||||
// From string to (searchid, value_type_tag).
|
// From string to (searchid, value_type_tag).
|
||||||
let mut seen: HashMap<ValueRc<String>, (i64, i32)> = HashMap::with_capacity(entities.len());
|
let mut seen: HashMap<ValueRc<String>, (i64, i32)> = HashMap::with_capacity(entities.len());
|
||||||
|
@ -990,7 +988,6 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
// We must keep these computed values somewhere to reference them later, so we can't
|
// We must keep these computed values somewhere to reference them later, so we can't
|
||||||
// combine this map and the subsequent flat_map.
|
// combine this map and the subsequent flat_map.
|
||||||
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
let block: Result<Vec<(i64 /* e */,
|
let block: Result<Vec<(i64 /* e */,
|
||||||
i64 /* a */,
|
i64 /* a */,
|
||||||
Option<ToSqlOutput<'a>> /* value */,
|
Option<ToSqlOutput<'a>> /* value */,
|
||||||
|
@ -999,7 +996,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
u8 /* flags0 */,
|
u8 /* flags0 */,
|
||||||
i64 /* searchid */)>> = chunk.map(|&(e, a, ref attribute, ref typed_value, added)| {
|
i64 /* searchid */)>> = chunk.map(|&(e, a, ref attribute, ref typed_value, added)| {
|
||||||
match typed_value {
|
match typed_value {
|
||||||
TypedValue::String(ref rc) => {
|
&TypedValue::String(ref rc) => {
|
||||||
datom_count += 1;
|
datom_count += 1;
|
||||||
let entry = seen.entry(rc.clone());
|
let entry = seen.entry(rc.clone());
|
||||||
match entry {
|
match entry {
|
||||||
|
@ -1047,7 +1044,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
|
|
||||||
// TODO: consider ensuring we inserted the expected number of rows.
|
// TODO: consider ensuring we inserted the expected number of rows.
|
||||||
let mut stmt = self.prepare_cached(fts_s.as_str())?;
|
let mut stmt = self.prepare_cached(fts_s.as_str())?;
|
||||||
stmt.execute(params_from_iter(&fts_params)).context(DbErrorKind::FtsInsertionFailed)?;
|
stmt.execute(&fts_params).context(DbErrorKind::FtsInsertionFailed)?;
|
||||||
|
|
||||||
// Second, insert searches.
|
// Second, insert searches.
|
||||||
// `params` reference computed values in `block`.
|
// `params` reference computed values in `block`.
|
||||||
|
@ -1075,7 +1072,7 @@ impl MentatStoring for rusqlite::Connection {
|
||||||
|
|
||||||
// TODO: consider ensuring we inserted the expected number of rows.
|
// TODO: consider ensuring we inserted the expected number of rows.
|
||||||
let mut stmt = self.prepare_cached(s.as_str())?;
|
let mut stmt = self.prepare_cached(s.as_str())?;
|
||||||
stmt.execute(params_from_iter(¶ms)).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
stmt.execute(¶ms).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
||||||
.map_err(|e| e.into())
|
.map_err(|e| e.into())
|
||||||
.map(|_c| ())
|
.map(|_c| ())
|
||||||
}).collect::<Result<Vec<()>>>();
|
}).collect::<Result<Vec<()>>>();
|
||||||
|
@ -1181,7 +1178,7 @@ pub fn update_metadata(
|
||||||
new_schema: &Schema,
|
new_schema: &Schema,
|
||||||
metadata_report: &metadata::MetadataReport,
|
metadata_report: &metadata::MetadataReport,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
use crate::metadata::AttributeAlteration::*;
|
use metadata::AttributeAlteration::*;
|
||||||
|
|
||||||
// Populate the materialized view directly from datoms (and, potentially in the future,
|
// Populate the materialized view directly from datoms (and, potentially in the future,
|
||||||
// transactions). This might generalize nicely as we expand the set of materialized views.
|
// transactions). This might generalize nicely as we expand the set of materialized views.
|
||||||
|
@ -1189,10 +1186,7 @@ pub fn update_metadata(
|
||||||
// TODO: use concat! to avoid creating String instances.
|
// TODO: use concat! to avoid creating String instances.
|
||||||
if !metadata_report.idents_altered.is_empty() {
|
if !metadata_report.idents_altered.is_empty() {
|
||||||
// Idents is the materialized view of the [entid :db/ident ident] slice of datoms.
|
// Idents is the materialized view of the [entid :db/ident ident] slice of datoms.
|
||||||
conn.execute(
|
conn.execute(format!("DELETE FROM idents").as_str(), rusqlite::params![])?;
|
||||||
"DELETE FROM idents".to_string().as_str(),
|
|
||||||
rusqlite::params![],
|
|
||||||
)?;
|
|
||||||
conn.execute(
|
conn.execute(
|
||||||
format!(
|
format!(
|
||||||
"INSERT INTO idents SELECT e, a, v, value_type_tag FROM datoms WHERE a IN {}",
|
"INSERT INTO idents SELECT e, a, v, value_type_tag FROM datoms WHERE a IN {}",
|
||||||
|
@ -1214,10 +1208,7 @@ pub fn update_metadata(
|
||||||
|| !metadata_report.attributes_altered.is_empty()
|
|| !metadata_report.attributes_altered.is_empty()
|
||||||
|| !metadata_report.idents_altered.is_empty()
|
|| !metadata_report.idents_altered.is_empty()
|
||||||
{
|
{
|
||||||
conn.execute(
|
conn.execute(format!("DELETE FROM schema").as_str(), rusqlite::params![])?;
|
||||||
"DELETE FROM schema".to_string().as_str(),
|
|
||||||
rusqlite::params![],
|
|
||||||
)?;
|
|
||||||
// NB: we're using :db/valueType as a placeholder for the entire schema-defining set.
|
// NB: we're using :db/valueType as a placeholder for the entire schema-defining set.
|
||||||
let s = format!(
|
let s = format!(
|
||||||
r#"
|
r#"
|
||||||
|
@ -1338,12 +1329,12 @@ mod tests {
|
||||||
use std::borrow::Borrow;
|
use std::borrow::Borrow;
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::debug::{tempids, TestConn};
|
|
||||||
use crate::internal_types::Term;
|
|
||||||
use core_traits::{attribute, KnownEntid};
|
use core_traits::{attribute, KnownEntid};
|
||||||
use db_traits::errors;
|
use db_traits::errors;
|
||||||
|
use debug::{tempids, TestConn};
|
||||||
use edn::entities::OpType;
|
use edn::entities::OpType;
|
||||||
use edn::{self, InternSet};
|
use edn::{self, InternSet};
|
||||||
|
use internal_types::Term;
|
||||||
use mentat_core::util::Either::*;
|
use mentat_core::util::Either::*;
|
||||||
use mentat_core::{HasSchema, Keyword};
|
use mentat_core::{HasSchema, Keyword};
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
|
|
@ -66,23 +66,23 @@ use rusqlite::types::ToSql;
|
||||||
use rusqlite::TransactionBehavior;
|
use rusqlite::TransactionBehavior;
|
||||||
use tabwriter::TabWriter;
|
use tabwriter::TabWriter;
|
||||||
|
|
||||||
use crate::bootstrap;
|
use bootstrap;
|
||||||
use crate::db::*;
|
use db::*;
|
||||||
use crate::db::{read_attribute_map, read_ident_map};
|
use db::{read_attribute_map, read_ident_map};
|
||||||
use crate::entids;
|
|
||||||
use db_traits::errors::Result;
|
use db_traits::errors::Result;
|
||||||
use edn;
|
use edn;
|
||||||
|
use entids;
|
||||||
|
|
||||||
use core_traits::{Entid, TypedValue, ValueType};
|
use core_traits::{Entid, TypedValue, ValueType};
|
||||||
|
|
||||||
use crate::internal_types::TermWithTempIds;
|
|
||||||
use crate::schema::SchemaBuilding;
|
|
||||||
use crate::tx::{transact, transact_terms};
|
|
||||||
use crate::types::*;
|
|
||||||
use crate::watcher::NullWatcher;
|
|
||||||
use edn::entities::{EntidOrIdent, TempId};
|
use edn::entities::{EntidOrIdent, TempId};
|
||||||
use edn::InternSet;
|
use edn::InternSet;
|
||||||
|
use internal_types::TermWithTempIds;
|
||||||
use mentat_core::{HasSchema, SQLValueType, TxReport};
|
use mentat_core::{HasSchema, SQLValueType, TxReport};
|
||||||
|
use schema::SchemaBuilding;
|
||||||
|
use tx::{transact, transact_terms};
|
||||||
|
use types::*;
|
||||||
|
use watcher::NullWatcher;
|
||||||
|
|
||||||
/// Represents a *datom* (assertion) in the store.
|
/// Represents a *datom* (assertion) in the store.
|
||||||
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
|
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
|
||||||
|
@ -117,7 +117,7 @@ impl Datom {
|
||||||
pub fn to_edn(&self) -> edn::Value {
|
pub fn to_edn(&self) -> edn::Value {
|
||||||
let f = |entid: &EntidOrIdent| -> edn::Value {
|
let f = |entid: &EntidOrIdent| -> edn::Value {
|
||||||
match *entid {
|
match *entid {
|
||||||
EntidOrIdent::Entid(ref y) => edn::Value::Integer(*y),
|
EntidOrIdent::Entid(ref y) => edn::Value::Integer(y.clone()),
|
||||||
EntidOrIdent::Ident(ref y) => edn::Value::Keyword(y.clone()),
|
EntidOrIdent::Ident(ref y) => edn::Value::Keyword(y.clone()),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -134,13 +134,13 @@ impl Datom {
|
||||||
|
|
||||||
impl Datoms {
|
impl Datoms {
|
||||||
pub fn to_edn(&self) -> edn::Value {
|
pub fn to_edn(&self) -> edn::Value {
|
||||||
edn::Value::Vector((&self.0).iter().map(|x| x.to_edn()).collect())
|
edn::Value::Vector((&self.0).into_iter().map(|x| x.to_edn()).collect())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Transactions {
|
impl Transactions {
|
||||||
pub fn to_edn(&self) -> edn::Value {
|
pub fn to_edn(&self) -> edn::Value {
|
||||||
edn::Value::Vector((&self.0).iter().map(|x| x.to_edn()).collect())
|
edn::Value::Vector((&self.0).into_iter().map(|x| x.to_edn()).collect())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -148,7 +148,7 @@ impl FulltextValues {
|
||||||
pub fn to_edn(&self) -> edn::Value {
|
pub fn to_edn(&self) -> edn::Value {
|
||||||
edn::Value::Vector(
|
edn::Value::Vector(
|
||||||
(&self.0)
|
(&self.0)
|
||||||
.iter()
|
.into_iter()
|
||||||
.map(|&(x, ref y)| {
|
.map(|&(x, ref y)| {
|
||||||
edn::Value::Vector(vec![edn::Value::Integer(x), edn::Value::Text(y.clone())])
|
edn::Value::Vector(vec![edn::Value::Integer(x), edn::Value::Text(y.clone())])
|
||||||
})
|
})
|
||||||
|
@ -238,7 +238,7 @@ pub fn datoms_after<S: Borrow<Schema>>(
|
||||||
e: EntidOrIdent::Entid(e),
|
e: EntidOrIdent::Entid(e),
|
||||||
a: to_entid(borrowed_schema, a),
|
a: to_entid(borrowed_schema, a),
|
||||||
v: value,
|
v: value,
|
||||||
tx,
|
tx: tx,
|
||||||
added: None,
|
added: None,
|
||||||
}))
|
}))
|
||||||
})?
|
})?
|
||||||
|
@ -286,7 +286,7 @@ pub fn transactions_after<S: Borrow<Schema>>(
|
||||||
e: EntidOrIdent::Entid(e),
|
e: EntidOrIdent::Entid(e),
|
||||||
a: to_entid(borrowed_schema, a),
|
a: to_entid(borrowed_schema, a),
|
||||||
v: value,
|
v: value,
|
||||||
tx,
|
tx: tx,
|
||||||
added: Some(added),
|
added: Some(added),
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
|
@ -306,9 +306,10 @@ pub fn transactions_after<S: Borrow<Schema>>(
|
||||||
pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
|
pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
|
||||||
let mut stmt: rusqlite::Statement =
|
let mut stmt: rusqlite::Statement =
|
||||||
conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
|
conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
|
||||||
|
let params: &[i32; 0] = &[];
|
||||||
|
|
||||||
let r: Result<Vec<_>> = stmt
|
let r: Result<Vec<_>> = stmt
|
||||||
.query_and_then([], |row| {
|
.query_and_then(params, |row| {
|
||||||
let rowid: i64 = row.get(0)?;
|
let rowid: i64 = row.get(0)?;
|
||||||
let text: String = row.get(1)?;
|
let text: String = row.get(1)?;
|
||||||
Ok((rowid, text))
|
Ok((rowid, text))
|
||||||
|
@ -331,20 +332,20 @@ pub fn dump_sql_query(
|
||||||
let mut stmt: rusqlite::Statement = conn.prepare(sql)?;
|
let mut stmt: rusqlite::Statement = conn.prepare(sql)?;
|
||||||
|
|
||||||
let mut tw = TabWriter::new(Vec::new()).padding(2);
|
let mut tw = TabWriter::new(Vec::new()).padding(2);
|
||||||
writeln!(&mut tw, "{}", sql).unwrap();
|
write!(&mut tw, "{}\n", sql).unwrap();
|
||||||
|
|
||||||
for column_name in stmt.column_names() {
|
for column_name in stmt.column_names() {
|
||||||
write!(&mut tw, "{}\t", column_name).unwrap();
|
write!(&mut tw, "{}\t", column_name).unwrap();
|
||||||
}
|
}
|
||||||
writeln!(&mut tw).unwrap();
|
write!(&mut tw, "\n").unwrap();
|
||||||
|
|
||||||
let r: Result<Vec<_>> = stmt
|
let r: Result<Vec<_>> = stmt
|
||||||
.query_and_then(params, |row| {
|
.query_and_then(params, |row| {
|
||||||
for i in 0..row.as_ref().column_count() {
|
for i in 0..row.column_count() {
|
||||||
let value: rusqlite::types::Value = row.get(i)?;
|
let value: rusqlite::types::Value = row.get(i)?;
|
||||||
write!(&mut tw, "{:?}\t", value).unwrap();
|
write!(&mut tw, "{:?}\t", value).unwrap();
|
||||||
}
|
}
|
||||||
writeln!(&mut tw).unwrap();
|
write!(&mut tw, "\n").unwrap();
|
||||||
Ok(())
|
Ok(())
|
||||||
})?
|
})?
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -380,9 +381,8 @@ impl TestConn {
|
||||||
I: Borrow<str>,
|
I: Borrow<str>,
|
||||||
{
|
{
|
||||||
// Failure to parse the transaction is a coding error, so we unwrap.
|
// Failure to parse the transaction is a coding error, so we unwrap.
|
||||||
let entities = edn::parse::entities(transaction.borrow()).unwrap_or_else(|_| {
|
let entities = edn::parse::entities(transaction.borrow())
|
||||||
panic!("to be able to parse {} into entities", transaction.borrow())
|
.expect(format!("to be able to parse {} into entities", transaction.borrow()).as_str());
|
||||||
});
|
|
||||||
|
|
||||||
let details = {
|
let details = {
|
||||||
// The block scopes the borrow of self.sqlite.
|
// The block scopes the borrow of self.sqlite.
|
||||||
|
|
|
@@ -63,8 +63,7 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
    if attribute >= DB_DOC {
        return false;
    }
-    matches!(
-        attribute,
+    match attribute {
        // Idents.
        DB_IDENT |
        // Schema.
@@ -73,22 +72,19 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
        DB_INDEX |
        DB_IS_COMPONENT |
        DB_UNIQUE |
-        DB_VALUE_TYPE
-    )
+        DB_VALUE_TYPE =>
+            true,
+        _ => false,
+    }
}

/// Return 'false' if the given attribute might be used to describe a schema attribute.
pub fn is_a_schema_attribute(attribute: Entid) -> bool {
-    matches!(
-        attribute,
-        DB_IDENT
-            | DB_CARDINALITY
-            | DB_FULLTEXT
-            | DB_INDEX
-            | DB_IS_COMPONENT
-            | DB_UNIQUE
-            | DB_VALUE_TYPE
-    )
+    match attribute {
+        DB_IDENT | DB_CARDINALITY | DB_FULLTEXT | DB_INDEX | DB_IS_COMPONENT | DB_UNIQUE
+        | DB_VALUE_TYPE => true,
+        _ => false,
+    }
}

lazy_static! {
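The two sides above are behaviourally identical: a `match` that maps a set of patterns to `true` and everything else to `false` collapses into a single `matches!` call. A tiny sketch (the entid constants and their values are placeholders, not the project's real ones):

```rust
// Placeholder entids for illustration only.
const DB_IDENT: i64 = 1;
const DB_CARDINALITY: i64 = 8;
const DB_VALUE_TYPE: i64 = 40;

fn is_a_schema_attribute(attribute: i64) -> bool {
    // Expands to a match returning true for these patterns, false otherwise.
    matches!(attribute, DB_IDENT | DB_CARDINALITY | DB_VALUE_TYPE)
}

fn main() {
    assert!(is_a_schema_attribute(DB_IDENT));
    assert!(!is_a_schema_attribute(99));
}
```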
@@ -23,10 +23,10 @@ use edn::entities;
  use edn::entities::{EntityPlace, OpType, TempId, TxFunction};
  use edn::{SpannedValue, ValueAndSpan, ValueRc};

- use crate::schema::SchemaTypeChecking;
- use crate::types::{AVMap, AVPair, Schema, TransactableValue};
  use db_traits::errors;
  use db_traits::errors::{DbErrorKind, Result};
+ use schema::SchemaTypeChecking;
+ use types::{AVMap, AVPair, Schema, TransactableValue};

  impl TransactableValue for ValueAndSpan {
  fn into_typed_value(self, schema: &Schema, value_type: ValueType) -> Result<TypedValue> {
@@ -75,14 +75,18 @@ impl TransactableValue for ValueAndSpan {
  }
  }
  Nil | Boolean(_) | Instant(_) | BigInteger(_) | Float(_) | Uuid(_) | PlainSymbol(_)
- | NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) | Bytes(_) => {
+ | NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) => {
  bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
  }
  }
  }

  fn as_tempid(&self) -> Option<TempId> {
- self.inner.as_text().cloned().map(TempId::External)
+ self.inner
+ .as_text()
+ .cloned()
+ .map(TempId::External)
+ .map(|v| v.into())
  }
  }

@@ -105,8 +109,7 @@ impl TransactableValue for TypedValue {
  | TypedValue::Long(_)
  | TypedValue::Double(_)
  | TypedValue::Instant(_)
- | TypedValue::Uuid(_)
- | TypedValue::Bytes(_) => {
+ | TypedValue::Uuid(_) => {
  bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
  }
  }
@@ -114,7 +117,7 @@ impl TransactableValue for TypedValue {

  fn as_tempid(&self) -> Option<TempId> {
  match self {
- TypedValue::String(ref s) => Some(TempId::External((**s).clone())),
+ &TypedValue::String(ref s) => Some(TempId::External((**s).clone()).into()),
  _ => None,
  }
  }
@@ -60,30 +60,30 @@ mod upsert_resolution;
  mod watcher;

  // Export these for reference from sync code and tests.
- pub use crate::bootstrap::{TX0, USER0, V1_PARTS};
+ pub use bootstrap::{TX0, USER0, V1_PARTS};

  pub static TIMELINE_MAIN: i64 = 0;

- pub use crate::schema::{AttributeBuilder, AttributeValidation};
+ pub use schema::{AttributeBuilder, AttributeValidation};

- pub use crate::bootstrap::CORE_SCHEMA_VERSION;
+ pub use bootstrap::CORE_SCHEMA_VERSION;

  use edn::symbols;

- pub use crate::entids::DB_SCHEMA_CORE;
+ pub use entids::DB_SCHEMA_CORE;

- pub use crate::db::{new_connection, TypedSQLValue};
+ pub use db::{new_connection, TypedSQLValue};

  #[cfg(feature = "sqlcipher")]
  pub use db::{change_encryption_key, new_connection_with_key};

- pub use crate::watcher::TransactWatcher;
+ pub use watcher::TransactWatcher;

- pub use crate::tx::{transact, transact_terms};
+ pub use tx::{transact, transact_terms};

- pub use crate::tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};
+ pub use tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};

- pub use crate::types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};
+ pub use types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};

  pub fn to_namespaced_keyword(s: &str) -> Result<symbols::Keyword> {
  let splits = [':', '/'];
@@ -95,7 +95,7 @@ pub fn to_namespaced_keyword(s: &str) -> Result<symbols::Keyword> {
  _ => None,
  };

- nsk.ok_or_else(|| DbErrorKind::NotYetImplemented(format!("InvalidKeyword: {}", s)).into())
+ nsk.ok_or(DbErrorKind::NotYetImplemented(format!("InvalidKeyword: {}", s)).into())
  }

  /// Prepare an SQL `VALUES` block, like (?, ?, ?), (?, ?, ?).
@@ -29,18 +29,18 @@ use failure::ResultExt;
  use std::collections::btree_map::Entry;
  use std::collections::{BTreeMap, BTreeSet};

- use crate::add_retract_alter_set::AddRetractAlterSet;
- use crate::entids;
+ use add_retract_alter_set::AddRetractAlterSet;
  use db_traits::errors::{DbErrorKind, Result};
  use edn::symbols;
+ use entids;

  use core_traits::{attribute, Entid, TypedValue, ValueType};

  use mentat_core::{AttributeMap, Schema};

- use crate::schema::{AttributeBuilder, AttributeValidation};
+ use schema::{AttributeBuilder, AttributeValidation};

- use crate::types::EAV;
+ use types::EAV;

  /// An alteration to an attribute.
  #[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -111,7 +111,7 @@ fn update_attribute_map_from_schema_retractions(
  let mut eas = BTreeMap::new();
  for (e, a, v) in retractions.into_iter() {
  if entids::is_a_schema_attribute(a) {
- eas.entry(e).or_insert_with(Vec::new).push(a);
+ eas.entry(e).or_insert(vec![]).push(a);
  suspect_retractions.push((e, a, v));
  } else {
  filtered_retractions.push((e, a, v));
@@ -145,7 +145,7 @@ fn update_attribute_map_from_schema_retractions(
  // Remove attributes corresponding to retracted attribute.
  attribute_map.remove(&e);
  } else {
- bail!(DbErrorKind::BadSchemaAssertion("Retracting defining attributes of a schema without retracting its :db/ident is not permitted.".to_string()));
+ bail!(DbErrorKind::BadSchemaAssertion(format!("Retracting defining attributes of a schema without retracting its :db/ident is not permitted.")));
  }
  } else {
  filtered_retractions.push((e, a, v));
@@ -172,7 +172,7 @@ pub fn update_attribute_map_from_entid_triples(
  ) -> AttributeBuilder {
  existing
  .get(&attribute_id)
- .map(AttributeBuilder::modify_attribute)
+ .map(AttributeBuilder::to_modify_attribute)
  .unwrap_or_else(AttributeBuilder::default)
  }

@@ -248,7 +248,6 @@ pub fn update_attribute_map_from_entid_triples(
  TypedValue::Ref(entids::DB_TYPE_REF) => { builder.value_type(ValueType::Ref); },
  TypedValue::Ref(entids::DB_TYPE_STRING) => { builder.value_type(ValueType::String); },
  TypedValue::Ref(entids::DB_TYPE_UUID) => { builder.value_type(ValueType::Uuid); },
- TypedValue::Ref(entids::DB_TYPE_BYTES) => { builder.value_type(ValueType::Bytes); },
  _ => bail!(DbErrorKind::BadSchemaAssertion(format!("Expected [... :db/valueType :db.type/*] but got [... :db/valueType {:?}] for entid {} and attribute {}", value, entid, attr)))
  }
  },
@@ -338,8 +337,8 @@ pub fn update_attribute_map_from_entid_triples(
  }

  Ok(MetadataReport {
- attributes_installed,
- attributes_altered,
+ attributes_installed: attributes_installed,
+ attributes_altered: attributes_altered,
  idents_altered: BTreeMap::default(),
  })
  }
@@ -440,12 +439,12 @@ where
  // component_attributes up-to-date: most of the time we'll rebuild it
  // even though it's not necessary (e.g. a schema attribute that's _not_
  // a component was removed, or a non-component related attribute changed).
- if report.attributes_did_change() || !ident_set.retracted.is_empty() {
+ if report.attributes_did_change() || ident_set.retracted.len() > 0 {
  schema.update_component_attributes();
  }

  Ok(MetadataReport {
- idents_altered,
+ idents_altered: idents_altered,
  ..report
  })
  }
@@ -10,16 +10,16 @@

  #![allow(dead_code)]

- use crate::db::TypedSQLValue;
+ use db::TypedSQLValue;
  use db_traits::errors::{DbErrorKind, Result};
  use edn;
  use edn::symbols;

  use core_traits::{attribute, Attribute, Entid, KnownEntid, TypedValue, ValueType};

- use crate::metadata;
- use crate::metadata::AttributeAlteration;
  use mentat_core::{AttributeMap, EntidMap, HasSchema, IdentMap, Schema};
+ use metadata;
+ use metadata::AttributeAlteration;

  pub trait AttributeValidation {
  fn validate<F>(&self, ident: F) -> Result<()>
@@ -77,7 +77,7 @@ fn validate_attribute_map(entid_map: &EntidMap, attribute_map: &AttributeMap) ->
  entid_map
  .get(entid)
  .map(|ident| ident.to_string())
- .unwrap_or_else(|| entid.to_string())
+ .unwrap_or(entid.to_string())
  };
  attribute.validate(ident)?;
  }
@@ -108,7 +108,7 @@ impl AttributeBuilder {

  /// Make a new AttributeBuilder from an existing Attribute. This is important to allow
  /// retraction. Only attributes that we allow to change are duplicated here.
- pub fn modify_attribute(attribute: &Attribute) -> Self {
+ pub fn to_modify_attribute(attribute: &Attribute) -> Self {
  let mut ab = AttributeBuilder::default();
  ab.multival = Some(attribute.multival);
  ab.unique = Some(attribute.unique);
@@ -116,22 +116,22 @@ impl AttributeBuilder {
  ab
  }

- pub fn value_type(&mut self, value_type: ValueType) -> &mut Self {
+ pub fn value_type<'a>(&'a mut self, value_type: ValueType) -> &'a mut Self {
  self.value_type = Some(value_type);
  self
  }

- pub fn multival(&mut self, multival: bool) -> &mut Self {
+ pub fn multival<'a>(&'a mut self, multival: bool) -> &'a mut Self {
  self.multival = Some(multival);
  self
  }

- pub fn non_unique(&mut self) -> &mut Self {
+ pub fn non_unique<'a>(&'a mut self) -> &'a mut Self {
  self.unique = Some(None);
  self
  }

- pub fn unique(&mut self, unique: attribute::Unique) -> &mut Self {
+ pub fn unique<'a>(&'a mut self, unique: attribute::Unique) -> &'a mut Self {
  if self.helpful && unique == attribute::Unique::Identity {
  self.index = Some(true);
  }
@@ -139,12 +139,12 @@ impl AttributeBuilder {
  self
  }

- pub fn index(&mut self, index: bool) -> &mut Self {
+ pub fn index<'a>(&'a mut self, index: bool) -> &'a mut Self {
  self.index = Some(index);
  self
  }

- pub fn fulltext(&mut self, fulltext: bool) -> &mut Self {
+ pub fn fulltext<'a>(&'a mut self, fulltext: bool) -> &'a mut Self {
  self.fulltext = Some(fulltext);
  if self.helpful && fulltext {
  self.index = Some(true);
@@ -152,12 +152,12 @@ impl AttributeBuilder {
  self
  }

- pub fn component(&mut self, component: bool) -> &mut Self {
+ pub fn component<'a>(&'a mut self, component: bool) -> &'a mut Self {
  self.component = Some(component);
  self
  }

- pub fn no_history(&mut self, no_history: bool) -> &mut Self {
+ pub fn no_history<'a>(&'a mut self, no_history: bool) -> &'a mut Self {
  self.no_history = Some(no_history);
  self
  }
@@ -197,7 +197,7 @@ impl AttributeBuilder {
  attribute.multival = multival;
  }
  if let Some(ref unique) = self.unique {
- attribute.unique = *unique;
+ attribute.unique = unique.clone();
  }
  if let Some(index) = self.index {
  attribute.index = index;
@@ -223,12 +223,14 @@ impl AttributeBuilder {

  if let Some(ref unique) = self.unique {
  if *unique != attribute.unique {
- attribute.unique = *unique;
+ attribute.unique = unique.clone();
+ mutations.push(AttributeAlteration::Unique);
+ }
+ } else {
+ if attribute.unique != None {
+ attribute.unique = None;
  mutations.push(AttributeAlteration::Unique);
  }
- } else if attribute.unique != None {
- attribute.unique = None;
- mutations.push(AttributeAlteration::Unique);
  }

  if let Some(index) = self.index {
@@ -270,17 +272,17 @@ pub trait SchemaBuilding {
  impl SchemaBuilding for Schema {
  fn require_ident(&self, entid: Entid) -> Result<&symbols::Keyword> {
  self.get_ident(entid)
- .ok_or_else(|| DbErrorKind::UnrecognizedEntid(entid).into())
+ .ok_or(DbErrorKind::UnrecognizedEntid(entid).into())
  }

  fn require_entid(&self, ident: &symbols::Keyword) -> Result<KnownEntid> {
  self.get_entid(&ident)
- .ok_or_else(|| DbErrorKind::UnrecognizedIdent(ident.to_string()).into())
+ .ok_or(DbErrorKind::UnrecognizedIdent(ident.to_string()).into())
  }

  fn require_attribute_for_entid(&self, entid: Entid) -> Result<&Attribute> {
  self.attribute_for_entid(entid)
- .ok_or_else(|| DbErrorKind::UnrecognizedEntid(entid).into())
+ .ok_or(DbErrorKind::UnrecognizedEntid(entid).into())
  }

  /// Create a valid `Schema` from the constituent maps.
@@ -288,7 +290,10 @@ impl SchemaBuilding for Schema {
  ident_map: IdentMap,
  attribute_map: AttributeMap,
  ) -> Result<Schema> {
- let entid_map: EntidMap = ident_map.iter().map(|(k, v)| (*v, k.clone())).collect();
+ let entid_map: EntidMap = ident_map
+ .iter()
+ .map(|(k, v)| (v.clone(), k.clone()))
+ .collect();

  validate_attribute_map(&entid_map, &attribute_map)?;
  Ok(Schema::new(ident_map, entid_map, attribute_map))
@@ -304,10 +309,10 @@ impl SchemaBuilding for Schema {
  .map(|(symbolic_ident, symbolic_attr, value)| {
  let ident: i64 = *ident_map
  .get(&symbolic_ident)
- .ok_or_else(|| DbErrorKind::UnrecognizedIdent(symbolic_ident.to_string()))?;
+ .ok_or(DbErrorKind::UnrecognizedIdent(symbolic_ident.to_string()))?;
  let attr: i64 = *ident_map
  .get(&symbolic_attr)
- .ok_or_else(|| DbErrorKind::UnrecognizedIdent(symbolic_attr.to_string()))?;
+ .ok_or(DbErrorKind::UnrecognizedIdent(symbolic_attr.to_string()))?;
  Ok((ident, attr, value))
  })
  .collect();
@@ -362,7 +367,6 @@ impl SchemaTypeChecking for Schema {
  (ValueType::Uuid, tv @ TypedValue::Uuid(_)) => Ok(tv),
  (ValueType::Instant, tv @ TypedValue::Instant(_)) => Ok(tv),
  (ValueType::Keyword, tv @ TypedValue::Keyword(_)) => Ok(tv),
- (ValueType::Bytes, tv @ TypedValue::Bytes(_)) => Ok(tv),
  // Ref coerces a little: we interpret some things depending on the schema as a Ref.
  (ValueType::Ref, TypedValue::Long(x)) => Ok(TypedValue::Ref(x)),
  (ValueType::Ref, TypedValue::Keyword(ref x)) => {
@@ -380,7 +384,6 @@ impl SchemaTypeChecking for Schema {
  | (vt @ ValueType::Uuid, _)
  | (vt @ ValueType::Instant, _)
  | (vt @ ValueType::Keyword, _)
- | (vt @ ValueType::Bytes, _)
  | (vt @ ValueType::Ref, _) => {
  bail!(DbErrorKind::BadValuePair(format!("{}", value), vt))
  }
@@ -396,7 +399,7 @@ mod test {

  fn add_attribute(schema: &mut Schema, ident: Keyword, entid: Entid, attribute: Attribute) {
  schema.entid_map.insert(entid, ident.clone());
- schema.ident_map.insert(ident, entid);
+ schema.ident_map.insert(ident.clone(), entid);

  if attribute.component {
  schema.component_attributes.push(entid);
@@ -10,7 +10,7 @@

  use std::ops::RangeFrom;

- use rusqlite::{self, params_from_iter};
+ use rusqlite;

  use db_traits::errors::{DbErrorKind, Result};

@@ -22,16 +22,16 @@ use edn::InternSet;

  use edn::entities::OpType;

- use crate::db;
- use crate::db::TypedSQLValue;
+ use db;
+ use db::TypedSQLValue;

- use crate::tx::{transact_terms_with_action, TransactorAction};
+ use tx::{transact_terms_with_action, TransactorAction};

- use crate::types::PartitionMap;
+ use types::PartitionMap;

- use crate::internal_types::{Term, TermWithoutTempIds};
+ use internal_types::{Term, TermWithoutTempIds};

- use crate::watcher::NullWatcher;
+ use watcher::NullWatcher;

  /// Collects a supplied tx range into an DESC ordered Vec of valid txs,
  /// ensuring they all belong to the same timeline.
@@ -58,7 +58,7 @@ fn collect_ordered_txs_to_move(
  None => bail!(DbErrorKind::TimelinesInvalidRange),
  };

- for t in rows {
+ while let Some(t) = rows.next() {
  let t = t?;
  txs.push(t.0);
  if t.1 != timeline {
@@ -79,9 +79,12 @@ fn move_transactions_to(
  &format!(
  "UPDATE timelined_transactions SET timeline = {} WHERE tx IN {}",
  new_timeline,
- crate::repeat_values(tx_ids.len(), 1)
+ ::repeat_values(tx_ids.len(), 1)
  ),
- params_from_iter(tx_ids.iter()),
+ &(tx_ids
+ .iter()
+ .map(|x| x as &dyn rusqlite::types::ToSql)
+ .collect::<Vec<_>>()),
  )?;
  Ok(())
  }
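The `move_transactions_to` hunk above trades rusqlite's `params_from_iter` for a hand-collected `Vec<&dyn ToSql>`. A minimal sketch of the newer binding style (not Mentat code; it assumes rusqlite 0.25 or later, and only the `timelined_transactions` table name comes from the hunk itself):

    use rusqlite::{params_from_iter, Connection, Result};

    // Move the given tx ids to a new timeline.
    fn retimeline(conn: &Connection, new_timeline: i64, tx_ids: &[i64]) -> Result<usize> {
        // One "?" placeholder per id, e.g. "?, ?, ?".
        let placeholders = vec!["?"; tx_ids.len()].join(", ");
        let sql = format!(
            "UPDATE timelined_transactions SET timeline = {} WHERE tx IN ({})",
            new_timeline, placeholders
        );
        // `params_from_iter` binds any iterator of `ToSql` values positionally,
        // which is what the removed side of the hunk uses in place of collecting
        // a `Vec<&dyn rusqlite::types::ToSql>` by hand.
        conn.execute(&sql, params_from_iter(tx_ids.iter()))
    }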
@@ -105,13 +108,12 @@ fn reversed_terms_for(
  tx_id: Entid,
  ) -> Result<Vec<TermWithoutTempIds>> {
  let mut stmt = conn.prepare("SELECT e, a, v, value_type_tag, tx, added FROM timelined_transactions WHERE tx = ? AND timeline = ? ORDER BY tx DESC")?;
- let rows = stmt.query_and_then(
- &[&tx_id, &crate::TIMELINE_MAIN],
+ let mut rows = stmt.query_and_then(
+ &[&tx_id, &::TIMELINE_MAIN],
  |row| -> Result<TermWithoutTempIds> {
- let op = if row.get(5)? {
- OpType::Retract
- } else {
- OpType::Add
+ let op = match row.get(5)? {
+ true => OpType::Retract,
+ false => OpType::Add,
  };
  Ok(Term::AddOrRetract(
  op,
@@ -124,7 +126,7 @@ fn reversed_terms_for(

  let mut terms = vec![];

- for row in rows {
+ while let Some(row) = rows.next() {
  terms.push(row?);
  }
  Ok(terms)
@@ -138,10 +140,10 @@ pub fn move_from_main_timeline(
  txs_from: RangeFrom<Entid>,
  new_timeline: Entid,
  ) -> Result<(Option<Schema>, PartitionMap)> {
- if new_timeline == crate::TIMELINE_MAIN {
- bail!(DbErrorKind::NotYetImplemented(
- "Can't move transactions to main timeline".to_string()
- ));
+ if new_timeline == ::TIMELINE_MAIN {
+ bail!(DbErrorKind::NotYetImplemented(format!(
+ "Can't move transactions to main timeline"
+ )));
  }

  // We don't currently ensure that moving transactions onto a non-empty timeline
@@ -151,7 +153,7 @@ pub fn move_from_main_timeline(
  bail!(DbErrorKind::TimelinesMoveToNonEmpty);
  }

- let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, crate::TIMELINE_MAIN)?;
+ let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, ::TIMELINE_MAIN)?;

  let mut last_schema = None;
  for tx_id in &txs_to_move {
@@ -196,16 +198,16 @@ mod tests {

  use std::borrow::Borrow;

- use crate::debug::TestConn;
+ use debug::TestConn;

- use crate::bootstrap;
+ use bootstrap;

  // For convenience during testing.
  // Real consumers will perform similar operations when appropriate.
  fn update_conn(conn: &mut TestConn, schema: &Option<Schema>, pmap: &PartitionMap) {
  match schema {
- Some(ref s) => conn.schema = s.clone(),
- None => (),
+ &Some(ref s) => conn.schema = s.clone(),
+ &None => (),
  };
  conn.partition_map = pmap.clone();
  }
@@ -238,7 +240,7 @@ mod tests {
  assert_matches!(conn.transactions(), "[]");
  assert_eq!(new_partition_map, partition_map0);

- conn.partition_map = partition_map0;
+ conn.partition_map = partition_map0.clone();
  let report2 = assert_transact!(conn, t);
  let partition_map2 = conn.partition_map.clone();

db/src/tx.rs (80 changed lines)

@@ -49,17 +49,17 @@ use std::borrow::Cow;
  use std::collections::{BTreeMap, BTreeSet, VecDeque};
  use std::iter::once;

- use crate::db;
- use crate::db::MentatStoring;
- use crate::entids;
- use crate::internal_types::{
+ use db;
+ use db::MentatStoring;
+ use db_traits::errors;
+ use db_traits::errors::{DbErrorKind, Result};
+ use edn::{InternSet, Keyword};
+ use entids;
+ use internal_types::{
  replace_lookup_ref, AEVTrie, AddAndRetract, KnownEntidOr, LookupRef, LookupRefOrTempId,
  TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithTempIdsAndLookupRefs,
  TermWithoutTempIds, TypedValueOr,
  };
- use db_traits::errors;
- use db_traits::errors::{DbErrorKind, Result};
- use edn::{InternSet, Keyword};

  use mentat_core::util::Either;

@@ -67,15 +67,15 @@ use core_traits::{attribute, now, Attribute, Entid, KnownEntid, TypedValue, Valu

  use mentat_core::{DateTime, Schema, TxReport, Utc};

- use crate::metadata;
- use crate::schema::SchemaBuilding;
- use crate::tx_checking;
- use crate::types::{AVMap, AVPair, PartitionMap, TransactableValue};
- use crate::upsert_resolution::{FinalPopulations, Generation};
- use crate::watcher::TransactWatcher;
  use edn::entities as entmod;
  use edn::entities::{AttributePlace, Entity, OpType, TempId};
+ use metadata;
  use rusqlite;
+ use schema::SchemaBuilding;
+ use tx_checking;
+ use types::{AVMap, AVPair, PartitionMap, TransactableValue};
+ use upsert_resolution::{FinalPopulations, Generation};
+ use watcher::TransactWatcher;

  /// Defines transactor's high level behaviour.
  pub(crate) enum TransactorAction {
@@ -163,12 +163,12 @@ where
  tx_id: Entid,
  ) -> Tx<'conn, 'a, W> {
  Tx {
- store,
- partition_map,
+ store: store,
+ partition_map: partition_map,
  schema_for_mutation: Cow::Borrowed(schema_for_mutation),
- schema,
- watcher,
- tx_id,
+ schema: schema,
+ watcher: watcher,
+ tx_id: tx_id,
  }
  }

@@ -185,8 +185,8 @@ where

  // Map [a v]->entid.
  let mut av_pairs: Vec<&AVPair> = vec![];
- for temp_id_av in temp_id_avs {
- av_pairs.push(&temp_id_av.1);
+ for i in 0..temp_id_avs.len() {
+ av_pairs.push(&temp_id_avs[i].1);
  }

  // Lookup in the store.
@@ -208,14 +208,14 @@ where
  av_map.get(&av_pair)
  );
  if let Some(entid) = av_map.get(&av_pair).cloned().map(KnownEntid) {
- if let Some(previous) = tempids.insert(tempid.clone(), entid) {
+ tempids.insert(tempid.clone(), entid).map(|previous| {
  if entid != previous {
  conflicting_upserts
  .entry((**tempid).clone())
  .or_insert_with(|| once(previous).collect::<BTreeSet<_>>())
  .insert(entid);
  }
- }
+ });
  }
  }

@@ -340,7 +340,7 @@ where
  entmod::EntityPlace::TxFunction(ref tx_function) => {
  match tx_function.op.0.as_str() {
  "transaction-tx" => Ok(Either::Left(self.tx_id)),
- unknown => bail!(DbErrorKind::NotYetImplemented(format!(
+ unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!(
  "Unknown transaction function {}",
  unknown
  ))),
@@ -372,7 +372,7 @@ where
  ) -> Result<KnownEntidOr<LookupRefOrTempId>> {
  match backward_a.unreversed() {
  None => {
- bail!(DbErrorKind::NotYetImplemented("Cannot explode map notation value in :attr/_reversed notation for forward attribute".to_string()));
+ bail!(DbErrorKind::NotYetImplemented(format!("Cannot explode map notation value in :attr/_reversed notation for forward attribute")));
  }
  Some(forward_a) => {
  let forward_a = self.entity_a_into_term_a(forward_a)?;
@@ -412,7 +412,7 @@ where
  entmod::ValuePlace::TxFunction(ref tx_function) => {
  match tx_function.op.0.as_str() {
  "transaction-tx" => Ok(Either::Left(KnownEntid(self.tx_id.0))),
- unknown=> bail!(DbErrorKind::NotYetImplemented(format!("Unknown transaction function {}", unknown))),
+ unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!("Unknown transaction function {}", unknown))),
  }
  },

@@ -456,7 +456,7 @@ where
  op: OpType::Add,
  e: db_id.clone(),
  a: AttributePlace::Entid(a),
- v,
+ v: v,
  });
  }
  }
@@ -519,7 +519,7 @@ where
  entmod::ValuePlace::TxFunction(ref tx_function) => {
  let typed_value = match tx_function.op.0.as_str() {
  "transaction-tx" => TypedValue::Ref(self.tx_id),
- unknown => bail!(DbErrorKind::NotYetImplemented(format!(
+ unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!(
  "Unknown transaction function {}",
  unknown
  ))),
@@ -546,7 +546,7 @@ where

  for vv in vs {
  deque.push_front(Entity::AddOrRetract {
- op,
+ op: op.clone(),
  e: e.clone(),
  a: AttributePlace::Entid(entmod::EntidOrIdent::Entid(a)),
  v: vv,
@@ -667,8 +667,8 @@ where
  |term: TermWithTempIdsAndLookupRefs| -> Result<TermWithTempIds> {
  match term {
  Term::AddOrRetract(op, e, a, v) => {
- let e = replace_lookup_ref(&lookup_ref_map, e, KnownEntid)?;
- let v = replace_lookup_ref(&lookup_ref_map, v, TypedValue::Ref)?;
+ let e = replace_lookup_ref(&lookup_ref_map, e, |x| KnownEntid(x))?;
+ let v = replace_lookup_ref(&lookup_ref_map, v, |x| TypedValue::Ref(x))?;
  Ok(Term::AddOrRetract(op, e, a, v))
  }
  }
@@ -757,14 +757,14 @@ where
  for (tempid, entid) in temp_id_map {
  // Since `UpsertEV` instances always transition to `UpsertE` instances, it might be
  // that a tempid resolves in two generations, and those resolutions might conflict.
- if let Some(previous) = tempids.insert((*tempid).clone(), entid) {
+ tempids.insert((*tempid).clone(), entid).map(|previous| {
  if entid != previous {
  conflicting_upserts
  .entry((*tempid).clone())
  .or_insert_with(|| once(previous).collect::<BTreeSet<_>>())
  .insert(entid);
  }
- }
+ });
  }

  if !conflicting_upserts.is_empty() {
@@ -891,7 +891,10 @@ where
  .map(|v| (true, v))
  .chain(ars.retract.into_iter().map(|v| (false, v)))
  {
- let op = if added { OpType::Add } else { OpType::Retract };
+ let op = match added {
+ true => OpType::Add,
+ false => OpType::Retract,
+ };
  self.watcher.datom(op, e, a, &v);
  queue.push((e, a, attribute, v, added));
  }
@@ -964,7 +967,7 @@ where
  Ok(TxReport {
  tx_id: self.tx_id,
  tx_instant,
- tempids,
+ tempids: tempids,
  })
  }
  }
@@ -1058,7 +1061,6 @@ where
  )
  }

- #[allow(clippy::too_many_arguments)]
  pub(crate) fn transact_terms_with_action<'conn, 'a, I, W>(
  conn: &'conn rusqlite::Connection,
  partition_map: PartitionMap,
@@ -1091,9 +1093,9 @@ where

  let a_and_r = trie
  .entry((a, attribute))
- .or_insert_with(BTreeMap::default)
+ .or_insert(BTreeMap::default())
  .entry(e)
- .or_insert_with(AddAndRetract::default);
+ .or_insert(AddAndRetract::default());

  match op {
  OpType::Add => a_and_r.add.insert(v),
@@ -1134,9 +1136,9 @@ fn get_or_insert_tx_instant<'schema>(
  entids::DB_TX_INSTANT,
  schema.require_attribute_for_entid(entids::DB_TX_INSTANT)?,
  ))
- .or_insert_with(BTreeMap::default)
+ .or_insert(BTreeMap::default())
  .entry(tx_id)
- .or_insert_with(AddAndRetract::default);
+ .or_insert(AddAndRetract::default());
  if !ars.retract.is_empty() {
  // Cannot retract :db/txInstant!
  }
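Several hunks above, including the two directly above, trade `Entry::or_insert_with` for `or_insert`. A tiny standalone sketch (not from this patch) of the difference: `or_insert(value)` evaluates its argument even when the key already exists, while `or_insert_with(f)` only calls `f` on a miss:

    use std::collections::BTreeMap;

    fn main() {
        let mut trie: BTreeMap<&str, Vec<i64>> = BTreeMap::new();

        // Default built lazily: `Vec::new` runs only if "a" is absent.
        trie.entry("a").or_insert_with(Vec::new).push(1);

        // Default built eagerly: this `Vec::new()` is constructed and then
        // immediately discarded because "a" is already present.
        trie.entry("a").or_insert(Vec::new()).push(2);

        assert_eq!(trie["a"], vec![1, 2]);
    }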
@@ -14,7 +14,7 @@ use core_traits::{Entid, TypedValue, ValueType};

  use db_traits::errors::CardinalityConflict;

- use crate::internal_types::AEVTrie;
+ use internal_types::AEVTrie;

  /// Map from found [e a v] to expected type.
  pub(crate) type TypeDisagreements = BTreeMap<(Entid, Entid, TypedValue), ValueType>;
@@ -24,12 +24,11 @@ use edn::entities::OpType;

  use db_traits::errors::Result;

- use crate::types::AttributeSet;
+ use types::AttributeSet;

- use crate::watcher::TransactWatcher;
+ use watcher::TransactWatcher;

  pub struct TxObserver {
- #[allow(clippy::type_complexity)]
  notify_fn: Arc<Box<dyn Fn(&str, IndexMap<&Entid, &AttributeSet>) + Send + Sync>>,
  attributes: AttributeSet,
  }
@@ -83,18 +82,17 @@ impl TxCommand {

  impl Command for TxCommand {
  fn execute(&mut self) {
- if let Some(observers) = self.observers.upgrade() {
+ self.observers.upgrade().map(|observers| {
  for (key, observer) in observers.iter() {
  let applicable_reports = observer.applicable_reports(&self.reports);
  if !applicable_reports.is_empty() {
  observer.notify(&key, applicable_reports);
  }
  }
- }
+ });
  }
  }

- #[derive(Default)]
  pub struct TxObservationService {
  observers: Arc<IndexMap<String, Arc<TxObserver>>>,
  executor: Option<Sender<Box<dyn Command + Send>>>,
@@ -109,7 +107,7 @@ impl TxObservationService {
  }

  // For testing purposes
- pub fn is_registered(&self, key: &str) -> bool {
+ pub fn is_registered(&self, key: &String) -> bool {
  self.observers.contains_key(key)
  }

@@ -117,7 +115,7 @@ impl TxObservationService {
  Arc::make_mut(&mut self.observers).insert(key, observer);
  }

- pub fn deregister(&mut self, key: &str) {
+ pub fn deregister(&mut self, key: &String) {
  Arc::make_mut(&mut self.observers).remove(key);
  }

@@ -132,7 +130,6 @@ impl TxObservationService {
  }

  let executor = self.executor.get_or_insert_with(|| {
- #[allow(clippy::type_complexity)]
  let (tx, rx): (
  Sender<Box<dyn Command + Send>>,
  Receiver<Box<dyn Command + Send>>,
@@ -157,7 +154,6 @@ impl Drop for TxObservationService {
  }
  }

- #[derive(Default)]
  pub struct InProgressObserverTransactWatcher {
  collected_attributes: AttributeSet,
  pub txes: IndexMap<Entid, AttributeSet>,
@@ -178,7 +174,8 @@ impl TransactWatcher for InProgressObserverTransactWatcher {
  }

  fn done(&mut self, t: &Entid, _schema: &Schema) -> Result<()> {
- let collected_attributes = ::std::mem::take(&mut self.collected_attributes);
+ let collected_attributes =
+ ::std::mem::replace(&mut self.collected_attributes, Default::default());
  self.txes.insert(*t, collected_attributes);
  Ok(())
  }
@@ -127,8 +127,8 @@ pub struct DB {
  impl DB {
  pub fn new(partition_map: PartitionMap, schema: Schema) -> DB {
  DB {
- partition_map,
- schema,
+ partition_map: partition_map,
+ schema: schema,
  }
  }
  }
@@ -18,19 +18,19 @@ use std::collections::{BTreeMap, BTreeSet};
  use indexmap;
  use petgraph::unionfind;

- use crate::internal_types::{
+ use db_traits::errors::{DbErrorKind, Result};
+ use internal_types::{
  Population, TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithoutTempIds, TypedValueOr,
  };
- use crate::types::AVPair;
- use db_traits::errors::{DbErrorKind, Result};
+ use types::AVPair;

  use mentat_core::util::Either::*;

  use core_traits::{attribute, Attribute, Entid, TypedValue};

- use crate::schema::SchemaBuilding;
  use edn::entities::OpType;
  use mentat_core::Schema;
+ use schema::SchemaBuilding;

  /// A "Simple upsert" that looks like [:db/add TEMPID a v], where a is :db.unique/identity.
  #[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -227,7 +227,7 @@ impl Generation {
  }

  // Collect id->[a v] pairs that might upsert at this evolutionary step.
- pub(crate) fn temp_id_avs(&self) -> Vec<(TempIdHandle, AVPair)> {
+ pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
  let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
  // TODO: map/collect.
  for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
@@ -269,32 +269,32 @@ impl Generation {

  for term in self.allocations.iter() {
  match term {
- Term::AddOrRetract(OpType::Add, Right(ref t1), a, Right(ref t2)) => {
+ &Term::AddOrRetract(OpType::Add, Right(ref t1), a, Right(ref t2)) => {
  temp_ids.insert(t1.clone());
  temp_ids.insert(t2.clone());
- let attribute: &Attribute = schema.require_attribute_for_entid(*a)?;
+ let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
  if attribute.unique == Some(attribute::Unique::Identity) {
  tempid_avs
- .entry((*a, Right(t2.clone())))
- .or_insert_with(Vec::new)
+ .entry((a, Right(t2.clone())))
+ .or_insert(vec![])
  .push(t1.clone());
  }
  }
- Term::AddOrRetract(OpType::Add, Right(ref t), a, ref x @ Left(_)) => {
+ &Term::AddOrRetract(OpType::Add, Right(ref t), a, ref x @ Left(_)) => {
  temp_ids.insert(t.clone());
- let attribute: &Attribute = schema.require_attribute_for_entid(*a)?;
+ let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
  if attribute.unique == Some(attribute::Unique::Identity) {
  tempid_avs
- .entry((*a, x.clone()))
- .or_insert_with(Vec::new)
+ .entry((a, x.clone()))
+ .or_insert(vec![])
  .push(t.clone());
  }
  }
- Term::AddOrRetract(OpType::Add, Left(_), _, Right(ref t)) => {
+ &Term::AddOrRetract(OpType::Add, Left(_), _, Right(ref t)) => {
  temp_ids.insert(t.clone());
  }
- Term::AddOrRetract(OpType::Add, Left(_), _, Left(_)) => unreachable!(),
- Term::AddOrRetract(OpType::Retract, _, _, _) => {
+ &Term::AddOrRetract(OpType::Add, Left(_), _, Left(_)) => unreachable!(),
+ &Term::AddOrRetract(OpType::Retract, _, _, _) => {
  // [:db/retract ...] entities never allocate entids; they have to resolve due to
  // other upserts (or they fail the transaction).
  }
@@ -319,11 +319,13 @@ impl Generation {
  );

  for vs in tempid_avs.values() {
- if let Some(&first_index) = vs.first().and_then(|first| temp_ids.get(first)) {
- for tempid in vs {
- temp_ids.get(tempid).map(|&i| uf.union(first_index, i));
- }
- }
+ vs.first()
+ .and_then(|first| temp_ids.get(first))
+ .map(|&first_index| {
+ for tempid in vs {
+ temp_ids.get(tempid).map(|&i| uf.union(first_index, i));
+ }
+ });
  }

  debug!("union-find aggregation {:?}", uf.clone().into_labeling());
@@ -67,11 +67,6 @@ fn test_from_sql_value_pair() {
  .unwrap(),
  TypedValue::typed_ns_keyword("db", "keyword")
  );
- assert_eq!(
- TypedValue::from_sql_value_pair(rusqlite::types::Value::Blob(vec![1, 2, 3, 42]), 15)
- .unwrap(),
- TypedValue::Bytes((vec![1, 2, 3, 42]).into())
- );
  }

  #[test]
@@ -11,7 +11,7 @@ source "https://rubygems.org"
  # gem "jekyll", "~> 3.7.3"

  # This is the default theme for new Jekyll sites. You may change this to anything you like.
- gem "minima", "~> 2.5.1"
+ gem "minima", "~> 2.0"

  # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
  # uncomment the line below. To upgrade, run `bundle update github-pages`.
@@ -19,9 +19,9 @@ gem "minima", "~> 2.5.1"

  # If you have any plugins, put them here!
  group :jekyll_plugins do
- gem "jekyll-feed", "~> 0.15.1"
- gem "github-pages", "~> 215"
- gem "jekyll-commonmark-ghpages", "~> 0.1.6"
+ gem "jekyll-feed", "~> 0.9.3"
+ gem "github-pages", "~> 186"
+ gem "jekyll-commonmark-ghpages", "~> 0.1.5"
  end

  # Windows does not include zoneinfo files, so bundle the tzinfo-data gem
@@ -1,161 +1,148 @@
  GEM
  remote: https://rubygems.org/
  specs:
- activesupport (6.0.4)
- concurrent-ruby (~> 1.0, >= 1.0.2)
- i18n (>= 0.7, < 2)
+ activesupport (4.2.10)
+ i18n (~> 0.7)
  minitest (~> 5.1)
+ thread_safe (~> 0.3, >= 0.3.4)
  tzinfo (~> 1.1)
- zeitwerk (~> 2.2, >= 2.2.2)
- addressable (2.8.0)
- public_suffix (>= 2.0.2, < 5.0)
+ addressable (2.5.2)
+ public_suffix (>= 2.0.2, < 4.0)
  coffee-script (2.4.1)
  coffee-script-source
  execjs
  coffee-script-source (1.11.1)
  colorator (1.1.0)
- commonmarker (0.17.13)
+ commonmarker (0.17.9)
  ruby-enum (~> 0.5)
- concurrent-ruby (1.1.9)
- dnsruby (1.61.7)
- simpleidn (~> 0.1)
- em-websocket (0.5.2)
+ concurrent-ruby (1.0.5)
+ dnsruby (1.60.2)
+ em-websocket (0.5.1)
  eventmachine (>= 0.12.9)
  http_parser.rb (~> 0.6.0)
- ethon (0.14.0)
- ffi (>= 1.15.0)
+ ethon (0.11.0)
+ ffi (>= 1.3.0)
  eventmachine (1.2.7)
- execjs (2.8.1)
- faraday (1.4.3)
- faraday-em_http (~> 1.0)
- faraday-em_synchrony (~> 1.0)
- faraday-excon (~> 1.1)
- faraday-net_http (~> 1.0)
- faraday-net_http_persistent (~> 1.1)
+ execjs (2.7.0)
+ faraday (0.15.2)
  multipart-post (>= 1.2, < 3)
- ruby2_keywords (>= 0.0.4)
- faraday-em_http (1.0.0)
- faraday-em_synchrony (1.0.0)
- faraday-excon (1.1.0)
- faraday-net_http (1.0.1)
- faraday-net_http_persistent (1.1.0)
- ffi (1.15.3)
+ ffi (1.9.25)
  forwardable-extended (2.6.0)
- gemoji (3.0.1)
- github-pages (215)
- github-pages-health-check (= 1.17.2)
- jekyll (= 3.9.0)
- jekyll-avatar (= 0.7.0)
+ gemoji (3.0.0)
+ github-pages (186)
+ activesupport (= 4.2.10)
+ github-pages-health-check (= 1.8.1)
+ jekyll (= 3.7.3)
+ jekyll-avatar (= 0.5.0)
  jekyll-coffeescript (= 1.1.1)
- jekyll-commonmark-ghpages (= 0.1.6)
+ jekyll-commonmark-ghpages (= 0.1.5)
  jekyll-default-layout (= 0.1.4)
- jekyll-feed (= 0.15.1)
+ jekyll-feed (= 0.9.3)
  jekyll-gist (= 1.5.0)
- jekyll-github-metadata (= 2.13.0)
- jekyll-mentions (= 1.6.0)
- jekyll-optional-front-matter (= 0.3.2)
+ jekyll-github-metadata (= 2.9.4)
+ jekyll-mentions (= 1.3.0)
+ jekyll-optional-front-matter (= 0.3.0)
  jekyll-paginate (= 1.1.0)
- jekyll-readme-index (= 0.3.0)
- jekyll-redirect-from (= 0.16.0)
- jekyll-relative-links (= 0.6.1)
- jekyll-remote-theme (= 0.4.3)
+ jekyll-readme-index (= 0.2.0)
+ jekyll-redirect-from (= 0.13.0)
+ jekyll-relative-links (= 0.5.3)
+ jekyll-remote-theme (= 0.3.1)
  jekyll-sass-converter (= 1.5.2)
- jekyll-seo-tag (= 2.7.1)
- jekyll-sitemap (= 1.4.0)
- jekyll-swiss (= 1.0.0)
+ jekyll-seo-tag (= 2.4.0)
+ jekyll-sitemap (= 1.2.0)
+ jekyll-swiss (= 0.4.0)
  jekyll-theme-architect (= 0.1.1)
  jekyll-theme-cayman (= 0.1.1)
  jekyll-theme-dinky (= 0.1.1)
- jekyll-theme-hacker (= 0.1.2)
+ jekyll-theme-hacker (= 0.1.1)
  jekyll-theme-leap-day (= 0.1.1)
  jekyll-theme-merlot (= 0.1.1)
  jekyll-theme-midnight (= 0.1.1)
  jekyll-theme-minimal (= 0.1.1)
  jekyll-theme-modernist (= 0.1.1)
- jekyll-theme-primer (= 0.5.4)
+ jekyll-theme-primer (= 0.5.3)
  jekyll-theme-slate (= 0.1.1)
  jekyll-theme-tactile (= 0.1.1)
  jekyll-theme-time-machine (= 0.1.1)
- jekyll-titles-from-headings (= 0.5.3)
- jemoji (= 0.12.0)
- kramdown (= 2.3.1)
- kramdown-parser-gfm (= 1.1.0)
- liquid (= 4.0.3)
+ jekyll-titles-from-headings (= 0.5.1)
+ jemoji (= 0.9.0)
+ kramdown (= 1.16.2)
+ liquid (= 4.0.0)
+ listen (= 3.1.5)
  mercenary (~> 0.3)
- minima (= 2.5.1)
- nokogiri (>= 1.10.4, < 2.0)
- rouge (= 3.26.0)
+ minima (= 2.4.1)
+ nokogiri (>= 1.8.2, < 2.0)
+ rouge (= 2.2.1)
  terminal-table (~> 1.4)
- github-pages-health-check (1.17.2)
+ github-pages-health-check (1.8.1)
  addressable (~> 2.3)
  dnsruby (~> 1.60)
  octokit (~> 4.0)
- public_suffix (>= 2.0.2, < 5.0)
+ public_suffix (~> 2.0)
  typhoeus (~> 1.3)
- html-pipeline (2.14.0)
+ html-pipeline (2.8.0)
  activesupport (>= 2)
  nokogiri (>= 1.4)
  http_parser.rb (0.6.0)
  i18n (0.9.5)
  concurrent-ruby (~> 1.0)
- jekyll (3.9.0)
+ jekyll (3.7.3)
  addressable (~> 2.4)
  colorator (~> 1.0)
  em-websocket (~> 0.5)
  i18n (~> 0.7)
  jekyll-sass-converter (~> 1.0)
  jekyll-watch (~> 2.0)
- kramdown (>= 1.17, < 3)
+ kramdown (~> 1.14)
  liquid (~> 4.0)
  mercenary (~> 0.3.3)
  pathutil (~> 0.9)
  rouge (>= 1.7, < 4)
  safe_yaml (~> 1.0)
- jekyll-avatar (0.7.0)
- jekyll (>= 3.0, < 5.0)
+ jekyll-avatar (0.5.0)
+ jekyll (~> 3.0)
  jekyll-coffeescript (1.1.1)
  coffee-script (~> 2.2)
  coffee-script-source (~> 1.11.1)
- jekyll-commonmark (1.3.1)
+ jekyll-commonmark (1.2.0)
  commonmarker (~> 0.14)
- jekyll (>= 3.7, < 5.0)
- jekyll-commonmark-ghpages (0.1.6)
+ jekyll (>= 3.0, < 4.0)
+ jekyll-commonmark-ghpages (0.1.5)
  commonmarker (~> 0.17.6)
- jekyll-commonmark (~> 1.2)
- rouge (>= 2.0, < 4.0)
+ jekyll-commonmark (~> 1)
+ rouge (~> 2)
  jekyll-default-layout (0.1.4)
  jekyll (~> 3.0)
- jekyll-feed (0.15.1)
- jekyll (>= 3.7, < 5.0)
+ jekyll-feed (0.9.3)
+ jekyll (~> 3.3)
  jekyll-gist (1.5.0)
  octokit (~> 4.2)
- jekyll-github-metadata (2.13.0)
- jekyll (>= 3.4, < 5.0)
+ jekyll-github-metadata (2.9.4)
+ jekyll (~> 3.1)
  octokit (~> 4.0, != 4.4.0)
- jekyll-mentions (1.6.0)
+ jekyll-mentions (1.3.0)
+ activesupport (~> 4.0)
  html-pipeline (~> 2.3)
- jekyll (>= 3.7, < 5.0)
- jekyll-optional-front-matter (0.3.2)
- jekyll (>= 3.0, < 5.0)
+ jekyll (~> 3.0)
+ jekyll-optional-front-matter (0.3.0)
+ jekyll (~> 3.0)
  jekyll-paginate (1.1.0)
- jekyll-readme-index (0.3.0)
- jekyll (>= 3.0, < 5.0)
- jekyll-redirect-from (0.16.0)
- jekyll (>= 3.3, < 5.0)
- jekyll-relative-links (0.6.1)
- jekyll (>= 3.3, < 5.0)
- jekyll-remote-theme (0.4.3)
+ jekyll-readme-index (0.2.0)
+ jekyll (~> 3.0)
+ jekyll-redirect-from (0.13.0)
+ jekyll (~> 3.3)
+ jekyll-relative-links (0.5.3)
+ jekyll (~> 3.3)
+ jekyll-remote-theme (0.3.1)
  addressable (~> 2.0)
|
jekyll (~> 3.5)
|
||||||
jekyll (>= 3.5, < 5.0)
|
rubyzip (>= 1.2.1, < 3.0)
|
||||||
jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0)
|
|
||||||
rubyzip (>= 1.3.0, < 3.0)
|
|
||||||
jekyll-sass-converter (1.5.2)
|
jekyll-sass-converter (1.5.2)
|
||||||
sass (~> 3.4)
|
sass (~> 3.4)
|
||||||
jekyll-seo-tag (2.7.1)
|
jekyll-seo-tag (2.4.0)
|
||||||
jekyll (>= 3.8, < 5.0)
|
jekyll (~> 3.3)
|
||||||
jekyll-sitemap (1.4.0)
|
jekyll-sitemap (1.2.0)
|
||||||
jekyll (>= 3.7, < 5.0)
|
jekyll (~> 3.3)
|
||||||
jekyll-swiss (1.0.0)
|
jekyll-swiss (0.4.0)
|
||||||
jekyll-theme-architect (0.1.1)
|
jekyll-theme-architect (0.1.1)
|
||||||
jekyll (~> 3.5)
|
jekyll (~> 3.5)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
|
@ -165,8 +152,8 @@ GEM
|
||||||
jekyll-theme-dinky (0.1.1)
|
jekyll-theme-dinky (0.1.1)
|
||||||
jekyll (~> 3.5)
|
jekyll (~> 3.5)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
jekyll-theme-hacker (0.1.2)
|
jekyll-theme-hacker (0.1.1)
|
||||||
jekyll (> 3.5, < 5.0)
|
jekyll (~> 3.5)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
jekyll-theme-leap-day (0.1.1)
|
jekyll-theme-leap-day (0.1.1)
|
||||||
jekyll (~> 3.5)
|
jekyll (~> 3.5)
|
||||||
|
@ -183,8 +170,8 @@ GEM
|
||||||
jekyll-theme-modernist (0.1.1)
|
jekyll-theme-modernist (0.1.1)
|
||||||
jekyll (~> 3.5)
|
jekyll (~> 3.5)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
jekyll-theme-primer (0.5.4)
|
jekyll-theme-primer (0.5.3)
|
||||||
jekyll (> 3.5, < 5.0)
|
jekyll (~> 3.5)
|
||||||
jekyll-github-metadata (~> 2.9)
|
jekyll-github-metadata (~> 2.9)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
jekyll-theme-slate (0.1.1)
|
jekyll-theme-slate (0.1.1)
|
||||||
|
@ -196,82 +183,71 @@ GEM
|
||||||
jekyll-theme-time-machine (0.1.1)
|
jekyll-theme-time-machine (0.1.1)
|
||||||
jekyll (~> 3.5)
|
jekyll (~> 3.5)
|
||||||
jekyll-seo-tag (~> 2.0)
|
jekyll-seo-tag (~> 2.0)
|
||||||
jekyll-titles-from-headings (0.5.3)
|
jekyll-titles-from-headings (0.5.1)
|
||||||
jekyll (>= 3.3, < 5.0)
|
jekyll (~> 3.3)
|
||||||
jekyll-watch (2.2.1)
|
jekyll-watch (2.0.0)
|
||||||
listen (~> 3.0)
|
listen (~> 3.0)
|
||||||
jemoji (0.12.0)
|
jemoji (0.9.0)
|
||||||
|
activesupport (~> 4.0, >= 4.2.9)
|
||||||
gemoji (~> 3.0)
|
gemoji (~> 3.0)
|
||||||
html-pipeline (~> 2.2)
|
html-pipeline (~> 2.2)
|
||||||
jekyll (>= 3.0, < 5.0)
|
jekyll (~> 3.0)
|
||||||
kramdown (2.3.1)
|
kramdown (1.16.2)
|
||||||
rexml
|
liquid (4.0.0)
|
||||||
kramdown-parser-gfm (1.1.0)
|
listen (3.1.5)
|
||||||
kramdown (~> 2.0)
|
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||||
liquid (4.0.3)
|
rb-inotify (~> 0.9, >= 0.9.7)
|
||||||
listen (3.5.1)
|
ruby_dep (~> 1.2)
|
||||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
|
||||||
rb-inotify (~> 0.9, >= 0.9.10)
|
|
||||||
mercenary (0.3.6)
|
mercenary (0.3.6)
|
||||||
mini_portile2 (2.6.1)
|
mini_portile2 (2.3.0)
|
||||||
minima (2.5.1)
|
minima (2.4.1)
|
||||||
jekyll (>= 3.5, < 5.0)
|
jekyll (~> 3.5)
|
||||||
jekyll-feed (~> 0.9)
|
jekyll-feed (~> 0.9)
|
||||||
jekyll-seo-tag (~> 2.1)
|
jekyll-seo-tag (~> 2.1)
|
||||||
minitest (5.14.4)
|
minitest (5.11.3)
|
||||||
multipart-post (2.1.1)
|
multipart-post (2.0.0)
|
||||||
nokogiri (1.12.5)
|
nokogiri (1.8.3)
|
||||||
mini_portile2 (~> 2.6.1)
|
mini_portile2 (~> 2.3.0)
|
||||||
racc (~> 1.4)
|
octokit (4.9.0)
|
||||||
octokit (4.21.0)
|
|
||||||
faraday (>= 0.9)
|
|
||||||
sawyer (~> 0.8.0, >= 0.5.3)
|
sawyer (~> 0.8.0, >= 0.5.3)
|
||||||
pathutil (0.16.2)
|
pathutil (0.16.1)
|
||||||
forwardable-extended (~> 2.6)
|
forwardable-extended (~> 2.6)
|
||||||
public_suffix (4.0.6)
|
public_suffix (2.0.5)
|
||||||
racc (1.5.2)
|
rb-fsevent (0.10.3)
|
||||||
rb-fsevent (0.11.0)
|
rb-inotify (0.9.10)
|
||||||
rb-inotify (0.10.1)
|
ffi (>= 0.5.0, < 2)
|
||||||
ffi (~> 1.0)
|
rouge (2.2.1)
|
||||||
rexml (3.2.5)
|
ruby-enum (0.7.2)
|
||||||
rouge (3.26.0)
|
|
||||||
ruby-enum (0.9.0)
|
|
||||||
i18n
|
i18n
|
||||||
ruby2_keywords (0.0.4)
|
ruby_dep (1.5.0)
|
||||||
rubyzip (2.3.0)
|
rubyzip (1.2.1)
|
||||||
safe_yaml (1.0.5)
|
safe_yaml (1.0.4)
|
||||||
sass (3.7.4)
|
sass (3.5.6)
|
||||||
sass-listen (~> 4.0.0)
|
sass-listen (~> 4.0.0)
|
||||||
sass-listen (4.0.0)
|
sass-listen (4.0.0)
|
||||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||||
rb-inotify (~> 0.9, >= 0.9.7)
|
rb-inotify (~> 0.9, >= 0.9.7)
|
||||||
sawyer (0.8.2)
|
sawyer (0.8.1)
|
||||||
addressable (>= 2.3.5)
|
addressable (>= 2.3.5, < 2.6)
|
||||||
faraday (> 0.8, < 2.0)
|
faraday (~> 0.8, < 1.0)
|
||||||
simpleidn (0.2.1)
|
|
||||||
unf (~> 0.1.4)
|
|
||||||
terminal-table (1.8.0)
|
terminal-table (1.8.0)
|
||||||
unicode-display_width (~> 1.1, >= 1.1.1)
|
unicode-display_width (~> 1.1, >= 1.1.1)
|
||||||
thread_safe (0.3.6)
|
thread_safe (0.3.6)
|
||||||
typhoeus (1.4.0)
|
typhoeus (1.3.0)
|
||||||
ethon (>= 0.9.0)
|
ethon (>= 0.9.0)
|
||||||
tzinfo (1.2.9)
|
tzinfo (1.2.5)
|
||||||
thread_safe (~> 0.1)
|
thread_safe (~> 0.1)
|
||||||
unf (0.1.4)
|
unicode-display_width (1.4.0)
|
||||||
unf_ext
|
|
||||||
unf_ext (0.0.7.7)
|
|
||||||
unicode-display_width (1.7.0)
|
|
||||||
zeitwerk (2.4.2)
|
|
||||||
|
|
||||||
PLATFORMS
|
PLATFORMS
|
||||||
ruby
|
ruby
|
||||||
|
|
||||||
DEPENDENCIES
|
DEPENDENCIES
|
||||||
github-pages (~> 215)
|
github-pages (~> 186)
|
||||||
jekyll-commonmark-ghpages (~> 0.1.6)
|
jekyll-commonmark-ghpages (~> 0.1.5)
|
||||||
jekyll-feed (~> 0.15.1)
|
jekyll-feed (~> 0.9.3)
|
||||||
minima (~> 2.5.1)
|
minima (~> 2.0)
|
||||||
tzinfo-data
|
tzinfo-data
|
||||||
|
|
||||||
BUNDLED WITH
|
BUNDLED WITH
|
||||||
2.2.21
|
1.16.2
|
||||||
|
|
|
edn/Cargo.toml

@@ -10,21 +10,19 @@ description = "EDN parser for Project Mentat"
 readme = "./README.md"

 [dependencies]
-chrono = "~0.4"
-itertools = "~0.10"
-num = "~0.4"
-ordered-float = "~2.8"
-pretty = "~0.12"
-uuid = { version = "~1", features = ["v4", "serde"] }
-serde = { version = "~1.0", optional = true }
-serde_derive = { version = "~1.0", optional = true }
-peg = "~0.8"
-bytes = "1.0.1"
-hex = "0.4.3"
+chrono = "0.4"
+itertools = "0.8"
+num = "0.2"
+ordered-float = "1.0"
+pretty = "0.9"
+uuid = { version = "0.8", features = ["v4", "serde"] }
+serde = { version = "1.0", optional = true }
+serde_derive = { version = "1.0", optional = true }
+peg = "0.6"

 [dev-dependencies]
-serde_test = "~1.0"
-serde_json = "~1.0"
+serde_test = "1.0"
+serde_json = "1.0"

 [features]
 serde_support = ["serde", "serde_derive"]
edn/src/entities.rs

@@ -13,11 +13,11 @@
 use std::collections::BTreeMap;
 use std::fmt;

-use crate::value_rc::ValueRc;
+use value_rc::ValueRc;

-use crate::symbols::{Keyword, PlainSymbol};
+use symbols::{Keyword, PlainSymbol};

-use crate::types::ValueAndSpan;
+use types::ValueAndSpan;

 /// `EntityPlace` and `ValuePlace` embed values, either directly (i.e., `ValuePlace::Atom`) or
 /// indirectly (i.e., `EntityPlace::LookupRef`). In order to maintain the graph of `Into` and
@@ -49,8 +49,8 @@ impl TempId {
 impl fmt::Display for TempId {
     fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match self {
-            TempId::External(ref s) => write!(f, "{}", s),
-            TempId::Internal(x) => write!(f, "<tempid {}>", x),
+            &TempId::External(ref s) => write!(f, "{}", s),
+            &TempId::Internal(x) => write!(f, "<tempid {}>", x),
         }
     }
 }
@@ -76,8 +76,8 @@ impl From<Keyword> for EntidOrIdent {
 impl EntidOrIdent {
     pub fn unreversed(&self) -> Option<EntidOrIdent> {
         match self {
-            EntidOrIdent::Entid(_) => None,
-            EntidOrIdent::Ident(ref a) => a.unreversed().map(EntidOrIdent::Ident),
+            &EntidOrIdent::Entid(_) => None,
+            &EntidOrIdent::Ident(ref a) => a.unreversed().map(EntidOrIdent::Ident),
         }
     }
 }
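The `TempId` and `EntidOrIdent` hunks above differ only in match style: one side matches on `self` directly and lets match ergonomics bind by reference, the other spells the `&`/`ref` patterns out. A minimal sketch of the same idiom on a hypothetical enum (not this crate's type):

```rust
use std::fmt;

// Hypothetical enum, mirroring the shape of the Display impls in the hunk above.
enum Tag {
    External(String),
    Internal(i64),
}

impl fmt::Display for Tag {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // With match ergonomics (Rust 2018+), `self` is a `&Tag`, so arms can be
        // written without a leading `&` and still bind fields by reference.
        match self {
            Tag::External(s) => write!(f, "{}", s),
            Tag::Internal(x) => write!(f, "<tempid {}>", x),
        }
        // The pre-ergonomics spelling is equivalent:
        // match self {
        //     &Tag::External(ref s) => write!(f, "{}", s),
        //     &Tag::Internal(x) => write!(f, "<tempid {}>", x),
        // }
    }
}

fn main() {
    println!("{}", Tag::External("e".into())); // e
    println!("{}", Tag::Internal(7));          // <tempid 7>
}
```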
edn/src/intern_set.rs

@@ -14,7 +14,7 @@ use std::collections::HashSet;
 use std::hash::Hash;
 use std::ops::{Deref, DerefMut};

-use crate::ValueRc;
+use ValueRc;

 /// An `InternSet` allows to "intern" some potentially large values, maintaining a single value
 /// instance owned by the `InternSet` and leaving consumers with lightweight ref-counted handles to
edn/src/lib.rs

@@ -8,9 +8,7 @@
 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
 // specific language governing permissions and limitations under the License.

-extern crate bytes;
 extern crate chrono;
-extern crate hex;
 extern crate itertools;
 extern crate num;
 extern crate ordered_float;
@@ -27,7 +25,7 @@ extern crate serde_derive;

 pub mod entities;
 pub mod intern_set;
-pub use crate::intern_set::InternSet;
+pub use intern_set::InternSet;
 // Intentionally not pub.
 pub mod matcher;
 mod namespaceable_name;
@@ -37,22 +35,20 @@ pub mod symbols;
 pub mod types;
 pub mod utils;
 pub mod value_rc;
-pub use crate::value_rc::{Cloned, FromRc, ValueRc};
+pub use value_rc::{Cloned, FromRc, ValueRc};

 // Re-export the types we use.
-use bytes::Bytes;
 pub use chrono::{DateTime, Utc};
-use hex::decode;
 pub use num::BigInt;
 pub use ordered_float::OrderedFloat;
 pub use uuid::Uuid;

 // Export from our modules.
-pub use crate::types::{
+pub use types::{
     FromMicros, FromMillis, Span, SpannedValue, ToMicros, ToMillis, Value, ValueAndSpan,
 };

-pub use crate::symbols::{Keyword, NamespacedSymbol, PlainSymbol};
+pub use symbols::{Keyword, NamespacedSymbol, PlainSymbol};

 use std::collections::{BTreeMap, BTreeSet, LinkedList};
 use std::f64::{INFINITY, NAN, NEG_INFINITY};
@@ -60,8 +56,8 @@ use std::iter::FromIterator;

 use chrono::TimeZone;

-use crate::entities::*;
-use crate::query::FromValue;
+use entities::*;
+use query::FromValue;

 // Goal: Be able to parse https://github.com/edn-format/edn
 // Also extensible to help parse http://docs.datomic.com/query.html
@@ -72,7 +68,7 @@ use crate::query::FromValue;
 // TODO: Support tagged elements
 // TODO: Support discard

-pub type ParseError = peg::error::ParseError<peg::str::LineCol>;
+pub type ParseErrorKind = peg::error::ParseError<peg::str::LineCol>;

 peg::parser!(pub grammar parse() for str {

@@ -128,7 +124,7 @@ peg::parser!(pub grammar parse() for str {
     // result = r#""foo\\bar""#
     // For the typical case, string_normal_chars will match multiple, leading to a single-element vec.
     pub rule raw_text() -> String = "\"" t:((string_special_char() / string_normal_chars())*) "\""
-        { t.join("") }
+        { t.join(&"").to_string() }

     pub rule text() -> SpannedValue
         = v:raw_text() { SpannedValue::Text(v) }
@@ -152,17 +148,17 @@ peg::parser!(pub grammar parse() for str {
     rule inst_micros() -> DateTime<Utc> =
         "#instmicros" whitespace()+ d:$( digit()+ ) {
             let micros = d.parse::<i64>().unwrap();
-            let seconds: i64 = micros / 1_000_000;
-            let nanos: u32 = ((micros % 1_000_000).unsigned_abs() as u32) * 1000;
-            Utc.timestamp_opt(seconds, nanos).unwrap()
+            let seconds: i64 = micros / 1000000;
+            let nanos: u32 = ((micros % 1000000).abs() as u32) * 1000;
+            Utc.timestamp(seconds, nanos)
         }

     rule inst_millis() -> DateTime<Utc> =
         "#instmillis" whitespace()+ d:$( digit()+ ) {
             let millis = d.parse::<i64>().unwrap();
             let seconds: i64 = millis / 1000;
-            let nanos: u32 = ((millis % 1000).unsigned_abs() as u32) * 1_000_000;
-            Utc.timestamp_opt(seconds, nanos).unwrap()
+            let nanos: u32 = ((millis % 1000).abs() as u32) * 1000000;
+            Utc.timestamp(seconds, nanos)
         }

     rule inst() -> SpannedValue = t:(inst_millis() / inst_micros() / inst_string())
@@ -176,14 +172,6 @@ peg::parser!(pub grammar parse() for str {
     pub rule uuid() -> SpannedValue = "#uuid" whitespace()+ u:uuid_string()
         { SpannedValue::Uuid(u) }

-    rule byte_buffer() -> Bytes =
-        u:$( hex()+ ) {
-            let b = decode(u).expect("this is a valid hex byte string");
-            Bytes::copy_from_slice(&b)
-        }
-    pub rule bytes() -> SpannedValue = "#bytes" whitespace()+ u:byte_buffer()
-        { SpannedValue::Bytes(u) }
-
     rule namespace_divider() = "."
     rule namespace_separator() = "/"

@@ -231,7 +219,7 @@ peg::parser!(pub grammar parse() for str {

     // Note: It's important that float comes before integer or the parser assumes that floats are integers and fails to parse.
     pub rule value() -> ValueAndSpan =
-        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / bytes() / text() / keyword() / symbol() / list() / vector() / map() / set() ) end:position!() __ {
+        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / text() / keyword() / symbol() / list() / vector() / map() / set()) end:position!() __ {
             ValueAndSpan {
                 inner: v,
                 span: Span::new(start, end)
@@ -323,7 +311,7 @@ peg::parser!(pub grammar parse() for str {
         / __ v:atom() __ { ValuePlace::Atom(v) }

     pub rule entity() -> Entity<ValueAndSpan>
-        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e, a: AttributePlace::Entid(a), v } }
+        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e: e, a: AttributePlace::Entid(a), v: v } }
         / __ "[" __ op:(op()) __ e:(value_place()) __ a:(backward_entid()) __ v:(entity_place()) __ "]" __ { Entity::AddOrRetract { op, e: v, a: AttributePlace::Entid(a), v: e } }
         / __ map:map_notation() __ { Entity::MapNotation(map) }
         / expected!("entity")
@@ -361,11 +349,11 @@ peg::parser!(pub grammar parse() for str {
         = __ "*" __ { query::PullAttributeSpec::Wildcard }
         / __ k:raw_forward_namespaced_keyword() __ alias:(":as" __ alias:raw_forward_keyword() __ { alias })? {
             let attribute = query::PullConcreteAttribute::Ident(::std::rc::Rc::new(k));
-            let alias = alias.map(::std::rc::Rc::new);
+            let alias = alias.map(|alias| ::std::rc::Rc::new(alias));
             query::PullAttributeSpec::Attribute(
                 query::NamedPullAttribute {
                     attribute,
-                    alias,
+                    alias: alias,
                 })
         }

@@ -482,7 +470,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::Pred(
                 query::Predicate {
                     operator: func.0,
-                    args,
+                    args: args,
                 })
         }

@@ -491,7 +479,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::WhereFn(
                 query::WhereFn {
                     operator: func.0,
-                    args,
+                    args: args,
                     binding,
                 })
         }
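The `#instmicros` and `#instmillis` rules above differ in how they build a `DateTime<Utc>`: one side uses `(…).abs() as u32` and `Utc.timestamp(seconds, nanos)`, the other `unsigned_abs()` and `Utc.timestamp_opt(seconds, nanos).unwrap()`. A standalone sketch of the micros conversion, assuming chrono 0.4 with the `TimeZone` trait in scope (names here are illustrative, not this crate's API):

```rust
use chrono::{DateTime, TimeZone, Utc};

/// Convert a microsecond timestamp into a `DateTime<Utc>`, the way the
/// `#instmicros` rule above does.
fn from_micros(micros: i64) -> DateTime<Utc> {
    let seconds: i64 = micros / 1_000_000;
    // `unsigned_abs` avoids the overflow panic that `abs()` would hit on
    // i64::MIN, and `timestamp_opt(..).unwrap()` is the spelling newer
    // chrono 0.4 releases prefer over the deprecated `timestamp(..)`.
    let nanos: u32 = ((micros % 1_000_000).unsigned_abs() as u32) * 1000;
    Utc.timestamp_opt(seconds, nanos).unwrap()
}

fn main() {
    let dt = from_micros(1_662_000_000_123_456);
    println!("{}", dt.to_rfc3339());
}
```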
edn/src/matcher.rs

@@ -12,8 +12,8 @@ use itertools::diff_with;
 use std::cell::RefCell;
 use std::collections::HashMap;

-use crate::symbols;
-use crate::types::Value;
+use symbols;
+use types::Value;

 /// A trait defining pattern matching rules for any given pattern of type `T`.
 trait PatternMatchingRules<'a, T> {
@@ -87,7 +87,7 @@ impl<'a> Matcher<'a> {
     where
         T: PatternMatchingRules<'a, Value>,
     {
-        use crate::Value::*;
+        use Value::*;

         if T::matches_any(pattern) {
             true
@@ -140,7 +140,7 @@ impl Value {

 #[cfg(test)]
 mod test {
-    use crate::parse;
+    use parse;

     macro_rules! assert_match {
         ( $pattern:tt, $value:tt, $expected:expr ) => {
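Most hunks in this file (and in entities.rs, intern_set.rs, and query.rs) are the same mechanical change: 2015-edition crate-relative imports such as `use symbols;` versus 2018-edition paths spelled `use crate::symbols;`. A minimal sketch of the two spellings, using hypothetical module names rather than this crate's:

```rust
// Rust 2018 edition: paths to items in the current crate are written
// explicitly through `crate::`.
mod types {
    #[derive(Debug)]
    pub struct Value(pub i64);
}

mod matcher {
    // 2018-edition spelling; the 2015-edition equivalent would be
    // `use types::Value;` (crate-rooted) or `use ::types::Value;`.
    use crate::types::Value;

    pub fn describe(v: &Value) -> String {
        format!("value = {:?}", v)
    }
}

fn main() {
    let v = types::Value(42);
    println!("{}", matcher::describe(&v));
}
```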
edn/src/namespaceable_name.rs

@@ -85,7 +85,7 @@ impl NamespaceableName {

         NamespaceableName {
             components: dest,
-            boundary,
+            boundary: boundary,
         }
     }

@@ -121,7 +121,7 @@ impl NamespaceableName {
         if name.starts_with('_') {
             Self::new(self.namespace(), &name[1..])
         } else {
-            Self::new(self.namespace(), format!("_{}", name))
+            Self::new(self.namespace(), &format!("_{}", name))
         }
     }

@@ -144,7 +144,7 @@ impl NamespaceableName {
     }

     #[inline]
-    pub fn components(&self) -> (&str, &str) {
+    pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
         if self.boundary > 0 {
             (
                 &self.components[0..self.boundary],
@@ -205,8 +205,8 @@ impl fmt::Display for NamespaceableName {
 // friendly and automatic (e.g. `derive`d), and just pass all work off to it in our custom
 // implementation of Serialize and Deserialize.
 #[cfg(feature = "serde_support")]
-#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "serde_support", serde(rename = "NamespaceableName"))]
+#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
 struct SerializedNamespaceableName<'a> {
     namespace: Option<&'a str>,
     name: &'a str,
@@ -219,11 +219,11 @@ impl<'de> Deserialize<'de> for NamespaceableName {
         D: Deserializer<'de>,
     {
         let separated = SerializedNamespaceableName::deserialize(deserializer)?;
-        if separated.name.is_empty() {
+        if separated.name.len() == 0 {
             return Err(de::Error::custom("Empty name in keyword or symbol"));
         }
         if let Some(ns) = separated.namespace {
-            if ns.is_empty() {
+            if ns.len() == 0 {
                 Err(de::Error::custom(
                     "Empty but present namespace in keyword or symbol",
                 ))
@@ -309,6 +309,17 @@ mod test {

         arr.sort();

-        assert_eq!(arr, [n0, n2, n1, n3, n4, n5, n6,]);
+        assert_eq!(
+            arr,
+            [
+                n0.clone(),
+                n2.clone(),
+                n1.clone(),
+                n3.clone(),
+                n4.clone(),
+                n5.clone(),
+                n6.clone(),
+            ]
+        );
     }
 }
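Two of the smaller deltas above are clippy-style cleanups: struct field init shorthand (`boundary,` versus `boundary: boundary,`) and `is_empty()` versus `len() == 0`. A small sketch with a hypothetical struct, not this crate's type:

```rust
struct Name {
    components: String,
    boundary: usize,
}

fn build(components: String, boundary: usize) -> Name {
    // Field init shorthand: when a local has the same name as the field,
    // `boundary,` is equivalent to `boundary: boundary,`.
    Name { components, boundary }
}

fn validate(name: &str) -> Result<(), String> {
    // `is_empty()` is the idiomatic spelling of `len() == 0`.
    if name.is_empty() {
        return Err("Empty name in keyword or symbol".to_string());
    }
    Ok(())
}

fn main() {
    let n = build("db/ident".to_string(), 2);
    println!("{} (boundary {})", n.components, n.boundary);
    println!("{:?}", validate(""));
}
```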
@ -16,7 +16,7 @@ use pretty;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use crate::types::Value;
|
use types::Value;
|
||||||
|
|
||||||
impl Value {
|
impl Value {
|
||||||
/// Return a pretty string representation of this `Value`.
|
/// Return a pretty string representation of this `Value`.
|
||||||
|
@ -57,11 +57,10 @@ impl Value {
|
||||||
{
|
{
|
||||||
let open = open.into();
|
let open = open.into();
|
||||||
let n = open.len() as isize;
|
let n = open.len() as isize;
|
||||||
let i = {
|
let i = vs
|
||||||
let this = vs.into_iter().map(|v| v.as_doc(allocator));
|
.into_iter()
|
||||||
let element = allocator.line();
|
.map(|v| v.as_doc(allocator))
|
||||||
Itertools::intersperse(this, element)
|
.intersperse(allocator.line());
|
||||||
};
|
|
||||||
allocator
|
allocator
|
||||||
.text(open)
|
.text(open)
|
||||||
.append(allocator.concat(i).nest(n))
|
.append(allocator.concat(i).nest(n))
|
||||||
|
@ -82,14 +81,11 @@ impl Value {
|
||||||
Value::List(ref vs) => self.bracket(pp, "(", vs, ")"),
|
Value::List(ref vs) => self.bracket(pp, "(", vs, ")"),
|
||||||
Value::Set(ref vs) => self.bracket(pp, "#{", vs, "}"),
|
Value::Set(ref vs) => self.bracket(pp, "#{", vs, "}"),
|
||||||
Value::Map(ref vs) => {
|
Value::Map(ref vs) => {
|
||||||
let xs = {
|
let xs = vs
|
||||||
let this = vs
|
.iter()
|
||||||
.iter()
|
.rev()
|
||||||
.rev()
|
.map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group())
|
||||||
.map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group());
|
.intersperse(pp.line());
|
||||||
let element = pp.line();
|
|
||||||
Itertools::intersperse(this, element)
|
|
||||||
};
|
|
||||||
pp.text("{")
|
pp.text("{")
|
||||||
.append(pp.concat(xs).nest(1))
|
.append(pp.concat(xs).nest(1))
|
||||||
.append(pp.text("}"))
|
.append(pp.text("}"))
|
||||||
|
@ -101,7 +97,7 @@ impl Value {
|
||||||
Value::Text(ref v) => pp.text("\"").append(v.as_str()).append("\""),
|
Value::Text(ref v) => pp.text("\"").append(v.as_str()).append("\""),
|
||||||
Value::Uuid(ref u) => pp
|
Value::Uuid(ref u) => pp
|
||||||
.text("#uuid \"")
|
.text("#uuid \"")
|
||||||
.append(u.hyphenated().to_string())
|
.append(u.to_hyphenated().to_string())
|
||||||
.append("\""),
|
.append("\""),
|
||||||
Value::Instant(ref v) => pp
|
Value::Instant(ref v) => pp
|
||||||
.text("#inst \"")
|
.text("#inst \"")
|
||||||
|
@ -114,7 +110,7 @@ impl Value {
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
use crate::parse;
|
use parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_pp_io() {
|
fn test_pp_io() {
|
||||||
|
|
239
edn/src/query.rs
239
edn/src/query.rs
|
@ -35,11 +35,11 @@ use std;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use crate::{BigInt, DateTime, OrderedFloat, Utc, Uuid};
|
use {BigInt, DateTime, OrderedFloat, Utc, Uuid};
|
||||||
|
|
||||||
use crate::value_rc::{FromRc, ValueRc};
|
use value_rc::{FromRc, ValueRc};
|
||||||
|
|
||||||
pub use crate::{Keyword, PlainSymbol};
|
pub use {Keyword, PlainSymbol};
|
||||||
|
|
||||||
pub type SrcVarName = String; // Do not include the required syntactic '$'.
|
pub type SrcVarName = String; // Do not include the required syntactic '$'.
|
||||||
|
|
||||||
|
@ -51,6 +51,10 @@ impl Variable {
|
||||||
self.0.as_ref().0.as_str()
|
self.0.as_ref().0.as_str()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn to_string(&self) -> String {
|
||||||
|
self.0.as_ref().0.clone()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn name(&self) -> PlainSymbol {
|
pub fn name(&self) -> PlainSymbol {
|
||||||
self.0.as_ref().clone()
|
self.0.as_ref().clone()
|
||||||
}
|
}
|
||||||
|
@ -64,15 +68,15 @@ impl Variable {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait FromValue<T> {
|
pub trait FromValue<T> {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<T>;
|
fn from_value(v: &::ValueAndSpan) -> Option<T>;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If the provided EDN value is a PlainSymbol beginning with '?', return
|
/// If the provided EDN value is a PlainSymbol beginning with '?', return
|
||||||
/// it wrapped in a Variable. If not, return None.
|
/// it wrapped in a Variable. If not, return None.
|
||||||
/// TODO: intern strings. #398.
|
/// TODO: intern strings. #398.
|
||||||
impl FromValue<Variable> for Variable {
|
impl FromValue<Variable> for Variable {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<Variable> {
|
fn from_value(v: &::ValueAndSpan) -> Option<Variable> {
|
||||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||||
Variable::from_symbol(s)
|
Variable::from_symbol(s)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -83,7 +87,7 @@ impl FromValue<Variable> for Variable {
|
||||||
impl Variable {
|
impl Variable {
|
||||||
pub fn from_rc(sym: Rc<PlainSymbol>) -> Option<Variable> {
|
pub fn from_rc(sym: Rc<PlainSymbol>) -> Option<Variable> {
|
||||||
if sym.is_var_symbol() {
|
if sym.is_var_symbol() {
|
||||||
Some(Variable(sym))
|
Some(Variable(sym.clone()))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
@ -115,8 +119,8 @@ impl std::fmt::Display for Variable {
|
||||||
pub struct QueryFunction(pub PlainSymbol);
|
pub struct QueryFunction(pub PlainSymbol);
|
||||||
|
|
||||||
impl FromValue<QueryFunction> for QueryFunction {
|
impl FromValue<QueryFunction> for QueryFunction {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<QueryFunction> {
|
fn from_value(v: &::ValueAndSpan) -> Option<QueryFunction> {
|
||||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||||
QueryFunction::from_symbol(s)
|
QueryFunction::from_symbol(s)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -154,8 +158,8 @@ pub enum SrcVar {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromValue<SrcVar> for SrcVar {
|
impl FromValue<SrcVar> for SrcVar {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<SrcVar> {
|
fn from_value(v: &::ValueAndSpan) -> Option<SrcVar> {
|
||||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||||
SrcVar::from_symbol(s)
|
SrcVar::from_symbol(s)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -213,8 +217,8 @@ pub enum FnArg {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromValue<FnArg> for FnArg {
|
impl FromValue<FnArg> for FnArg {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<FnArg> {
|
fn from_value(v: &::ValueAndSpan) -> Option<FnArg> {
|
||||||
use crate::SpannedValue::*;
|
use SpannedValue::*;
|
||||||
match v.inner {
|
match v.inner {
|
||||||
Integer(x) => Some(FnArg::EntidOrInteger(x)),
|
Integer(x) => Some(FnArg::EntidOrInteger(x)),
|
||||||
PlainSymbol(ref x) if x.is_src_symbol() => SrcVar::from_symbol(x).map(FnArg::SrcVar),
|
PlainSymbol(ref x) if x.is_src_symbol() => SrcVar::from_symbol(x).map(FnArg::SrcVar),
|
||||||
|
@ -233,7 +237,7 @@ impl FromValue<FnArg> for FnArg {
|
||||||
{
|
{
|
||||||
Some(FnArg::Constant(x.clone().into()))
|
Some(FnArg::Constant(x.clone().into()))
|
||||||
}
|
}
|
||||||
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) | Bytes(_) => None,
|
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -242,18 +246,18 @@ impl FromValue<FnArg> for FnArg {
|
||||||
impl std::fmt::Display for FnArg {
|
impl std::fmt::Display for FnArg {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
FnArg::Variable(ref var) => write!(f, "{}", var),
|
&FnArg::Variable(ref var) => write!(f, "{}", var),
|
||||||
FnArg::SrcVar(ref var) => {
|
&FnArg::SrcVar(ref var) => {
|
||||||
if var == &SrcVar::DefaultSrc {
|
if var == &SrcVar::DefaultSrc {
|
||||||
write!(f, "$")
|
write!(f, "$")
|
||||||
} else {
|
} else {
|
||||||
write!(f, "{:?}", var)
|
write!(f, "{:?}", var)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
FnArg::EntidOrInteger(entid) => write!(f, "{}", entid),
|
&FnArg::EntidOrInteger(entid) => write!(f, "{}", entid),
|
||||||
FnArg::IdentOrKeyword(ref kw) => write!(f, "{}", kw),
|
&FnArg::IdentOrKeyword(ref kw) => write!(f, "{}", kw),
|
||||||
FnArg::Constant(ref constant) => write!(f, "{:?}", constant),
|
&FnArg::Constant(ref constant) => write!(f, "{:?}", constant),
|
||||||
FnArg::Vector(ref vec) => write!(f, "{:?}", vec),
|
&FnArg::Vector(ref vec) => write!(f, "{:?}", vec),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -261,7 +265,7 @@ impl std::fmt::Display for FnArg {
|
||||||
impl FnArg {
|
impl FnArg {
|
||||||
pub fn as_variable(&self) -> Option<&Variable> {
|
pub fn as_variable(&self) -> Option<&Variable> {
|
||||||
match self {
|
match self {
|
||||||
FnArg::Variable(ref v) => Some(v),
|
&FnArg::Variable(ref v) => Some(v),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -316,25 +320,27 @@ impl PatternNonValuePlace {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromValue<PatternNonValuePlace> for PatternNonValuePlace {
|
impl FromValue<PatternNonValuePlace> for PatternNonValuePlace {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<PatternNonValuePlace> {
|
fn from_value(v: &::ValueAndSpan) -> Option<PatternNonValuePlace> {
|
||||||
match v.inner {
|
match v.inner {
|
||||||
crate::SpannedValue::Integer(x) => {
|
::SpannedValue::Integer(x) => {
|
||||||
if x >= 0 {
|
if x >= 0 {
|
||||||
Some(PatternNonValuePlace::Entid(x))
|
Some(PatternNonValuePlace::Entid(x))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
crate::SpannedValue::PlainSymbol(ref x) => {
|
::SpannedValue::PlainSymbol(ref x) => {
|
||||||
if x.0.as_str() == "_" {
|
if x.0.as_str() == "_" {
|
||||||
Some(PatternNonValuePlace::Placeholder)
|
Some(PatternNonValuePlace::Placeholder)
|
||||||
} else if let Some(v) = Variable::from_symbol(x) {
|
|
||||||
Some(PatternNonValuePlace::Variable(v))
|
|
||||||
} else {
|
} else {
|
||||||
None
|
if let Some(v) = Variable::from_symbol(x) {
|
||||||
|
Some(PatternNonValuePlace::Variable(v))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
crate::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
|
::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -371,46 +377,45 @@ impl From<Keyword> for PatternValuePlace {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromValue<PatternValuePlace> for PatternValuePlace {
|
impl FromValue<PatternValuePlace> for PatternValuePlace {
|
||||||
fn from_value(v: &crate::ValueAndSpan) -> Option<PatternValuePlace> {
|
fn from_value(v: &::ValueAndSpan) -> Option<PatternValuePlace> {
|
||||||
match v.inner {
|
match v.inner {
|
||||||
crate::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
|
::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
|
||||||
crate::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
|
::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
|
||||||
Some(PatternValuePlace::Placeholder)
|
Some(PatternValuePlace::Placeholder)
|
||||||
}
|
}
|
||||||
crate::SpannedValue::PlainSymbol(ref x) => {
|
::SpannedValue::PlainSymbol(ref x) => {
|
||||||
Variable::from_symbol(x).map(PatternValuePlace::Variable)
|
Variable::from_symbol(x).map(PatternValuePlace::Variable)
|
||||||
}
|
}
|
||||||
crate::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
|
::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
|
||||||
crate::SpannedValue::Boolean(x) => {
|
::SpannedValue::Boolean(x) => {
|
||||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Boolean(x)))
|
Some(PatternValuePlace::Constant(NonIntegerConstant::Boolean(x)))
|
||||||
}
|
}
|
||||||
crate::SpannedValue::Float(x) => {
|
::SpannedValue::Float(x) => {
|
||||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Float(x)))
|
Some(PatternValuePlace::Constant(NonIntegerConstant::Float(x)))
|
||||||
}
|
}
|
||||||
crate::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
|
::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
|
||||||
NonIntegerConstant::BigInteger(x.clone()),
|
NonIntegerConstant::BigInteger(x.clone()),
|
||||||
)),
|
)),
|
||||||
crate::SpannedValue::Instant(x) => {
|
::SpannedValue::Instant(x) => {
|
||||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Instant(x)))
|
Some(PatternValuePlace::Constant(NonIntegerConstant::Instant(x)))
|
||||||
}
|
}
|
||||||
crate::SpannedValue::Text(ref x) =>
|
::SpannedValue::Text(ref x) =>
|
||||||
// TODO: intern strings. #398.
|
// TODO: intern strings. #398.
|
||||||
{
|
{
|
||||||
Some(PatternValuePlace::Constant(x.clone().into()))
|
Some(PatternValuePlace::Constant(x.clone().into()))
|
||||||
}
|
}
|
||||||
crate::SpannedValue::Uuid(ref u) => {
|
::SpannedValue::Uuid(ref u) => Some(PatternValuePlace::Constant(
|
||||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Uuid(*u)))
|
NonIntegerConstant::Uuid(u.clone()),
|
||||||
}
|
)),
|
||||||
|
|
||||||
// These don't appear in queries.
|
// These don't appear in queries.
|
||||||
crate::SpannedValue::Nil => None,
|
::SpannedValue::Nil => None,
|
||||||
crate::SpannedValue::NamespacedSymbol(_) => None,
|
::SpannedValue::NamespacedSymbol(_) => None,
|
||||||
crate::SpannedValue::Keyword(_) => None, // … yet.
|
::SpannedValue::Keyword(_) => None, // … yet.
|
||||||
crate::SpannedValue::Map(_) => None,
|
::SpannedValue::Map(_) => None,
|
||||||
crate::SpannedValue::List(_) => None,
|
::SpannedValue::List(_) => None,
|
||||||
crate::SpannedValue::Set(_) => None,
|
::SpannedValue::Set(_) => None,
|
||||||
crate::SpannedValue::Vector(_) => None,
|
::SpannedValue::Vector(_) => None,
|
||||||
crate::SpannedValue::Bytes(_) => None,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -493,15 +498,15 @@ pub enum PullAttributeSpec {
|
||||||
impl std::fmt::Display for PullConcreteAttribute {
|
impl std::fmt::Display for PullConcreteAttribute {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
PullConcreteAttribute::Ident(ref k) => write!(f, "{}", k),
|
&PullConcreteAttribute::Ident(ref k) => write!(f, "{}", k),
|
||||||
PullConcreteAttribute::Entid(i) => write!(f, "{}", i),
|
&PullConcreteAttribute::Entid(i) => write!(f, "{}", i),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Display for NamedPullAttribute {
|
impl std::fmt::Display for NamedPullAttribute {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
if let Some(ref alias) = self.alias {
|
if let &Some(ref alias) = &self.alias {
|
||||||
write!(f, "{} :as {}", self.attribute, alias)
|
write!(f, "{} :as {}", self.attribute, alias)
|
||||||
} else {
|
} else {
|
||||||
write!(f, "{}", self.attribute)
|
write!(f, "{}", self.attribute)
|
||||||
|
@ -512,8 +517,8 @@ impl std::fmt::Display for NamedPullAttribute {
|
||||||
impl std::fmt::Display for PullAttributeSpec {
|
impl std::fmt::Display for PullAttributeSpec {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
PullAttributeSpec::Wildcard => write!(f, "*"),
|
&PullAttributeSpec::Wildcard => write!(f, "*"),
|
||||||
PullAttributeSpec::Attribute(ref attr) => write!(f, "{}", attr),
|
&PullAttributeSpec::Attribute(ref attr) => write!(f, "{}", attr),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -548,10 +553,10 @@ impl Element {
|
||||||
/// Returns true if the element must yield only one value.
|
/// Returns true if the element must yield only one value.
|
||||||
pub fn is_unit(&self) -> bool {
|
pub fn is_unit(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
Element::Variable(_) => false,
|
&Element::Variable(_) => false,
|
||||||
Element::Pull(_) => false,
|
&Element::Pull(_) => false,
|
||||||
Element::Aggregate(_) => true,
|
&Element::Aggregate(_) => true,
|
||||||
Element::Corresponding(_) => true,
|
&Element::Corresponding(_) => true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -565,8 +570,8 @@ impl From<Variable> for Element {
|
||||||
impl std::fmt::Display for Element {
|
impl std::fmt::Display for Element {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Element::Variable(ref var) => write!(f, "{}", var),
|
&Element::Variable(ref var) => write!(f, "{}", var),
|
||||||
Element::Pull(Pull {
|
&Element::Pull(Pull {
|
||||||
ref var,
|
ref var,
|
||||||
ref patterns,
|
ref patterns,
|
||||||
}) => {
|
}) => {
|
||||||
|
@ -576,12 +581,12 @@ impl std::fmt::Display for Element {
|
||||||
}
|
}
|
||||||
write!(f, "])")
|
write!(f, "])")
|
||||||
}
|
}
|
||||||
Element::Aggregate(ref agg) => match agg.args.len() {
|
&Element::Aggregate(ref agg) => match agg.args.len() {
|
||||||
0 => write!(f, "({})", agg.func),
|
0 => write!(f, "({})", agg.func),
|
||||||
1 => write!(f, "({} {})", agg.func, agg.args[0]),
|
1 => write!(f, "({} {})", agg.func, agg.args[0]),
|
||||||
_ => write!(f, "({} {:?})", agg.func, agg.args),
|
_ => write!(f, "({} {:?})", agg.func, agg.args),
|
||||||
},
|
},
|
||||||
Element::Corresponding(ref var) => write!(f, "(the {})", var),
|
&Element::Corresponding(ref var) => write!(f, "(the {})", var),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -604,15 +609,20 @@ pub enum Limit {
|
||||||
///
|
///
|
||||||
/// ```rust
|
/// ```rust
|
||||||
/// # use edn::query::{Element, FindSpec, Variable};
|
/// # use edn::query::{Element, FindSpec, Variable};
|
||||||
/// let elements = vec![
|
|
||||||
/// Element::Variable(Variable::from_valid_name("?foo")),
|
|
||||||
/// Element::Variable(Variable::from_valid_name("?bar")),
|
|
||||||
/// ];
|
|
||||||
/// let rel = FindSpec::FindRel(elements);
|
|
||||||
///
|
///
|
||||||
/// if let FindSpec::FindRel(elements) = rel {
|
/// # fn main() {
|
||||||
/// assert_eq!(2, elements.len());
|
///
|
||||||
/// }
|
/// let elements = vec![
|
||||||
|
/// Element::Variable(Variable::from_valid_name("?foo")),
|
||||||
|
/// Element::Variable(Variable::from_valid_name("?bar")),
|
||||||
|
/// ];
|
||||||
|
/// let rel = FindSpec::FindRel(elements);
|
||||||
|
///
|
||||||
|
/// if let FindSpec::FindRel(elements) = rel {
|
||||||
|
/// assert_eq!(2, elements.len());
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// # }
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
@ -639,19 +649,19 @@ impl FindSpec {
|
||||||
pub fn is_unit_limited(&self) -> bool {
|
pub fn is_unit_limited(&self) -> bool {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
match self {
|
match self {
|
||||||
FindScalar(..) => true,
|
&FindScalar(..) => true,
|
||||||
FindTuple(..) => true,
|
&FindTuple(..) => true,
|
||||||
FindRel(..) => false,
|
&FindRel(..) => false,
|
||||||
FindColl(..) => false,
|
&FindColl(..) => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expected_column_count(&self) -> usize {
|
pub fn expected_column_count(&self) -> usize {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
match self {
|
match self {
|
||||||
FindScalar(..) => 1,
|
&FindScalar(..) => 1,
|
||||||
FindColl(..) => 1,
|
&FindColl(..) => 1,
|
||||||
FindTuple(ref elems) | &FindRel(ref elems) => elems.len(),
|
&FindTuple(ref elems) | &FindRel(ref elems) => elems.len(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -680,10 +690,10 @@ impl FindSpec {
|
||||||
pub fn columns<'s>(&'s self) -> Box<dyn Iterator<Item = &Element> + 's> {
|
pub fn columns<'s>(&'s self) -> Box<dyn Iterator<Item = &Element> + 's> {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
match self {
|
match self {
|
||||||
FindScalar(ref e) => Box::new(std::iter::once(e)),
|
&FindScalar(ref e) => Box::new(std::iter::once(e)),
|
||||||
FindColl(ref e) => Box::new(std::iter::once(e)),
|
&FindColl(ref e) => Box::new(std::iter::once(e)),
|
||||||
FindTuple(ref v) => Box::new(v.iter()),
|
&FindTuple(ref v) => Box::new(v.iter()),
|
||||||
FindRel(ref v) => Box::new(v.iter()),
|
&FindRel(ref v) => Box::new(v.iter()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -706,8 +716,8 @@ impl VariableOrPlaceholder {
|
||||||
|
|
||||||
pub fn var(&self) -> Option<&Variable> {
|
pub fn var(&self) -> Option<&Variable> {
|
||||||
match self {
|
match self {
|
||||||
VariableOrPlaceholder::Placeholder => None,
|
&VariableOrPlaceholder::Placeholder => None,
|
||||||
VariableOrPlaceholder::Variable(ref var) => Some(var),
|
&VariableOrPlaceholder::Variable(ref var) => Some(var),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -761,11 +771,11 @@ impl Binding {
|
||||||
/// ```
|
/// ```
|
||||||
pub fn is_valid(&self) -> bool {
|
pub fn is_valid(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
Binding::BindScalar(_) | &Binding::BindColl(_) => true,
|
&Binding::BindScalar(_) | &Binding::BindColl(_) => true,
|
||||||
Binding::BindRel(ref vars) | &Binding::BindTuple(ref vars) => {
|
&Binding::BindRel(ref vars) | &Binding::BindTuple(ref vars) => {
|
||||||
let mut acc = HashSet::<Variable>::new();
|
let mut acc = HashSet::<Variable>::new();
|
||||||
for var in vars {
|
for var in vars {
|
||||||
if let VariableOrPlaceholder::Variable(ref var) = *var {
|
if let &VariableOrPlaceholder::Variable(ref var) = var {
|
||||||
if !acc.insert(var.clone()) {
|
if !acc.insert(var.clone()) {
|
||||||
// It's invalid if there was an equal var already present in the set --
|
// It's invalid if there was an equal var already present in the set --
|
||||||
// i.e., we have a duplicate var.
|
// i.e., we have a duplicate var.
|
||||||
|
@ -822,7 +832,7 @@ impl Pattern {
|
||||||
entity: v_e,
|
entity: v_e,
|
||||||
attribute: k.to_reversed().into(),
|
attribute: k.to_reversed().into(),
|
||||||
value: e_v,
|
value: e_v,
|
||||||
tx,
|
tx: tx,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
return None;
|
return None;
|
||||||
|
@ -834,7 +844,7 @@ impl Pattern {
|
||||||
entity: e,
|
entity: e,
|
||||||
attribute: a,
|
attribute: a,
|
||||||
value: v,
|
value: v,
|
||||||
tx,
|
tx: tx,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -883,7 +893,10 @@ pub enum UnifyVars {
|
||||||
|
|
||||||
impl WhereClause {
|
impl WhereClause {
|
||||||
pub fn is_pattern(&self) -> bool {
|
pub fn is_pattern(&self) -> bool {
|
||||||
matches!(self, WhereClause::Pattern(_))
|
match self {
|
||||||
|
&WhereClause::Pattern(_) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -896,8 +909,8 @@ pub enum OrWhereClause {
|
||||||
impl OrWhereClause {
|
impl OrWhereClause {
|
||||||
pub fn is_pattern_or_patterns(&self) -> bool {
|
pub fn is_pattern_or_patterns(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
OrWhereClause::Clause(WhereClause::Pattern(_)) => true,
|
&OrWhereClause::Clause(WhereClause::Pattern(_)) => true,
|
||||||
OrWhereClause::And(ref clauses) => clauses.iter().all(|clause| clause.is_pattern()),
|
&OrWhereClause::And(ref clauses) => clauses.iter().all(|clause| clause.is_pattern()),
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -921,8 +934,8 @@ pub struct NotJoin {
|
||||||
impl NotJoin {
|
impl NotJoin {
|
||||||
pub fn new(unify_vars: UnifyVars, clauses: Vec<WhereClause>) -> NotJoin {
|
pub fn new(unify_vars: UnifyVars, clauses: Vec<WhereClause>) -> NotJoin {
|
||||||
NotJoin {
|
NotJoin {
|
||||||
unify_vars,
|
unify_vars: unify_vars,
|
||||||
clauses,
|
clauses: clauses,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1028,8 +1041,8 @@ impl ParsedQuery {
|
||||||
Ok(ParsedQuery {
|
Ok(ParsedQuery {
|
||||||
find_spec: find_spec.ok_or("expected :find")?,
|
find_spec: find_spec.ok_or("expected :find")?,
|
||||||
default_source: SrcVar::DefaultSrc,
|
default_source: SrcVar::DefaultSrc,
|
||||||
with: with.unwrap_or_default(),
|
with: with.unwrap_or(vec![]),
|
||||||
in_vars: in_vars.unwrap_or_default(),
|
in_vars: in_vars.unwrap_or(vec![]),
|
||||||
in_sources: BTreeSet::default(),
|
in_sources: BTreeSet::default(),
|
||||||
limit: limit.unwrap_or(Limit::None),
|
limit: limit.unwrap_or(Limit::None),
|
||||||
where_clauses: where_clauses.ok_or("expected :where")?,
|
where_clauses: where_clauses.ok_or("expected :where")?,
|
||||||
|
@ -1041,8 +1054,8 @@ impl ParsedQuery {
|
||||||
impl OrJoin {
|
impl OrJoin {
|
||||||
pub fn new(unify_vars: UnifyVars, clauses: Vec<OrWhereClause>) -> OrJoin {
|
pub fn new(unify_vars: UnifyVars, clauses: Vec<OrWhereClause>) -> OrJoin {
|
||||||
OrJoin {
|
OrJoin {
|
||||||
unify_vars,
|
unify_vars: unify_vars,
|
||||||
clauses,
|
clauses: clauses,
|
||||||
mentioned_vars: None,
|
mentioned_vars: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1051,8 +1064,8 @@ impl OrJoin {
|
||||||
/// every variable mentioned inside the join is also mentioned in the `UnifyVars` list.
|
/// every variable mentioned inside the join is also mentioned in the `UnifyVars` list.
|
||||||
pub fn is_fully_unified(&self) -> bool {
|
pub fn is_fully_unified(&self) -> bool {
|
||||||
match &self.unify_vars {
|
match &self.unify_vars {
|
||||||
UnifyVars::Implicit => true,
|
&UnifyVars::Implicit => true,
|
||||||
UnifyVars::Explicit(ref vars) => {
|
&UnifyVars::Explicit(ref vars) => {
|
||||||
// We know that the join list must be a subset of the vars in the pattern, or
|
// We know that the join list must be a subset of the vars in the pattern, or
|
||||||
// it would have failed validation. That allows us to simply compare counts here.
|
// it would have failed validation. That allows us to simply compare counts here.
|
||||||
// TODO: in debug mode, do a full intersection, and verify that our count check
|
// TODO: in debug mode, do a full intersection, and verify that our count check
|
||||||
@@ -1081,13 +1094,13 @@ impl ContainsVariables for WhereClause {
 fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
 use self::WhereClause::*;
 match self {
-OrJoin(ref o) => o.accumulate_mentioned_variables(acc),
-Pred(ref p) => p.accumulate_mentioned_variables(acc),
-Pattern(ref p) => p.accumulate_mentioned_variables(acc),
-NotJoin(ref n) => n.accumulate_mentioned_variables(acc),
-WhereFn(ref f) => f.accumulate_mentioned_variables(acc),
-TypeAnnotation(ref a) => a.accumulate_mentioned_variables(acc),
-RuleExpr => (),
+&OrJoin(ref o) => o.accumulate_mentioned_variables(acc),
+&Pred(ref p) => p.accumulate_mentioned_variables(acc),
+&Pattern(ref p) => p.accumulate_mentioned_variables(acc),
+&NotJoin(ref n) => n.accumulate_mentioned_variables(acc),
+&WhereFn(ref f) => f.accumulate_mentioned_variables(acc),
+&TypeAnnotation(ref a) => a.accumulate_mentioned_variables(acc),
+&RuleExpr => (),
 }
 }
 }

@@ -1096,12 +1109,12 @@ impl ContainsVariables for OrWhereClause {
 fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
 use self::OrWhereClause::*;
 match self {
-And(ref clauses) => {
+&And(ref clauses) => {
 for clause in clauses {
 clause.accumulate_mentioned_variables(acc)
 }
 }
-Clause(ref clause) => clause.accumulate_mentioned_variables(acc),
+&Clause(ref clause) => clause.accumulate_mentioned_variables(acc),
 }
 }
 }

@@ -1148,7 +1161,7 @@ impl ContainsVariables for NotJoin {
 impl ContainsVariables for Predicate {
 fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
 for arg in &self.args {
-if let FnArg::Variable(ref v) = *arg {
+if let &FnArg::Variable(ref v) = arg {
 acc_ref(acc, v)
 }
 }

@@ -1164,10 +1177,10 @@ impl ContainsVariables for TypeAnnotation {
 impl ContainsVariables for Binding {
 fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
 match self {
-Binding::BindScalar(ref v) | &Binding::BindColl(ref v) => acc_ref(acc, v),
-Binding::BindRel(ref vs) | &Binding::BindTuple(ref vs) => {
+&Binding::BindScalar(ref v) | &Binding::BindColl(ref v) => acc_ref(acc, v),
+&Binding::BindRel(ref vs) | &Binding::BindTuple(ref vs) => {
 for v in vs {
-if let VariableOrPlaceholder::Variable(ref v) = *v {
+if let &VariableOrPlaceholder::Variable(ref v) = v {
 acc_ref(acc, v);
 }
 }

@@ -1179,7 +1192,7 @@ impl ContainsVariables for Binding {
 impl ContainsVariables for WhereFn {
 fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
 for arg in &self.args {
-if let FnArg::Variable(ref v) = *arg {
+if let &FnArg::Variable(ref v) = arg {
 acc_ref(acc, v)
 }
 }
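The recurring change in the hunks above is only pattern style: with match ergonomics (default binding modes, stable since Rust 1.26), matching on `&self` no longer needs a leading `&` on every arm, while the older code spells the reference out. A small sketch with a hypothetical enum showing that the two styles are equivalent:

    enum Clause {
        Pred(String),
        RuleExpr,
    }

    // Newer style: the scrutinee is a reference, so `p` binds as `&String` automatically.
    fn pred_name(c: &Clause) -> Option<&String> {
        match c {
            Clause::Pred(p) => Some(p),
            Clause::RuleExpr => None,
        }
    }

    // Older style: the reference is written explicitly on every arm.
    fn pred_name_explicit(c: &Clause) -> Option<&String> {
        match c {
            &Clause::Pred(ref p) => Some(p),
            &Clause::RuleExpr => None,
        }
    }

    fn main() {
        let c = Clause::Pred("fulltext".to_string());
        assert_eq!(pred_name(&c), pred_name_explicit(&c));
    }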
@@ -10,7 +10,7 @@

 use std::fmt::{Display, Formatter, Write};

-use crate::namespaceable_name::NamespaceableName;
+use namespaceable_name::NamespaceableName;

 #[macro_export]
 macro_rules! ns_keyword {

@@ -130,7 +130,7 @@ impl NamespacedSymbol {
 }

 #[inline]
-pub fn components(&self) -> (&str, &str) {
+pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
 self.0.components()
 }
 }

@@ -180,7 +180,7 @@ impl Keyword {
 }

 #[inline]
-pub fn components(&self) -> (&str, &str) {
+pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
 self.0.components()
 }
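The two `components` changes are only about lifetime elision: with a single `&self` input, the elided signature and the explicit `<'a>` signature mean the same thing. A sketch with a hypothetical wrapper type:

    struct Name(String);

    impl Name {
        // Elided: the output lifetime is tied to `&self`.
        fn text(&self) -> &str {
            &self.0
        }

        // Fully explicit: identical meaning, just more noise.
        fn text_explicit<'a>(&'a self) -> &'a str {
            &self.0
        }
    }

    fn main() {
        let n = Name("edn".to_string());
        assert_eq!(n.text(), n.text_explicit());
    }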
@@ -8,7 +8,7 @@
 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
 // specific language governing permissions and limitations under the License.

-#![allow(redundant_semicolons)]
+#![allow(redundant_semicolon)]

 use std::cmp::{Ord, Ordering, PartialOrd};
 use std::collections::{BTreeMap, BTreeSet, LinkedList};

@@ -25,10 +25,8 @@ use num::BigInt;
 use ordered_float::OrderedFloat;
 use uuid::Uuid;

-use crate::symbols;
+use symbols;

-use bytes::Bytes;
-use hex::encode;
 /// Value represents one of the allowed values in an EDN string.
 #[derive(PartialEq, Eq, Hash, Clone, Debug)]
 pub enum Value {

@@ -54,7 +52,6 @@ pub enum Value {
 // See https://internals.rust-lang.org/t/implementing-hash-for-hashset-hashmap/3817/1
 Set(BTreeSet<Value>),
 Map(BTreeMap<Value, Value>),
-Bytes(Bytes),
 }

 /// `SpannedValue` is the parallel to `Value` but used in `ValueAndSpan`.

@@ -76,7 +73,6 @@ pub enum SpannedValue {
 List(LinkedList<ValueAndSpan>),
 Set(BTreeSet<ValueAndSpan>),
 Map(BTreeMap<ValueAndSpan, ValueAndSpan>),
-Bytes(Bytes),
 }

 /// Span represents the current offset (start, end) into the input string.

@@ -143,7 +139,7 @@ impl Value {
 /// But right now, it's used in the bootstrapper. We'll fix that soon.
 pub fn with_spans(self) -> ValueAndSpan {
 let s = self.to_pretty(120).unwrap();
-use crate::parse;
+use parse;
 let with_spans = parse::value(&s).unwrap();
 assert_eq!(self, with_spans.clone().without_spans());
 with_spans

@@ -176,7 +172,6 @@ impl From<SpannedValue> for Value {
 .map(|(x, y)| (x.without_spans(), y.without_spans()))
 .collect(),
 ),
-SpannedValue::Bytes(b) => Value::Bytes(b),
 }
 }
 }

@@ -214,9 +209,9 @@ macro_rules! def_from_option {
 macro_rules! def_is {
 ($name: ident, $pat: pat) => {
 pub fn $name(&self) -> bool {
-matches!(*self, $pat)
+match *self { $pat => true, _ => false }
 }
-};
+}
 }

 /// Creates `as_$TYPE` helper functions for Value or SpannedValue, like
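For reference, the two `def_is!` bodies above are behaviourally identical: `matches!(expr, pattern)` expands to exactly the explicit `match` with `true`/`false` arms. A standalone sketch:

    enum Value {
        Nil,
        Integer(i64),
    }

    fn is_integer_new(v: &Value) -> bool {
        matches!(*v, Value::Integer(_))
    }

    fn is_integer_old(v: &Value) -> bool {
        match *v {
            Value::Integer(_) => true,
            _ => false,
        }
    }

    fn main() {
        assert_eq!(is_integer_new(&Value::Integer(7)), is_integer_old(&Value::Integer(7)));
        assert_eq!(is_integer_new(&Value::Nil), is_integer_old(&Value::Nil));
    }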
@@ -236,12 +231,9 @@ macro_rules! def_as {
 macro_rules! def_as_ref {
 ($name: ident, $kind: path, $t: ty) => {
 pub fn $name(&self) -> Option<&$t> {
-match *self {
-$kind(ref v) => Some(v),
-_ => None,
-}
+match *self { $kind(ref v) => Some(v), _ => None }
 }
-};
+}
 }

 /// Creates `into_$TYPE` helper functions for Value or SpannedValue, like

@@ -333,18 +325,17 @@ macro_rules! def_common_value_methods {
 def_is!(is_list, $t::List(_));
 def_is!(is_set, $t::Set(_));
 def_is!(is_map, $t::Map(_));
-def_is!(is_bytes, $t::Bytes(_));

 pub fn is_keyword(&self) -> bool {
 match self {
-$t::Keyword(ref k) => !k.is_namespaced(),
+&$t::Keyword(ref k) => !k.is_namespaced(),
 _ => false,
 }
 }

 pub fn is_namespaced_keyword(&self) -> bool {
 match self {
-$t::Keyword(ref k) => k.is_namespaced(),
+&$t::Keyword(ref k) => k.is_namespaced(),
 _ => false,
 }
 }

@@ -366,25 +357,24 @@ macro_rules! def_common_value_methods {
 def_as_ref!(as_uuid, $t::Uuid, Uuid);
 def_as_ref!(as_symbol, $t::PlainSymbol, symbols::PlainSymbol);
 def_as_ref!(as_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol);
-def_as_ref!(as_bytes, $t::Bytes, Bytes);

 pub fn as_keyword(&self) -> Option<&symbols::Keyword> {
 match self {
-$t::Keyword(ref k) => Some(k),
+&$t::Keyword(ref k) => Some(k),
 _ => None,
 }
 }

 pub fn as_plain_keyword(&self) -> Option<&symbols::Keyword> {
 match self {
-$t::Keyword(ref k) if !k.is_namespaced() => Some(k),
+&$t::Keyword(ref k) if !k.is_namespaced() => Some(k),
 _ => None,
 }
 }

 pub fn as_namespaced_keyword(&self) -> Option<&symbols::Keyword> {
 match self {
-$t::Keyword(ref k) if k.is_namespaced() => Some(k),
+&$t::Keyword(ref k) if k.is_namespaced() => Some(k),
 _ => None,
 }
 }

@@ -404,7 +394,6 @@ macro_rules! def_common_value_methods {
 def_into!(into_uuid, $t::Uuid, Uuid,);
 def_into!(into_symbol, $t::PlainSymbol, symbols::PlainSymbol,);
 def_into!(into_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol,);
-def_into!(into_bytes, $t::Bytes, Bytes,);

 pub fn into_keyword(self) -> Option<symbols::Keyword> {
 match self {

@@ -475,7 +464,6 @@ macro_rules! def_common_value_methods {
 $t::List(_) => 13,
 $t::Set(_) => 14,
 $t::Map(_) => 15,
-$t::Bytes(_) => 16,
 }
 }

@@ -496,7 +484,6 @@ macro_rules! def_common_value_methods {
 $t::List(_) => true,
 $t::Set(_) => true,
 $t::Map(_) => true,
-$t::Bytes(_) => false,
 }
 }

@@ -534,7 +521,6 @@ macro_rules! def_common_value_ord {
 (&$t::List(ref a), &$t::List(ref b)) => b.cmp(a),
 (&$t::Set(ref a), &$t::Set(ref b)) => b.cmp(a),
 (&$t::Map(ref a), &$t::Map(ref b)) => b.cmp(a),
-(&$t::Bytes(ref a), &$t::Bytes(ref b)) => b.cmp(a),
 _ => $value.precedence().cmp(&$other.precedence()),
 }
 };
@@ -569,7 +555,7 @@ macro_rules! def_common_value_display {
 }
 // TODO: EDN escaping.
 $t::Text(ref v) => write!($f, "\"{}\"", v),
-$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.hyphenated().to_string()),
+$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.to_hyphenated().to_string()),
 $t::PlainSymbol(ref v) => v.fmt($f),
 $t::NamespacedSymbol(ref v) => v.fmt($f),
 $t::Keyword(ref v) => v.fmt($f),

@@ -601,10 +587,6 @@ macro_rules! def_common_value_display {
 }
 write!($f, " }}")
 }
-$t::Bytes(ref v) => {
-let s = encode(v);
-write!($f, "#bytes {}", s)
-}
 }
 };
 }
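The `Display` arm for UUIDs differs only in the uuid crate's method name: `hyphenated()` in the uuid 1.x line versus `to_hyphenated()` in 0.8. A short sketch assuming uuid 1.x:

    use uuid::Uuid;

    fn main() {
        let u = Uuid::parse_str("550e8400-e29b-41d4-a716-446655440000").unwrap();
        // On uuid 0.8 this would be written `u.to_hyphenated()`.
        println!("#uuid \"{}\"", u.hyphenated());
    }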
@@ -668,7 +650,7 @@ pub trait FromMicros {

 impl FromMicros for DateTime<Utc> {
 fn from_micros(ts: i64) -> Self {
-Utc.timestamp_opt(ts / 1_000_000, ((ts % 1_000_000).unsigned_abs() as u32) * 1_000).unwrap()
+Utc.timestamp(ts / 1_000_000, ((ts % 1_000_000).abs() as u32) * 1_000)
 }
 }

@@ -690,7 +672,7 @@ pub trait FromMillis {

 impl FromMillis for DateTime<Utc> {
 fn from_millis(ts: i64) -> Self {
-Utc.timestamp_opt(ts / 1_000, ((ts % 1_000).unsigned_abs() as u32) * 1_000).unwrap()
+Utc.timestamp(ts / 1_000, ((ts % 1_000).abs() as u32) * 1_000)
 }
 }
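The `FromMicros`/`FromMillis` hunks track chrono's move from the panicking `TimeZone::timestamp` constructor to `timestamp_opt`, which returns a `LocalResult`. A sketch of the newer form, assuming chrono 0.4:

    use chrono::{DateTime, TimeZone, Utc};

    fn from_micros(ts: i64) -> DateTime<Utc> {
        // For Utc the result is always LocalResult::Single, so unwrap() cannot fail here.
        Utc.timestamp_opt(ts / 1_000_000, ((ts % 1_000_000).unsigned_abs() as u32) * 1_000)
            .unwrap()
    }

    fn main() {
        // 2017-04-28T20:23:05.187Z expressed in microseconds.
        println!("{}", from_micros(1_493_410_985_187_000));
    }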
@@ -719,7 +701,7 @@ mod test {
 use std::f64;
 use std::iter::FromIterator;

-use crate::parse;
+use parse;

 use chrono::{DateTime, Utc};
 use num::BigInt;

@@ -752,12 +734,12 @@ mod test {
 fn test_print_edn() {
 assert_eq!("1234N", Value::from_bigint("1234").unwrap().to_string());

-let string = "[ 1 2 ( 7.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";
+let string = "[ 1 2 ( 3.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";

 let data = Value::Vector(vec![
 Value::Integer(1),
 Value::Integer(2),
-Value::List(LinkedList::from_iter(vec![Value::from_float(7.14)])),
+Value::List(LinkedList::from_iter(vec![Value::from_float(3.14)])),
 Value::Set(BTreeSet::from_iter(vec![Value::from_bigint("4").unwrap()])),
 Value::Map(BTreeMap::from_iter(vec![
 (Value::from_symbol("foo", "bar"), Value::Integer(42)),

@@ -859,10 +841,10 @@ mod test {

 assert!(n_v.clone().into_keyword().is_some());
 assert!(n_v.clone().into_plain_keyword().is_none());
-assert!(n_v.into_namespaced_keyword().is_some());
+assert!(n_v.clone().into_namespaced_keyword().is_some());

 assert!(p_v.clone().into_keyword().is_some());
 assert!(p_v.clone().into_plain_keyword().is_some());
-assert!(p_v.into_namespaced_keyword().is_none());
+assert!(p_v.clone().into_namespaced_keyword().is_none());
 }
 }
@@ -10,7 +10,7 @@
 #![allow(dead_code)]

-use crate::types::Value;
+use types::Value;

 /// Merge the EDN `Value::Map` instance `right` into `left`. Returns `None` if either `left` or
 /// `right` is not a `Value::Map`.

@@ -21,9 +21,9 @@ use crate::types::Value;
 /// TODO: implement `merge` for [Value], following the `concat`/`SliceConcatExt` pattern.
 pub fn merge(left: &Value, right: &Value) -> Option<Value> {
 match (left, right) {
-(Value::Map(l), Value::Map(r)) => {
+(&Value::Map(ref l), &Value::Map(ref r)) => {
 let mut result = l.clone();
-result.extend(r.clone());
+result.extend(r.clone().into_iter());
 Some(Value::Map(result))
 }
 _ => None,
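The `merge` hunk leans on `BTreeMap::extend`: every pair from the right-hand map is inserted, and on a key collision the right-hand value wins. A standalone illustration of that behaviour:

    use std::collections::BTreeMap;

    fn main() {
        let mut left = BTreeMap::from([("a", 1), ("b", 2)]);
        let right = BTreeMap::from([("b", 20), ("c", 3)]);

        // extend inserts every pair from `right`; existing keys are overwritten,
        // which is the semantics `merge` relies on.
        left.extend(right);

        assert_eq!(left, BTreeMap::from([("a", 1), ("b", 20), ("c", 3)]));
    }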
@@ -22,7 +22,7 @@ where
 T: Sized + Clone,
 {
 fn from_rc(val: Rc<T>) -> Self {
-val
+val.clone()
 }

 fn from_arc(val: Arc<T>) -> Self {

@@ -45,7 +45,7 @@ where
 }

 fn from_arc(val: Arc<T>) -> Self {
-val
+val.clone()
 }
 }
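The `from_rc`/`from_arc` difference above is only a redundant `clone()`: once the function owns the `Rc`/`Arc`, it can return it directly, and `clone()` would merely bump the reference count. A minimal sketch:

    use std::rc::Rc;
    use std::sync::Arc;

    // The function already owns the Rc/Arc, so it can be returned as-is.
    fn from_rc<T>(val: Rc<T>) -> Rc<T> {
        val
    }

    fn from_arc<T>(val: Arc<T>) -> Arc<T> {
        val
    }

    fn main() {
        assert_eq!(*from_rc(Rc::new(42)), 42);
        assert_eq!(*from_arc(Arc::new("shared")), "shared");
    }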
@@ -26,7 +26,7 @@ use chrono::{TimeZone, Utc};
 use edn::{
 parse, symbols,
 types::{Span, SpannedValue, Value, ValueAndSpan},
-utils, ParseError,
+utils, ParseErrorKind,
 };

 // Helper for making wrapped keywords with a namespace.

@@ -82,7 +82,6 @@ fn_parse_into_value!(vector);
 fn_parse_into_value!(set);
 fn_parse_into_value!(map);
 fn_parse_into_value!(value);
-fn_parse_into_value!(bytes);

 #[test]
 fn test_nil() {

@@ -317,38 +316,6 @@ fn test_uuid() {
 assert_eq!(value.to_pretty(100).unwrap(), s);
 }

-#[test]
-fn test_bytes() {
-assert!(parse::bytes("#bytes01 ").is_err()); // No whitespace.
-assert!(parse::bytes("#bytes _ZZ").is_err()); // No whitespace.
-assert!(parse::bytes("#bytes 01 ").is_err()); // No whitespace.
-assert!(parse::bytes("#01 ").is_err()); // No whitespace.
-
-let expected = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
-let s = format!("{} {}", "#bytes", hex::encode(expected.clone()));
-let actual: Value = parse::bytes(&s).expect("parse success").into();
-assert!(actual.is_bytes());
-assert_eq!(expected, actual.as_bytes().unwrap().to_vec());
-
-assert_eq!(
-self::bytes("#bytes 010203050403022a").unwrap(),
-Value::Bytes(bytes::Bytes::copy_from_slice(&vec!(
-1, 2, 3, 5, 4, 3, 2, 42
-)))
-);
-let data =
-r#"[ { :test/instant #inst "2018-01-01T11:00:00Z" :test/bytes #bytes 010203050403022a } ]"#;
-let result = parse::value(data).unwrap().without_spans().to_string();
-assert_eq!(data, result);
-}
-
-#[test]
-fn test_entities() {
-let d2 = r#"[ { :test/boolean true :test/long 33 :test/double 1.4 :test/string "foo" :test/keyword :foo/bar :test/uuid #uuid "12341234-1234-1234-1234-123412341234" :test/instant #inst "2018-01-01T11:00:00Z" :test/ref 1 :test/bytes #bytes 010203050403022a } ]"#;
-let r2 = parse::entities(d2);
-assert!(r2.is_ok());
-}
-
 #[test]
 fn test_inst() {
 assert!(parse::value("#inst\"2016-01-01T11:00:00.000Z\"").is_err()); // No whitespace.

@@ -617,12 +584,6 @@ fn test_value() {
 value("#inst \"2017-04-28T20:23:05.187Z\"").unwrap(),
 Instant(Utc.timestamp(1493410985, 187000000))
 );
-assert_eq!(
-value("#bytes 010203050403022a").unwrap(),
-Bytes(bytes::Bytes::copy_from_slice(&vec!(
-1, 2, 3, 5, 4, 3, 2, 42
-)))
-);
 }

 #[test]

@@ -1536,7 +1497,7 @@ macro_rules! def_test_into_type {
 }

 #[test]
-#[cfg_attr(feature = "cargo-clippy", allow(clippy::float_cmp, clippy::unit_cmp))]
+#[cfg_attr(feature = "cargo-clippy", allow(float_cmp))]
 fn test_is_and_as_type_helper_functions() {
 let max_i64 = i64::max_value().to_bigint().unwrap();
 let bigger = &max_i64 * &max_i64;
@@ -1,31 +0,0 @@
-;; movie schema
-[{:db/ident :movie/title
-:db/valueType :db.type/string
-:db/cardinality :db.cardinality/one
-:db/doc "The title of the movie"}
-
-{:db/ident :movie/genre
-:db/valueType :db.type/string
-:db/cardinality :db.cardinality/one
-:db/doc "The genre of the movie"}
-
-{:db/ident :movie/release-year
-:db/valueType :db.type/long
-:db/cardinality :db.cardinality/one
-:db/doc "The year the movie was released in theaters"}]
-
-;; a few movies
-[{:movie/title "The Goonies"
-:movie/genre "action/adventure"
-:movie/release-year 1985}
-{:movie/title "Commando"
-:movie/genre "thriller/action"
-:movie/release-year 1985}
-{:movie/title "Repo Man"
-:movie/genre "punk dystopia"
-:movie/release-year 1984}]
-
-;; query
-[:find ?movie-title
-:where [_ :movie/title ?movie-title]]
@@ -1,6 +1,6 @@
 [package]
 name = "mentat_ffi"
-version = "0.0.2"
+version = "0.0.1"
 authors = ["Emily Toop <etoop@mozilla.com>"]

 [lib]

@@ -13,7 +13,7 @@ sqlcipher = ["mentat/sqlcipher"]
 bundled_sqlite3 = ["mentat/bundled_sqlite3"]

 [dependencies]
-libc = "~0.2"
+libc = "0.2"

 [dependencies.mentat]
 path = "../"

ffi/src/lib.rs (305 changed lines)
@@ -70,7 +70,6 @@
 //! (for `Result<(), T>`). Callers are responsible for freeing the `message` field of `ExternError`.

 #![allow(unused_doc_comments)]
-#![allow(clippy::missing_safety_doc)]

 extern crate core;
 extern crate libc;

@@ -177,12 +176,6 @@ pub unsafe extern "C" fn store_open(uri: *const c_char, error: *mut ExternError)
 }

 /// Variant of store_open that opens an encrypted database.
-///
-/// # Safety
-///
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `store_destroy` is provided for releasing the memory for this
-/// pointer type.
 #[cfg(feature = "sqlcipher")]
 #[no_mangle]
 pub unsafe extern "C" fn store_open_encrypted(

@@ -253,11 +246,6 @@ pub unsafe extern "C" fn in_progress_transact<'m>(
 /// Commit all the transacts that have been performed using this
 /// in progress transaction.
 ///
-/// # Safety
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `tx_report_destroy` is provided for releasing the memory for this
-/// pointer type.
-///
 /// TODO: Document the errors that can result from transact
 #[no_mangle]
 pub unsafe extern "C" fn in_progress_commit<'m>(

@@ -272,12 +260,6 @@ pub unsafe extern "C" fn in_progress_commit<'m>(
 /// Rolls back all the transacts that have been performed using this
 /// in progress transaction.
 ///
-/// # Safety
-///
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `tx_report_destroy` is provided for releasing the memory for this
-/// pointer type.
-///
 /// TODO: Document the errors that can result from rollback
 #[no_mangle]
 pub unsafe extern "C" fn in_progress_rollback<'m>(
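The `/// # Safety` sections being dropped throughout this file are the doc convention that clippy's `missing_safety_doc` lint checks for on public `unsafe` functions, which is also why master carries `#![allow(clippy::missing_safety_doc)]` above. A hedged sketch of that convention on a hypothetical helper (not part of this FFI):

    /// Reads an entity id through a raw pointer handed over from C.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, properly aligned, and point to a live `i64`
    /// that is not mutated for the duration of the call.
    pub unsafe fn read_entid(ptr: *const i64) -> i64 {
        *ptr
    }

    fn main() {
        let id: i64 = 65536;
        // Safe: `id` outlives the call and the pointer is valid and aligned.
        assert_eq!(unsafe { read_entid(&id) }, 65536);
    }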
@@ -360,7 +342,7 @@ pub unsafe extern "C" fn store_in_progress_builder<'a, 'c>(
 let store = &mut *store;
 let result = store
 .begin_transaction()
-.map(|in_progress| in_progress.builder());
+.and_then(|in_progress| Ok(in_progress.builder()));
 translate_result(result, error)
 }

@@ -383,7 +365,7 @@ pub unsafe extern "C" fn store_entity_builder_from_temp_id<'a, 'c>(
 let temp_id = c_char_to_string(temp_id);
 let result = store
 .begin_transaction()
-.map(|in_progress| in_progress.builder().describe_tempid(&temp_id));
+.and_then(|in_progress| Ok(in_progress.builder().describe_tempid(&temp_id)));
 translate_result(result, error)
 }

@@ -405,7 +387,7 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
 let store = &mut *store;
 let result = store
 .begin_transaction()
-.map(|in_progress| in_progress.builder().describe(KnownEntid(entid)));
+.and_then(|in_progress| Ok(in_progress.builder().describe(KnownEntid(entid))));
 translate_result(result, error)
 }
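The three builder hunks above swap `and_then(|x| Ok(...))` for `map(...)`; the two are interchangeable whenever the closure cannot fail, and `map` states that directly. A small illustration:

    fn begin() -> Result<i32, String> {
        Ok(7)
    }

    fn main() {
        // and_then(|x| Ok(f(x))) and map(f) produce the same Result when the
        // closure itself never returns an error.
        let via_and_then: Result<i32, String> = begin().and_then(|x| Ok(x + 1));
        let via_map: Result<i32, String> = begin().map(|x| x + 1);
        assert_eq!(via_and_then, via_map);
    }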
@@ -417,12 +399,10 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_string(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_string<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const c_char,

@@ -442,13 +422,10 @@ pub unsafe extern "C" fn in_progress_builder_add_string(
 /// If `entid` is not present in the store.
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
-///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_long(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_long<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -469,13 +446,10 @@ pub unsafe extern "C" fn in_progress_builder_add_long(
 /// If `kw` is not a valid attribute in the store.
 /// If `value` is not present as an Entid in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
-///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_ref(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_ref<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -497,12 +471,10 @@ pub unsafe extern "C" fn in_progress_builder_add_ref(
 /// If `value` is not present as an attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_keyword(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_keyword<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const c_char,

@@ -523,12 +495,10 @@ pub unsafe extern "C" fn in_progress_builder_add_keyword(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_boolean(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_boolean<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: bool,

@@ -549,12 +519,10 @@ pub unsafe extern "C" fn in_progress_builder_add_boolean(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_double(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_double<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: f64,

@@ -575,12 +543,10 @@ pub unsafe extern "C" fn in_progress_builder_add_double(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_timestamp(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_timestamp<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -601,12 +567,10 @@ pub unsafe extern "C" fn in_progress_builder_add_timestamp(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_add_uuid(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_add_uuid<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const [u8; 16],

@@ -629,12 +593,10 @@ pub unsafe extern "C" fn in_progress_builder_add_uuid(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_string(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_string<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const c_char,

@@ -655,12 +617,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_string(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_long(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_long<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -681,12 +641,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_long(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_ref(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_ref<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -707,12 +665,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_ref(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_keyword(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_keyword<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const c_char,

@@ -733,12 +689,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_keyword(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_boolean(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_boolean<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: bool,

@@ -759,12 +713,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_boolean(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_double(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_double<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: f64,

@@ -785,12 +737,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_double(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_timestamp(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_timestamp<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: c_longlong,

@@ -811,13 +761,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_timestamp(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
 ///
-/// # Safety
-/// TODO:
 // TODO don't panic if the UUID is not valid - return result instead.
+//
 // TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_retract_uuid(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_retract_uuid<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 entid: c_longlong,
 kw: *const c_char,
 value: *const [u8; 16],

@@ -837,12 +786,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_uuid(
 ///
 /// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
 ///
-/// # Safety
-/// TODO:
 // TODO: Document the errors that can result from transact
 #[no_mangle]
-pub unsafe extern "C" fn in_progress_builder_commit(
-builder: *mut InProgressBuilder,
+pub unsafe extern "C" fn in_progress_builder_commit<'a, 'c>(
+builder: *mut InProgressBuilder<'a, 'c>,
 error: *mut ExternError,
 ) -> *mut TxReport {
 assert_not_null!(builder);
@@ -881,12 +828,10 @@ pub unsafe extern "C" fn in_progress_builder_transact<'a, 'c>(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_string(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_string<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: *const c_char,
 error: *mut ExternError,

@@ -906,12 +851,10 @@ pub unsafe extern "C" fn entity_builder_add_string(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_long(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_long<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -931,12 +874,10 @@ pub unsafe extern "C" fn entity_builder_add_long(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_ref(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_ref<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -956,12 +897,10 @@ pub unsafe extern "C" fn entity_builder_add_ref(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_keyword(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_keyword<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: *const c_char,
 error: *mut ExternError,

@@ -981,12 +920,10 @@ pub unsafe extern "C" fn entity_builder_add_keyword(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_boolean(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_boolean<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: bool,
 error: *mut ExternError,

@@ -1006,12 +943,10 @@ pub unsafe extern "C" fn entity_builder_add_boolean(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_double(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_double<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: f64,
 error: *mut ExternError,

@@ -1031,12 +966,10 @@ pub unsafe extern "C" fn entity_builder_add_double(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_timestamp(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_timestamp<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -1056,12 +989,10 @@ pub unsafe extern "C" fn entity_builder_add_timestamp(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_add_uuid(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_add_uuid<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: *const [u8; 16],
 error: *mut ExternError,

@@ -1083,12 +1014,10 @@ pub unsafe extern "C" fn entity_builder_add_uuid(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_string(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_string<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: *const c_char,
 error: *mut ExternError,

@@ -1108,12 +1037,10 @@ pub unsafe extern "C" fn entity_builder_retract_string(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_long(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_long<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -1133,12 +1060,10 @@ pub unsafe extern "C" fn entity_builder_retract_long(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_ref(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_ref<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -1158,12 +1083,10 @@ pub unsafe extern "C" fn entity_builder_retract_ref(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_keyword(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_keyword<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: *const c_char,
 error: *mut ExternError,

@@ -1183,12 +1106,10 @@ pub unsafe extern "C" fn entity_builder_retract_keyword(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_boolean(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_boolean<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: bool,
 error: *mut ExternError,

@@ -1208,12 +1129,10 @@ pub unsafe extern "C" fn entity_builder_retract_boolean(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_double(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_double<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: f64,
 error: *mut ExternError,

@@ -1233,12 +1152,10 @@ pub unsafe extern "C" fn entity_builder_retract_double(
 /// If `kw` is not a valid attribute in the store.
 /// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
 ///
-/// # Safety
-/// TODO:
-// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
+// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_retract_timestamp(
-builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_retract_timestamp<'a, 'c>(
+builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
 kw: *const c_char,
 value: c_longlong,
 error: *mut ExternError,

@@ -1258,13 +1175,11 @@ pub unsafe extern "C" fn entity_builder_retract_timestamp(
 /// If `kw` is not a valid attribute in the store.
|
/// If `kw` is not a valid attribute in the store.
|
||||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||||
///
|
///
|
||||||
/// # Safety
|
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||||
/// TODO:
|
// TODO don't panic if the UUID is not valid - return result instead.
|
||||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
|
||||||
// TODO: don't panic if the UUID is not valid - return result instead.
|
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
pub unsafe extern "C" fn entity_builder_retract_uuid(
|
pub unsafe extern "C" fn entity_builder_retract_uuid<'a, 'c>(
|
||||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||||
kw: *const c_char,
|
kw: *const c_char,
|
||||||
value: *const [u8; 16],
|
value: *const [u8; 16],
|
||||||
error: *mut ExternError,
|
error: *mut ExternError,
|
||||||
|
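The eight retract wrappers above differ only in the exported name and the value type, which is exactly what the "Generalise with macro" TODO carried on both sides of this diff points at. Below is a hypothetical, self-contained sketch of that macro approach; `EntityBuilder`, `ExternError`, and the `retract_*` methods here are simplified stand-ins, not the crate's real types, and the keyword/error plumbing of the real wrappers is only hinted at in comments.

```rust
use std::os::raw::{c_char, c_longlong};

// Stand-in types so the sketch compiles on its own; the real crate's
// EntityBuilder<InProgressBuilder<'a, 'c>> and ExternError are richer than this.
pub struct EntityBuilder;
impl EntityBuilder {
    fn retract_long(&mut self, _kw: &str, _value: i64) {}
    fn retract_boolean(&mut self, _kw: &str, _value: bool) {}
}
#[repr(C)]
pub struct ExternError {
    pub message: *mut c_char,
}

// One macro invocation per exported symbol instead of a hand-written body each.
macro_rules! define_retract {
    ($name:ident, $c_ty:ty, $method:ident) => {
        #[no_mangle]
        pub unsafe extern "C" fn $name(
            builder: *mut EntityBuilder,
            _kw: *const c_char,
            value: $c_ty,
            _error: *mut ExternError,
        ) {
            assert!(!builder.is_null());
            let builder = &mut *builder;
            // The real wrappers convert `_kw` via c_char_to_string/kw_from_string
            // and report failures through `_error`; both are elided here.
            builder.$method(":foo/bar", value.into());
        }
    };
}

define_retract!(entity_builder_retract_long, c_longlong, retract_long);
define_retract!(entity_builder_retract_boolean, bool, retract_boolean);
```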
@@ -1306,12 +1221,10 @@ pub unsafe extern "C" fn entity_builder_transact<'a, 'c>(
 ///
 /// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
 ///
-/// # Safety
-/// TODO:
 /// TODO: Document the errors that can result from transact
 #[no_mangle]
-pub unsafe extern "C" fn entity_builder_commit(
-    builder: *mut EntityBuilder<InProgressBuilder>,
+pub unsafe extern "C" fn entity_builder_commit<'a, 'c>(
+    builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
     error: *mut ExternError,
 ) -> *mut TxReport {
     assert_not_null!(builder);

@@ -1321,8 +1234,6 @@ pub unsafe extern "C" fn entity_builder_commit(

 /// Performs a single transaction against the store.
 ///
-/// # Safety
-/// TODO:
 /// TODO: Document the errors that can result from transact
 #[no_mangle]
 pub unsafe extern "C" fn store_transact(

@@ -1342,7 +1253,6 @@ pub unsafe extern "C" fn store_transact(
 }

 /// Fetches the `tx_id` for the given [TxReport](mentat::TxReport)`.
-/// # Safety
 #[no_mangle]
 pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_longlong {
     assert_not_null!(tx_report);

@@ -1351,7 +1261,6 @@ pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_long
 }

 /// Fetches the `tx_instant` for the given [TxReport](mentat::TxReport).
-/// # Safety
 #[no_mangle]
 pub unsafe extern "C" fn tx_report_get_tx_instant(tx_report: *mut TxReport) -> c_longlong {
     assert_not_null!(tx_report);
@@ -1374,7 +1283,7 @@ pub unsafe extern "C" fn tx_report_entity_for_temp_id(
     let tx_report = &*tx_report;
     let key = c_char_to_string(tempid);
     if let Some(entid) = tx_report.tempids.get(key) {
-        Box::into_raw(Box::new(*entid as c_longlong))
+        Box::into_raw(Box::new(entid.clone() as c_longlong))
     } else {
         std::ptr::null_mut()
     }
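For readers less familiar with the FFI idiom in `tx_report_entity_for_temp_id` above: a found entid is boxed and handed to the C caller as a raw pointer, and absence becomes a null pointer the consumer must check. A minimal standalone sketch of that convention (not the crate's actual function):

```rust
use std::os::raw::c_longlong;

// Boxes a value for the C caller, or returns null when there is nothing to return.
// The caller owns the box and must hand it back to a matching destroy function.
fn optional_to_ptr(value: Option<i64>) -> *mut c_longlong {
    match value {
        Some(v) => Box::into_raw(Box::new(v as c_longlong)),
        None => std::ptr::null_mut(),
    }
}

fn main() {
    let present = optional_to_ptr(Some(42));
    assert!(!present.is_null());
    // Reclaim ownership so the allocation is freed.
    unsafe { drop(Box::from_raw(present)) };
    assert!(optional_to_ptr(None).is_null());
}
```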
@@ -1499,7 +1408,7 @@ pub unsafe extern "C" fn query_builder_bind_ref_kw(
     let kw = kw_from_string(c_char_to_string(value));
     let query_builder = &mut *query_builder;
     if let Some(err) = query_builder.bind_ref_from_kw(&var, kw).err() {
-        std::panic::panic_any(err);
+        panic!(err);
     }
 }

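The only change in the hunk above swaps `std::panic::panic_any(err)` for `panic!(err)`. On newer toolchains `panic!` with a single non-string argument is rejected (2021 edition), and `panic_any` is the supported way to panic with an arbitrary payload. A tiny standalone illustration:

```rust
use std::io::{Error, ErrorKind};

fn main() {
    let err = Error::new(ErrorKind::Other, "bind_ref_from_kw failed");
    // `panic!(err)` is an error in the 2021 edition; panic_any accepts any
    // payload that is `Any + Send`, such as this io::Error.
    std::panic::panic_any(err);
}
```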
@@ -2159,7 +2068,7 @@ pub unsafe extern "C" fn store_register_observer(
         .map(|(tx_id, changes)| {
             (
                 *tx_id,
-                changes.iter().map(|eid| *eid as c_longlong).collect(),
+                changes.into_iter().map(|eid| *eid as c_longlong).collect(),
             )
         })
         .collect();
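A small aside on the one-line change above: `iter()` borrows the collection and yields references, while `into_iter()` consumes it; either way the hunk maps entids to `c_longlong`. Illustrative snippet with simplified stand-in types:

```rust
use std::os::raw::c_longlong;

fn main() {
    let changes: Vec<i64> = vec![10, 20, 30];

    // Borrowing: `changes` stays usable afterwards.
    let by_ref: Vec<c_longlong> = changes.iter().map(|eid| *eid as c_longlong).collect();

    // Consuming: `changes` is moved into the iterator.
    let by_value: Vec<c_longlong> = changes.into_iter().map(|eid| eid as c_longlong).collect();

    assert_eq!(by_ref, by_value);
}
```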
@@ -14,12 +14,9 @@ pub mod strings {

     use mentat::Keyword;

-    /// # Safety
-    ///
-    /// This function TODO
-    pub unsafe fn c_char_to_string(cchar: *const c_char) -> &'static str {
+    pub fn c_char_to_string(cchar: *const c_char) -> &'static str {
         assert!(!cchar.is_null());
-        let c_str = CStr::from_ptr(cchar);
+        let c_str = unsafe { CStr::from_ptr(cchar) };
         c_str.to_str().unwrap_or("")
     }

@@ -32,8 +29,8 @@ pub mod strings {

     pub fn kw_from_string(keyword_string: &'static str) -> Keyword {
         // TODO: validate. The input might not be a keyword!
-        let attr_name = keyword_string.trim_start_matches(':');
-        let parts: Vec<&str> = attr_name.split('/').collect();
+        let attr_name = keyword_string.trim_start_matches(":");
+        let parts: Vec<&str> = attr_name.split("/").collect();
         Keyword::namespaced(parts[0], parts[1])
     }
 }
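The `trim_start_matches(':')` versus `trim_start_matches(":")` and `split('/')` versus `split("/")` differences above are purely stylistic: a `char` pattern and a one-character `&str` pattern behave identically. A standalone re-implementation of the same parsing, with a tuple standing in for `Keyword::namespaced`, makes the behaviour (and the unvalidated-input caveat the TODO mentions) concrete:

```rust
fn parse_keyword(keyword_string: &str) -> (String, String) {
    // ":foo/bar" -> "foo/bar"
    let attr_name = keyword_string.trim_start_matches(':');
    // "foo/bar" -> ["foo", "bar"]
    let parts: Vec<&str> = attr_name.split('/').collect();
    // Like the original, this panics on input without a '/', e.g. ":foo".
    (parts[0].to_string(), parts[1].to_string())
}

fn main() {
    assert_eq!(
        parse_keyword(":foo/bar"),
        ("foo".to_string(), "bar".to_string())
    );
}
```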
@@ -110,8 +107,6 @@ pub mod error {
     /// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
     /// message (which was allocated on the heap and should eventually be freed) into
     /// `error.message`
-    /// # Safety
-    /// Be afraid... TODO
     pub unsafe fn translate_result<T, E>(result: Result<T, E>, error: *mut ExternError) -> *mut T
     where
         E: Display,

@@ -138,8 +133,6 @@ pub mod error {
     /// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
     /// message (which was allocated on the heap and should eventually be freed) into
     /// `error.message`
-    /// # Safety
-    /// Be afraid... TODO
    pub unsafe fn translate_opt_result<T, E>(
        result: Result<Option<T>, E>,
        error: *mut ExternError,

@@ -162,8 +155,6 @@ pub mod error {

     /// Identical to `translate_result`, but with additional type checking for the case that we have
     /// a `Result<(), E>` (which we're about to drop on the floor).
-    /// # Safety
-    /// Be afraid... TODO
     pub unsafe fn translate_void_result<E>(result: Result<(), E>, error: *mut ExternError)
     where
         E: Display,
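The doc comments above describe the full contract of `translate_result` and friends even though their bodies fall outside these hunks: success is boxed into a raw pointer, failure becomes a null pointer plus a heap-allocated message in `error.message`. A hedged, self-contained sketch of that contract (the real `ExternError` layout and helpers may differ):

```rust
use std::ffi::CString;
use std::fmt::Display;
use std::os::raw::c_char;

#[repr(C)]
pub struct ExternError {
    pub message: *mut c_char,
}

/// Sketch only: box the success value, or null out the return and store the
/// error text (heap-allocated, to be freed by the consumer) in `error.message`.
pub unsafe fn translate_result<T, E: Display>(
    result: Result<T, E>,
    error: *mut ExternError,
) -> *mut T {
    match result {
        Ok(value) => Box::into_raw(Box::new(value)),
        Err(e) => {
            let msg = CString::new(e.to_string()).unwrap_or_default();
            (*error).message = msg.into_raw();
            std::ptr::null_mut()
        }
    }
}
```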
@@ -1,6 +1,6 @@
 [package]
 name = "public_traits"
-version = "0.0.2"
+version = "0.0.1"
 workspace = ".."

 [lib]

@@ -13,23 +13,15 @@ sqlcipher = ["rusqlite/sqlcipher"]
 syncable = ["tolstoy_traits", "hyper", "serde_json"]

 [dependencies]
-failure = "~0.1"
-failure_derive = "~0.1"
-http = "~0.2"
-tokio = { version = "1.8.0", features = ["full"] }
-uuid = "~1.0"
+failure = "0.1"
+failure_derive = "0.1"
+http = "0.2"
+tokio-core = "0.1"
+uuid = "0.8"

 [dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+version = "0.21"
+features = ["limits"]

-[dependencies.hyper]
-version = "~0.14"
-optional = true
-
-[dependencies.serde_json]
-version = "~1.0"
-optional = true
-
 [dependencies.edn]
 path = "../edn"

@@ -55,3 +47,11 @@ path = "../sql-traits"
 [dependencies.tolstoy_traits]
 path = "../tolstoy-traits"
 optional = true
+
+[dependencies.hyper]
+version = "0.13"
+optional = true
+
+[dependencies.serde_json]
+version = "1.0"
+optional = true
@@ -16,6 +16,12 @@ use std::collections::BTreeSet;
 use std::error::Error;

 use rusqlite;
+use failure::{
+    Backtrace,
+    Context,
+    Fail,
+};
+use std::fmt;
 use uuid;

 use edn;

@@ -39,8 +45,51 @@ use serde_json;

 pub type Result<T> = std::result::Result<T, MentatError>;

+#[derive(Debug)]
+pub struct MentatError(Box<Context<MentatErrorKind>>);
+
+impl Fail for MentatError {
+    #[inline]
+    fn cause(&self) -> Option<&Fail> {
+        self.0.cause()
+    }
+
+    #[inline]
+    fn backtrace(&self) -> Option<&Backtrace> {
+        self.0.backtrace()
+    }
+}
+
+impl fmt::Display for MentatError {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&*self.0, f)
+    }
+}
+
+impl MentatError {
+    #[inline]
+    pub fn kind(&self) -> &MentatErrorKind {
+        &*self.0.get_context()
+    }
+}
+
+impl From<MentatErrorKind> for MentatError {
+    #[inline]
+    fn from(kind: MentatErrorKind) -> MentatError {
+        MentatError(Box::new(Context::new(kind)))
+    }
+}
+
+impl From<Context<MentatErrorKind>> for MentatError {
+    #[inline]
+    fn from(inner: Context<MentatErrorKind>) -> MentatError {
+        MentatError(Box::new(inner))
+    }
+}
+
 #[derive(Debug, Fail)]
-pub enum MentatError {
+pub enum MentatErrorKind {
     #[fail(display = "bad uuid {}", _0)]
     BadUuid(String),

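Illustrative only, building on the types added in the hunk above: once `MentatError` wraps a `failure::Context<MentatErrorKind>`, call sites construct errors from a kind and inspect them through `kind()`. The helpers below are hypothetical and assume those definitions are in scope.

```rust
// Hypothetical caller-side code, not part of the diff.
fn ensure_uuid_ish(input: &str) -> Result<(), MentatError> {
    if input.len() != 36 {
        // From<MentatErrorKind> for MentatError makes `.into()` (or `?`) work here.
        return Err(MentatErrorKind::BadUuid(input.to_string()).into());
    }
    Ok(())
}

fn describe(err: &MentatError) -> String {
    match err.kind() {
        MentatErrorKind::BadUuid(s) => format!("bad uuid: {}", s),
        other => format!("{}", other),
    }
}
```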
@@ -140,9 +189,67 @@ pub enum MentatError {
     SerializationError(#[cause] serde_json::Error),
 }

+impl From<std::io::Error> for MentatErrorKind {
+    fn from(error: std::io::Error) -> MentatErrorKind {
+        MentatErrorKind::IoError(error)
+    }
+}
+
+impl From<rusqlite::Error> for MentatErrorKind {
+    fn from(error: rusqlite::Error) -> MentatErrorKind {
+        MentatErrorKind::RusqliteError(error.to_string())
+    }
+}
+
+impl From<edn::ParseError> for MentatErrorKind {
+    fn from(error: edn::ParseError) -> MentatErrorKind {
+        MentatErrorKind::EdnParseError(error)
+    }
+}
+
+impl From<mentat_db::DbError> for MentatErrorKind {
+    fn from(error: mentat_db::DbError) -> MentatErrorKind {
+        MentatErrorKind::DbError(error)
+    }
+}
+
+impl From<mentat_query_algebrizer::AlgebrizerError> for MentatErrorKind {
+    fn from(error: mentat_query_algebrizer::AlgebrizerError) -> MentatErrorKind {
+        MentatErrorKind::AlgebrizerError(error)
+    }
+}
+
+impl From<mentat_query_projector::ProjectorError> for MentatErrorKind {
+    fn from(error: mentat_query_projector::ProjectorError) -> MentatErrorKind {
+        MentatErrorKind::ProjectorError(error)
+    }
+}
+
+impl From<mentat_query_pull::PullError> for MentatErrorKind {
+    fn from(error: mentat_query_pull::PullError) -> MentatErrorKind {
+        MentatErrorKind::PullError(error)
+    }
+}
+
+impl From<mentat_sql::SQLError> for MentatErrorKind {
+    fn from(error: mentat_sql::SQLError) -> MentatErrorKind {
+        MentatErrorKind::SQLError(error)
+    }
+}
+
+#[cfg(feature = "syncable")]
+impl From<mentat_tolstoy::TolstoyError> for MentatErrorKind {
+    fn from(error: mentat_tolstoy::TolstoyError) -> MentatErrorKind {
+        MentatErrorKind::TolstoyError(error)
+    }
+}
+
+// XXX reduce dupe if this isn't completely throwaway
+
+
 impl From<std::io::Error> for MentatError {
     fn from(error: std::io::Error) -> Self {
-        MentatError::IoError(error)
+        MentatError::from(error).into()
     }
 }

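A short usage sketch of the two-step conversion the added impls enable, assuming the `MentatErrorKind`/`MentatError` definitions from the hunks above are in scope (hypothetical glue, not part of the diff):

```rust
use std::io;

// io::Error -> MentatErrorKind (added above) -> MentatError (Context wrapper).
fn open_failure_to_mentat(e: io::Error) -> MentatError {
    MentatErrorKind::from(e).into()
}
```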
@@ -152,76 +259,76 @@ impl From<rusqlite::Error> for MentatError {
             Some(e) => e.to_string(),
             None => "".to_string(),
         };
-        MentatError::RusqliteError(error.to_string(), cause)
+        MentatError::from(error).into()
     }
 }

 impl From<uuid::Error> for MentatError {
     fn from(error: uuid::Error) -> Self {
-        MentatError::UuidError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<edn::ParseError> for MentatError {
     fn from(error: edn::ParseError) -> Self {
-        MentatError::EdnParseError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<DbError> for MentatError {
     fn from(error: DbError) -> Self {
-        MentatError::DbError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<AlgebrizerError> for MentatError {
     fn from(error: AlgebrizerError) -> Self {
-        MentatError::AlgebrizerError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<ProjectorError> for MentatError {
     fn from(error: ProjectorError) -> Self {
-        MentatError::ProjectorError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<PullError> for MentatError {
     fn from(error: PullError) -> Self {
-        MentatError::PullError(error)
+        MentatError::from(error).into()
     }
 }

 impl From<SQLError> for MentatError {
     fn from(error: SQLError) -> Self {
-        MentatError::SQLError(error)
+        MentatError::from(error).into()
     }
 }

 #[cfg(feature = "syncable")]
 impl From<TolstoyError> for MentatError {
     fn from(error: TolstoyError) -> Self {
-        MentatError::TolstoyError(error)
+        MentatError::from(error).into()
     }
 }

 #[cfg(feature = "syncable")]
 impl From<serde_json::Error> for MentatError {
     fn from(error: serde_json::Error) -> Self {
-        MentatError::SerializationError(error)
+        MentatError::from(error).into()
     }
 }

 #[cfg(feature = "syncable")]
 impl From<hyper::Error> for MentatError {
     fn from(error: hyper::Error) -> Self {
-        MentatError::NetworkError(error)
+        MentatError::from(error).into()
    }
 }

 #[cfg(feature = "syncable")]
 impl From<http::uri::InvalidUri> for MentatError {
     fn from(error: http::uri::InvalidUri) -> Self {
-        MentatError::UriError(error)
+        MentatError::from(error).into()
     }
 }

@@ -1,6 +1,6 @@
 [package]
 name = "query_algebrizer_traits"
-version = "0.0.2"
+version = "0.0.1"
 workspace = ".."

 [lib]

@@ -8,8 +8,8 @@ name = "query_algebrizer_traits"
 path = "lib.rs"

 [dependencies]
-failure = "~0.1"
-failure_derive = "~0.1"
+failure = "0.1"
+failure_derive = "0.1"

 [dependencies.edn]
 path = "../edn"
@ -12,10 +12,60 @@ use std; // To refer to std::result::Result.
|
||||||
|
|
||||||
use core_traits::{ValueType, ValueTypeSet};
|
use core_traits::{ValueType, ValueTypeSet};
|
||||||
|
|
||||||
use edn::{query::PlainSymbol, ParseError};
|
use std::fmt;
|
||||||
|
use failure::{
|
||||||
|
Backtrace,
|
||||||
|
Context,
|
||||||
|
Fail,
|
||||||
|
};
|
||||||
|
|
||||||
|
use edn::{query::PlainSymbol, ParseErrorKind};
|
||||||
|
|
||||||
pub type Result<T> = std::result::Result<T, AlgebrizerError>;
|
pub type Result<T> = std::result::Result<T, AlgebrizerError>;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct AlgebrizerError(Box<Context<AlgebrizerErrorKind>>);
|
||||||
|
|
||||||
|
impl Fail for AlgebrizerError {
|
||||||
|
#[inline]
|
||||||
|
fn cause(&self) -> Option<&dyn Fail> {
|
||||||
|
self.0.cause()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn backtrace(&self) -> Option<&Backtrace> {
|
||||||
|
self.0.backtrace()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for AlgebrizerError {
|
||||||
|
#[inline]
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
fmt::Display::fmt(&*self.0, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AlgebrizerError {
|
||||||
|
#[inline]
|
||||||
|
pub fn kind(&self) -> &AlgebrizerErrorKind {
|
||||||
|
&*self.0.get_context()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<AlgebrizerErrorKind> for AlgebrizerError {
|
||||||
|
#[inline]
|
||||||
|
fn from(kind: AlgebrizerErrorKind) -> AlgebrizerError {
|
||||||
|
AlgebrizerError(Box::new(Context::new(kind)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Context<AlgebrizerErrorKind>> for AlgebrizerError {
|
||||||
|
#[inline]
|
||||||
|
fn from(inner: Context<AlgebrizerErrorKind>) -> AlgebrizerError {
|
||||||
|
AlgebrizerError(Box::new(inner))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
pub enum BindingError {
|
pub enum BindingError {
|
||||||
NoBoundVariable,
|
NoBoundVariable,
|
||||||
|
@ -40,7 +90,7 @@ pub enum BindingError {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, Fail, PartialEq)]
|
#[derive(Clone, Debug, Eq, Fail, PartialEq)]
|
||||||
pub enum AlgebrizerError {
|
pub enum AlgebrizerErrorKind {
|
||||||
#[fail(display = "{} var {} is duplicated", _0, _1)]
|
#[fail(display = "{} var {} is duplicated", _0, _1)]
|
||||||
DuplicateVariableError(PlainSymbol, &'static str),
|
DuplicateVariableError(PlainSymbol, &'static str),
|
||||||
|
|
||||||
|
@ -107,11 +157,11 @@ pub enum AlgebrizerError {
|
||||||
InvalidBinding(PlainSymbol, BindingError),
|
InvalidBinding(PlainSymbol, BindingError),
|
||||||
|
|
||||||
#[fail(display = "{}", _0)]
|
#[fail(display = "{}", _0)]
|
||||||
EdnParseError(#[cause] ParseError),
|
EdnParseError(#[cause] ParseErrorKind),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<ParseError> for AlgebrizerError {
|
impl From<ParseErrorKind> for AlgebrizerError {
|
||||||
fn from(error: ParseError) -> AlgebrizerError {
|
fn from(error: ParseErrorKind) -> AlgebrizerError {
|
||||||
AlgebrizerError::EdnParseError(error)
|
AlgebrizerError::from(error).into()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -9,9 +9,6 @@
 // specific language governing permissions and limitations under the License.

 extern crate failure;
-#[macro_use]
-extern crate failure_derive;

 extern crate core_traits;
 extern crate edn;
-
@@ -1,10 +1,10 @@
 [package]
 name = "mentat_query_algebrizer"
-version = "0.0.2"
+version = "0.0.1"
 workspace = ".."

 [dependencies]
-failure = "~0.1"
+failure = "0.1.1"

 [dependencies.edn]
 path = "../edn"

@@ -19,4 +19,4 @@ path = "../core-traits"
 path = "../query-algebrizer-traits"

 [dev-dependencies]
-itertools = "~0.10"
+itertools = "0.8"
@ -14,11 +14,11 @@ use mentat_core::{HasSchema, SQLValueType, Schema};
|
||||||
|
|
||||||
use edn::query::{FnArg, NonIntegerConstant, Variable};
|
use edn::query::{FnArg, NonIntegerConstant, Variable};
|
||||||
|
|
||||||
use crate::clauses::ConjoiningClauses;
|
use clauses::ConjoiningClauses;
|
||||||
|
|
||||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||||
|
|
||||||
use crate::types::EmptyBecause;
|
use types::EmptyBecause;
|
||||||
|
|
||||||
macro_rules! coerce_to_typed_value {
|
macro_rules! coerce_to_typed_value {
|
||||||
($var: ident, $val: ident, $types: expr, $type: path, $constructor: path) => {{
|
($var: ident, $val: ident, $types: expr, $type: path, $constructor: path) => {{
|
||||||
|
@ -62,11 +62,11 @@ impl ValueTypes for FnArg {
|
||||||
|
|
||||||
&FnArg::Constant(NonIntegerConstant::BigInteger(_)) => {
|
&FnArg::Constant(NonIntegerConstant::BigInteger(_)) => {
|
||||||
// Not yet implemented.
|
// Not yet implemented.
|
||||||
bail!(AlgebrizerError::UnsupportedArgument)
|
bail!(AlgebrizerErrorKind::UnsupportedArgument)
|
||||||
}
|
}
|
||||||
|
|
||||||
// These don't make sense here. TODO: split FnArg into scalar and non-scalar…
|
// These don't make sense here. TODO: split FnArg into scalar and non-scalar…
|
||||||
&FnArg::Vector(_) | &FnArg::SrcVar(_) => bail!(AlgebrizerError::UnsupportedArgument),
|
&FnArg::Vector(_) | &FnArg::SrcVar(_) => bail!(AlgebrizerErrorKind::UnsupportedArgument),
|
||||||
|
|
||||||
// These are all straightforward.
|
// These are all straightforward.
|
||||||
&FnArg::Constant(NonIntegerConstant::Boolean(_)) => {
|
&FnArg::Constant(NonIntegerConstant::Boolean(_)) => {
|
||||||
|
@ -116,7 +116,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
let constrained_types;
|
let constrained_types;
|
||||||
if let Some(required) = self.required_types.get(var) {
|
if let Some(required) = self.required_types.get(var) {
|
||||||
constrained_types = known_types.intersection(*required);
|
constrained_types = known_types.intersection(required);
|
||||||
} else {
|
} else {
|
||||||
constrained_types = known_types;
|
constrained_types = known_types;
|
||||||
}
|
}
|
||||||
|
@ -191,7 +191,7 @@ impl ConjoiningClauses {
|
||||||
FnArg::Variable(in_var) => {
|
FnArg::Variable(in_var) => {
|
||||||
// TODO: technically you could ground an existing variable inside the query….
|
// TODO: technically you could ground an existing variable inside the query….
|
||||||
if !self.input_variables.contains(&in_var) {
|
if !self.input_variables.contains(&in_var) {
|
||||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||||
}
|
}
|
||||||
match self.bound_value(&in_var) {
|
match self.bound_value(&in_var) {
|
||||||
// The type is already known if it's a bound variable….
|
// The type is already known if it's a bound variable….
|
||||||
|
@ -200,7 +200,7 @@ impl ConjoiningClauses {
|
||||||
// The variable is present in `:in`, but it hasn't yet been provided.
|
// The variable is present in `:in`, but it hasn't yet been provided.
|
||||||
// This is a restriction we will eventually relax: we don't yet have a way
|
// This is a restriction we will eventually relax: we don't yet have a way
|
||||||
// to collect variables as part of a computed table or substitution.
|
// to collect variables as part of a computed table or substitution.
|
||||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -209,7 +209,7 @@ impl ConjoiningClauses {
|
||||||
FnArg::Constant(NonIntegerConstant::BigInteger(_)) => unimplemented!(),
|
FnArg::Constant(NonIntegerConstant::BigInteger(_)) => unimplemented!(),
|
||||||
|
|
||||||
// These don't make sense here.
|
// These don't make sense here.
|
||||||
FnArg::Vector(_) | FnArg::SrcVar(_) => bail!(AlgebrizerError::InvalidGroundConstant),
|
FnArg::Vector(_) | FnArg::SrcVar(_) => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
|
||||||
|
|
||||||
// These are all straightforward.
|
// These are all straightforward.
|
||||||
FnArg::Constant(NonIntegerConstant::Boolean(x)) => {
|
FnArg::Constant(NonIntegerConstant::Boolean(x)) => {
|
||||||
|
|
|
@ -16,22 +16,22 @@ use mentat_core::util::Either;
|
||||||
|
|
||||||
use edn::query::{Binding, FnArg, NonIntegerConstant, SrcVar, VariableOrPlaceholder, WhereFn};
|
use edn::query::{Binding, FnArg, NonIntegerConstant, SrcVar, VariableOrPlaceholder, WhereFn};
|
||||||
|
|
||||||
use crate::clauses::ConjoiningClauses;
|
use clauses::ConjoiningClauses;
|
||||||
|
|
||||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};
|
||||||
|
|
||||||
use crate::types::{
|
use types::{
|
||||||
Column, ColumnConstraint, DatomsColumn, DatomsTable, EmptyBecause, FulltextColumn,
|
Column, ColumnConstraint, DatomsColumn, DatomsTable, EmptyBecause, FulltextColumn,
|
||||||
QualifiedAlias, QueryValue, SourceAlias,
|
QualifiedAlias, QueryValue, SourceAlias,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::Known;
|
use Known;
|
||||||
|
|
||||||
impl ConjoiningClauses {
|
impl ConjoiningClauses {
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
pub(crate) fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
pub(crate) fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||||
if where_fn.args.len() != 3 {
|
if where_fn.args.len() != 3 {
|
||||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
where_fn.args.len(),
|
where_fn.args.len(),
|
||||||
3
|
3
|
||||||
|
@ -40,7 +40,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
if where_fn.binding.is_empty() {
|
if where_fn.binding.is_empty() {
|
||||||
// The binding must introduce at least one bound variable.
|
// The binding must introduce at least one bound variable.
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
BindingError::NoBoundVariable
|
BindingError::NoBoundVariable
|
||||||
));
|
));
|
||||||
|
@ -48,7 +48,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
if !where_fn.binding.is_valid() {
|
if !where_fn.binding.is_valid() {
|
||||||
// The binding must not duplicate bound variables.
|
// The binding must not duplicate bound variables.
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
BindingError::RepeatedBoundVariable
|
BindingError::RepeatedBoundVariable
|
||||||
));
|
));
|
||||||
|
@ -59,7 +59,7 @@ impl ConjoiningClauses {
|
||||||
Binding::BindRel(bindings) => {
|
Binding::BindRel(bindings) => {
|
||||||
let bindings_count = bindings.len();
|
let bindings_count = bindings.len();
|
||||||
if bindings_count < 1 || bindings_count > 4 {
|
if bindings_count < 1 || bindings_count > 4 {
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
BindingError::InvalidNumberOfBindings {
|
BindingError::InvalidNumberOfBindings {
|
||||||
number: bindings.len(),
|
number: bindings.len(),
|
||||||
|
@ -70,7 +70,7 @@ impl ConjoiningClauses {
|
||||||
bindings
|
bindings
|
||||||
}
|
}
|
||||||
Binding::BindScalar(_) | Binding::BindTuple(_) | Binding::BindColl(_) => {
|
Binding::BindScalar(_) | Binding::BindTuple(_) | Binding::BindColl(_) => {
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
BindingError::ExpectedBindRel
|
BindingError::ExpectedBindRel
|
||||||
))
|
))
|
||||||
|
@ -90,10 +90,10 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
let mut args = where_fn.args.into_iter();
|
let mut args = where_fn.args.into_iter();
|
||||||
|
|
||||||
// TODO(gburd): process source variables.
|
// TODO: process source variables.
|
||||||
match args.next().unwrap() {
|
match args.next().unwrap() {
|
||||||
FnArg::SrcVar(SrcVar::DefaultSrc) => {}
|
FnArg::SrcVar(SrcVar::DefaultSrc) => {}
|
||||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
"source variable",
|
"source variable",
|
||||||
0
|
0
|
||||||
|
@ -104,7 +104,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
// TODO: accept placeholder and set of attributes. Alternately, consider putting the search
|
// TODO: accept placeholder and set of attributes. Alternately, consider putting the search
|
||||||
// term before the attribute arguments and collect the (variadic) attributes into a set.
|
// term before the attribute arguments and collect the (variadic) attributes into a set.
|
||||||
// let a: Entid = self.resolve_attribute_argument(&where_fn.operator, 1, args.next().unwrap())?;
|
// let a: Entid = self.resolve_attribute_argument(&where_fn.operator, 1, args.next().unwrap())?;
|
||||||
//
|
//
|
||||||
// TODO: improve the expression of this matching, possibly by using attribute_for_* uniformly.
|
// TODO: improve the expression of this matching, possibly by using attribute_for_* uniformly.
|
||||||
let a = match args.next().unwrap() {
|
let a = match args.next().unwrap() {
|
||||||
|
@ -116,12 +116,12 @@ impl ConjoiningClauses {
|
||||||
// TODO: allow non-constant attributes.
|
// TODO: allow non-constant attributes.
|
||||||
match self.bound_value(&v) {
|
match self.bound_value(&v) {
|
||||||
Some(TypedValue::Ref(entid)) => Some(entid),
|
Some(TypedValue::Ref(entid)) => Some(entid),
|
||||||
Some(tv) => bail!(AlgebrizerError::InputTypeDisagreement(
|
Some(tv) => bail!(AlgebrizerErrorKind::InputTypeDisagreement(
|
||||||
v.name(),
|
v.name().clone(),
|
||||||
ValueType::Ref,
|
ValueType::Ref,
|
||||||
tv.value_type()
|
tv.value_type()
|
||||||
)),
|
)),
|
||||||
None => bail!(AlgebrizerError::UnboundVariable((*v.0).clone())),
|
None => bail!(AlgebrizerErrorKind::UnboundVariable((*v.0).clone())),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
|
@ -130,13 +130,20 @@ impl ConjoiningClauses {
|
||||||
// An unknown ident, or an entity that isn't present in the store, or isn't a fulltext
|
// An unknown ident, or an entity that isn't present in the store, or isn't a fulltext
|
||||||
// attribute, is likely enough to be a coding error that we choose to bail instead of
|
// attribute, is likely enough to be a coding error that we choose to bail instead of
|
||||||
// marking the pattern as known-empty.
|
// marking the pattern as known-empty.
|
||||||
let op = where_fn.operator.clone(); //TODO(gburd): remove me...
|
let a = a.ok_or(AlgebrizerErrorKind::InvalidArgument(
|
||||||
let a = a.ok_or_else(move || AlgebrizerError::InvalidArgument(op, "attribute", 1))?;
|
where_fn.operator.clone(),
|
||||||
let op = where_fn.operator.clone(); //TODO(gburd): remove me...
|
"attribute",
|
||||||
let attribute = schema
|
1,
|
||||||
.attribute_for_entid(a)
|
))?;
|
||||||
.cloned()
|
let attribute =
|
||||||
.ok_or_else(move || AlgebrizerError::InvalidArgument(op, "attribute", 1))?;
|
schema
|
||||||
|
.attribute_for_entid(a)
|
||||||
|
.cloned()
|
||||||
|
.ok_or(AlgebrizerErrorKind::InvalidArgument(
|
||||||
|
where_fn.operator.clone(),
|
||||||
|
"attribute",
|
||||||
|
1,
|
||||||
|
))?;
|
||||||
|
|
||||||
if !attribute.fulltext {
|
if !attribute.fulltext {
|
||||||
// We can never get results from a non-fulltext attribute!
|
// We can never get results from a non-fulltext attribute!
|
||||||
|
@ -183,7 +190,7 @@ impl ConjoiningClauses {
|
||||||
FnArg::Variable(in_var) => {
|
FnArg::Variable(in_var) => {
|
||||||
match self.bound_value(&in_var) {
|
match self.bound_value(&in_var) {
|
||||||
Some(t @ TypedValue::String(_)) => Either::Left(t),
|
Some(t @ TypedValue::String(_)) => Either::Left(t),
|
||||||
Some(_) => bail!(AlgebrizerError::InvalidArgument(
|
Some(_) => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
"string",
|
"string",
|
||||||
2
|
2
|
||||||
|
@ -192,7 +199,7 @@ impl ConjoiningClauses {
|
||||||
// Regardless of whether we'll be providing a string later, or the value
|
// Regardless of whether we'll be providing a string later, or the value
|
||||||
// comes from a column, it must be a string.
|
// comes from a column, it must be a string.
|
||||||
if self.known_type(&in_var) != Some(ValueType::String) {
|
if self.known_type(&in_var) != Some(ValueType::String) {
|
||||||
bail!(AlgebrizerError::InvalidArgument(
|
bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
"string",
|
"string",
|
||||||
2
|
2
|
||||||
|
@ -202,7 +209,7 @@ impl ConjoiningClauses {
|
||||||
if self.input_variables.contains(&in_var) {
|
if self.input_variables.contains(&in_var) {
|
||||||
// Sorry, we haven't implemented late binding.
|
// Sorry, we haven't implemented late binding.
|
||||||
// TODO: implement this.
|
// TODO: implement this.
|
||||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||||
} else {
|
} else {
|
||||||
// It must be bound earlier in the query. We already established that
|
// It must be bound earlier in the query. We already established that
|
||||||
// it must be a string column.
|
// it must be a string column.
|
||||||
|
@ -213,13 +220,13 @@ impl ConjoiningClauses {
|
||||||
{
|
{
|
||||||
Either::Right(binding)
|
Either::Right(binding)
|
||||||
} else {
|
} else {
|
||||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
"string",
|
"string",
|
||||||
2
|
2
|
||||||
|
@ -264,7 +271,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
self.bind_column_to_var(
|
self.bind_column_to_var(
|
||||||
schema,
|
schema,
|
||||||
fulltext_values_alias,
|
fulltext_values_alias.clone(),
|
||||||
Column::Fulltext(FulltextColumn::Text),
|
Column::Fulltext(FulltextColumn::Text),
|
||||||
var.clone(),
|
var.clone(),
|
||||||
);
|
);
|
||||||
|
@ -277,7 +284,12 @@ impl ConjoiningClauses {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
self.bind_column_to_var(schema, datoms_table_alias, DatomsColumn::Tx, var.clone());
|
self.bind_column_to_var(
|
||||||
|
schema,
|
||||||
|
datoms_table_alias.clone(),
|
||||||
|
DatomsColumn::Tx,
|
||||||
|
var.clone(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let VariableOrPlaceholder::Variable(ref var) = b_score {
|
if let VariableOrPlaceholder::Variable(ref var) = b_score {
|
||||||
|
@ -286,7 +298,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
// We do not allow the score to be bound.
|
// We do not allow the score to be bound.
|
||||||
if self.value_bindings.contains_key(var) || self.input_variables.contains(var) {
|
if self.value_bindings.contains_key(var) || self.input_variables.contains(var) {
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
var.name(),
|
var.name(),
|
||||||
BindingError::UnexpectedBinding
|
BindingError::UnexpectedBinding
|
||||||
));
|
));
|
||||||
|
@ -311,7 +323,7 @@ mod testing {
|
||||||
|
|
||||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||||
|
|
||||||
use crate::clauses::{add_attribute, associate_ident};
|
use clauses::{add_attribute, associate_ident};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_apply_fulltext() {
|
fn test_apply_fulltext() {
|
||||||
|
|
|
@ -14,15 +14,15 @@ use mentat_core::Schema;
|
||||||
|
|
||||||
use edn::query::{Binding, FnArg, Variable, VariableOrPlaceholder, WhereFn};
|
use edn::query::{Binding, FnArg, Variable, VariableOrPlaceholder, WhereFn};
|
||||||
|
|
||||||
use crate::clauses::{ConjoiningClauses, PushComputed};
|
use clauses::{ConjoiningClauses, PushComputed};
|
||||||
|
|
||||||
use crate::clauses::convert::ValueConversion;
|
use clauses::convert::ValueConversion;
|
||||||
|
|
||||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};
|
||||||
|
|
||||||
use crate::types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
use types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
||||||
|
|
||||||
use crate::Known;
|
use Known;
|
||||||
|
|
||||||
impl ConjoiningClauses {
|
impl ConjoiningClauses {
|
||||||
/// Take a relation: a matrix of values which will successively bind to named variables of
|
/// Take a relation: a matrix of values which will successively bind to named variables of
|
||||||
|
@ -47,7 +47,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
let named_values = ComputedTable::NamedValues {
|
let named_values = ComputedTable::NamedValues {
|
||||||
names: names.clone(),
|
names: names.clone(),
|
||||||
values,
|
values: values,
|
||||||
};
|
};
|
||||||
|
|
||||||
let table = self.computed_tables.push_computed(named_values);
|
let table = self.computed_tables.push_computed(named_values);
|
||||||
|
@ -103,13 +103,13 @@ impl ConjoiningClauses {
|
||||||
if existing != value {
|
if existing != value {
|
||||||
self.mark_known_empty(EmptyBecause::ConflictingBindings {
|
self.mark_known_empty(EmptyBecause::ConflictingBindings {
|
||||||
var: var.clone(),
|
var: var.clone(),
|
||||||
existing,
|
existing: existing.clone(),
|
||||||
desired: value,
|
desired: value,
|
||||||
});
|
});
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.bind_value(&var, value);
|
self.bind_value(&var, value.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -117,7 +117,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
pub(crate) fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
pub(crate) fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||||
if where_fn.args.len() != 1 {
|
if where_fn.args.len() != 1 {
|
||||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||||
where_fn.operator.clone(),
|
where_fn.operator.clone(),
|
||||||
where_fn.args.len(),
|
where_fn.args.len(),
|
||||||
1
|
1
|
||||||
|
@ -128,16 +128,16 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
if where_fn.binding.is_empty() {
|
if where_fn.binding.is_empty() {
|
||||||
// The binding must introduce at least one bound variable.
|
// The binding must introduce at least one bound variable.
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator,
|
where_fn.operator.clone(),
|
||||||
BindingError::NoBoundVariable
|
BindingError::NoBoundVariable
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
if !where_fn.binding.is_valid() {
|
if !where_fn.binding.is_valid() {
|
||||||
// The binding must not duplicate bound variables.
|
// The binding must not duplicate bound variables.
|
||||||
bail!(AlgebrizerError::InvalidBinding(
|
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||||
where_fn.operator,
|
where_fn.operator.clone(),
|
||||||
BindingError::RepeatedBoundVariable
|
BindingError::RepeatedBoundVariable
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
@ -154,7 +154,7 @@ impl ConjoiningClauses {
|
||||||
// Just the same, but we bind more than one column at a time.
|
// Just the same, but we bind more than one column at a time.
|
||||||
if children.len() != places.len() {
|
if children.len() != places.len() {
|
||||||
// Number of arguments don't match the number of values. TODO: better error message.
|
// Number of arguments don't match the number of values. TODO: better error message.
|
||||||
bail!(AlgebrizerError::GroundBindingsMismatch)
|
bail!(AlgebrizerErrorKind::GroundBindingsMismatch)
|
||||||
}
|
}
|
||||||
for (place, arg) in places.into_iter().zip(children.into_iter()) {
|
for (place, arg) in places.into_iter().zip(children.into_iter()) {
|
||||||
self.apply_ground_place(schema, place, arg)? // TODO: short-circuit on impossible.
|
self.apply_ground_place(schema, place, arg)? // TODO: short-circuit on impossible.
|
||||||
|
@ -168,7 +168,7 @@ impl ConjoiningClauses {
|
||||||
// are all in a single structure. That makes it substantially simpler!
|
// are all in a single structure. That makes it substantially simpler!
|
||||||
(Binding::BindColl(var), FnArg::Vector(children)) => {
|
(Binding::BindColl(var), FnArg::Vector(children)) => {
|
||||||
if children.is_empty() {
|
if children.is_empty() {
|
||||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Turn a collection of arguments into a Vec of `TypedValue`s of the same type.
|
// Turn a collection of arguments into a Vec of `TypedValue`s of the same type.
|
||||||
|
@ -180,7 +180,7 @@ impl ConjoiningClauses {
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|arg| -> Option<Result<TypedValue>> {
|
.filter_map(|arg| -> Option<Result<TypedValue>> {
|
||||||
// We need to get conversion errors out.
|
// We need to get conversion errors out.
|
||||||
// We also want to mark known-empty on impossibility, but
|
// We also want to mark known-empty on impossibilty, but
|
||||||
// still detect serious errors.
|
// still detect serious errors.
|
||||||
match self.typed_value_from_arg(schema, &var, arg, known_types) {
|
match self.typed_value_from_arg(schema, &var, arg, known_types) {
|
||||||
Ok(ValueConversion::Val(tv)) => {
|
Ok(ValueConversion::Val(tv)) => {
|
||||||
|
@ -188,7 +188,7 @@ impl ConjoiningClauses {
|
||||||
&& !accumulated_types.is_unit()
|
&& !accumulated_types.is_unit()
|
||||||
{
|
{
|
||||||
// Values not all of the same type.
|
// Values not all of the same type.
|
||||||
Some(Err(AlgebrizerError::InvalidGroundConstant))
|
Some(Err(AlgebrizerErrorKind::InvalidGroundConstant.into()))
|
||||||
} else {
|
} else {
|
||||||
Some(Ok(tv))
|
Some(Ok(tv))
|
||||||
}
|
}
|
||||||
|
@ -198,7 +198,7 @@ impl ConjoiningClauses {
|
||||||
skip = Some(because);
|
skip = Some(because);
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
Err(e) => Some(Err(e)),
|
Err(e) => Some(Err(e.into())),
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect::<Result<Vec<TypedValue>>>()?;
|
.collect::<Result<Vec<TypedValue>>>()?;
|
||||||
|
@ -211,7 +211,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
// Otherwise, we now have the values and the type.
|
// Otherwise, we now have the values and the type.
|
||||||
let types = vec![accumulated_types.exemplar().unwrap()];
|
let types = vec![accumulated_types.exemplar().unwrap()];
|
||||||
let names = vec![var];
|
let names = vec![var.clone()];
|
||||||
|
|
||||||
self.collect_named_bindings(schema, names, types, values);
|
self.collect_named_bindings(schema, names, types, values);
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -219,7 +219,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
(Binding::BindRel(places), FnArg::Vector(rows)) => {
|
(Binding::BindRel(places), FnArg::Vector(rows)) => {
|
||||||
if rows.is_empty() {
|
if rows.is_empty() {
|
||||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Grab the known types to which these args must conform, and track
|
// Grab the known types to which these args must conform, and track
|
||||||
|
@ -227,8 +227,8 @@ impl ConjoiningClauses {
|
||||||
let template: Vec<Option<(Variable, ValueTypeSet)>> = places
|
let template: Vec<Option<(Variable, ValueTypeSet)>> = places
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| match x {
|
.map(|x| match x {
|
||||||
VariableOrPlaceholder::Placeholder => None,
|
&VariableOrPlaceholder::Placeholder => None,
|
||||||
VariableOrPlaceholder::Variable(ref v) => {
|
&VariableOrPlaceholder::Variable(ref v) => {
|
||||||
Some((v.clone(), self.known_type_set(v)))
|
Some((v.clone(), self.known_type_set(v)))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -243,7 +243,7 @@ impl ConjoiningClauses {
|
||||||
|
|
||||||
if expected_width == 0 {
|
if expected_width == 0 {
|
||||||
// They can't all be placeholders.
|
// They can't all be placeholders.
|
||||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Accumulate values into `matrix` and types into `a_t_f_c`.
|
// Accumulate values into `matrix` and types into `a_t_f_c`.
|
||||||
|
@ -259,7 +259,7 @@ impl ConjoiningClauses {
|
||||||
FnArg::Vector(cols) => {
|
FnArg::Vector(cols) => {
|
||||||
// Make sure that every row is the same length.
|
// Make sure that every row is the same length.
|
||||||
if cols.len() != full_width {
|
if cols.len() != full_width {
|
||||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: don't accumulate twice.
|
// TODO: don't accumulate twice.
|
||||||
|
@ -271,7 +271,7 @@ impl ConjoiningClauses {
|
||||||
// Convert each item in the row.
|
// Convert each item in the row.
|
||||||
// If any value in the row is impossible, then skip the row.
|
// If any value in the row is impossible, then skip the row.
|
||||||
// If all rows are impossible, fail the entire CC.
|
// If all rows are impossible, fail the entire CC.
|
||||||
if let Some(ref pair) = pair {
|
if let &Some(ref pair) = pair {
|
||||||
match self.typed_value_from_arg(schema, &pair.0, col, pair.1)? {
|
match self.typed_value_from_arg(schema, &pair.0, col, pair.1)? {
|
||||||
ValueConversion::Val(tv) => vals.push(tv),
|
ValueConversion::Val(tv) => vals.push(tv),
|
||||||
ValueConversion::Impossible(because) => {
|
ValueConversion::Impossible(because) => {
|
||||||
|
@ -297,12 +297,12 @@ impl ConjoiningClauses {
|
||||||
let inserted = acc.insert(val.value_type());
|
let inserted = acc.insert(val.value_type());
|
||||||
if inserted && !acc.is_unit() {
|
if inserted && !acc.is_unit() {
|
||||||
// Heterogeneous types.
|
// Heterogeneous types.
|
||||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||||
}
|
}
|
||||||
matrix.push(val);
|
matrix.push(val);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => bail!(AlgebrizerError::InvalidGroundConstant),
|
_ => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -329,7 +329,7 @@ impl ConjoiningClauses {
|
||||||
self.collect_named_bindings(schema, names, types, matrix);
|
self.collect_named_bindings(schema, names, types, matrix);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
(_, _) => bail!(AlgebrizerError::InvalidGroundConstant),
|
(_, _) => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -342,7 +342,7 @@ mod testing {
|
||||||
|
|
||||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||||
|
|
||||||
use crate::clauses::{add_attribute, associate_ident};
|
use clauses::{add_attribute, associate_ident};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_apply_ground() {
|
fn test_apply_ground() {
|
||||||
|
|
|
@@ -14,7 +14,7 @@ use core_traits::{TypedValue, ValueType};

  use edn::query::Variable;

- use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+ use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

  /// Define the inputs to a query. This is in two parts: a set of values known now, and a set of
  /// types known now.
@@ -55,7 +55,7 @@ impl QueryInputs {
  .iter()
  .map(|(var, val)| (var.clone(), val.value_type()))
  .collect(),
- values,
+ values: values,
  }
  }

@@ -69,10 +69,13 @@ impl QueryInputs {
  let old = types.insert(var.clone(), t);
  if let Some(old) = old {
  if old != t {
- bail!(AlgebrizerError::InputTypeDisagreement(var.name(), old, t));
+ bail!(AlgebrizerErrorKind::InputTypeDisagreement(var.name(), old, t));
  }
  }
  }
- Ok(QueryInputs { types, values })
+ Ok(QueryInputs {
+ types: types,
+ values: values,
+ })
  }
  }

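Several hunks in this file and the next swap struct-literal shorthand (`values,`, `types,`, `alias_counter,`) for the explicit `field: field` spelling. Both forms build the same value; the sketch below, with made-up field types rather than the real QueryInputs contents, shows the two spellings:

    // Illustrative struct only; the real QueryInputs holds variable-to-value maps.
    struct Inputs {
        types: Vec<String>,
        values: Vec<i64>,
    }

    fn build(types: Vec<String>, values: Vec<i64>) -> Inputs {
        // Shorthand spelling: Inputs { types, values }
        // Explicit spelling, as in the changed code:
        Inputs {
            types: types,
            values: values,
        }
    }

    fn main() {
        let inputs = build(vec!["Ref".to_string()], vec![42]);
        assert_eq!(inputs.values[0], 42);
        assert_eq!(inputs.types[0], "Ref");
    }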
@@ -24,9 +24,9 @@ use mentat_core::counter::RcCounter;

  use edn::query::{Element, FindSpec, Keyword, PatternNonValuePlace, Pull, Variable, WhereClause};

- use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+ use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

- use crate::types::{
+ use types::{
  Column, ColumnConstraint, ColumnIntersection, ComputedTable, DatomsColumn, DatomsTable,
  EmptyBecause, EvolvedNonValuePlace, EvolvedPattern, EvolvedValuePlace, FulltextColumn,
  PlaceOrEmpty, QualifiedAlias, QueryValue, SourceAlias, TableAlias,
@@ -45,11 +45,11 @@ mod ground;
  mod tx_log_api;
  mod where_fn;

- use crate::validate::{validate_not_join, validate_or_join};
+ use validate::{validate_not_join, validate_or_join};

  pub use self::inputs::QueryInputs;

- use crate::Known;
+ use Known;

  trait Contains<K, T> {
  fn when_contains<F: FnOnce() -> T>(&self, k: &K, f: F) -> Option<T>;
@@ -147,8 +147,8 @@ pub struct ConjoiningClauses {
  /// A map from var to qualified columns. Used to project.
  pub column_bindings: BTreeMap<Variable, Vec<QualifiedAlias>>,

- /// A list of variables mentioned in the enclosing query's `:in` clause all of which must be
- /// bound before the query can be executed. TODO: clarify what this means for nested CCs.
+ /// A list of variables mentioned in the enclosing query's :in clause. These must all be bound
+ /// before the query can be executed. TODO: clarify what this means for nested CCs.
  pub input_variables: BTreeSet<Variable>,

  /// In some situations -- e.g., when a query is being run only once -- we know in advance the
@@ -279,7 +279,7 @@ impl ConjoiningClauses {
  values.keep_intersected_keys(&in_variables);

  let mut cc = ConjoiningClauses {
- alias_counter,
+ alias_counter: alias_counter,
  input_variables: in_variables,
  value_bindings: values,
  ..Default::default()
@@ -301,8 +301,14 @@ impl ConjoiningClauses {
  impl ConjoiningClauses {
  pub(crate) fn derive_types_from_find_spec(&mut self, find_spec: &FindSpec) {
  for spec in find_spec.columns() {
- if let Element::Pull(Pull { ref var, .. }) = spec {
- self.constrain_var_to_type(var.clone(), ValueType::Ref);
+ match spec {
+ &Element::Pull(Pull {
+ ref var,
+ patterns: _,
+ }) => {
+ self.constrain_var_to_type(var.clone(), ValueType::Ref);
+ }
+ _ => {}
  }
  }
  }
@@ -404,7 +410,7 @@ impl ConjoiningClauses {
  self.known_types
  .get(var)
  .cloned()
- .unwrap_or_else(ValueTypeSet::any)
+ .unwrap_or(ValueTypeSet::any())
  }

  pub(crate) fn bind_column_to_var<C: Into<Column>>(
@@ -508,7 +514,7 @@ impl ConjoiningClauses {

  self.column_bindings
  .entry(var)
- .or_insert_with(Vec::new)
+ .or_insert(vec![])
  .push(alias);
  }

@@ -579,10 +585,10 @@ impl ConjoiningClauses {
  these_types: ValueTypeSet,
  ) -> Option<EmptyBecause> {
  if let Some(existing) = self.known_types.get(var) {
- if existing.intersection(these_types).is_empty() {
+ if existing.intersection(&these_types).is_empty() {
  return Some(EmptyBecause::TypeMismatch {
  var: var.clone(),
- existing: *existing,
+ existing: existing.clone(),
  desired: these_types,
  });
  }
@@ -634,7 +640,7 @@ impl ConjoiningClauses {
  // We have an existing requirement. The new requirement will be
  // the intersection, but we'll `mark_known_empty` if that's empty.
  let existing = *entry.get();
- let intersection = types.intersection(existing);
+ let intersection = types.intersection(&existing);
  entry.insert(intersection);

  if !intersection.is_empty() {
@@ -642,8 +648,8 @@ impl ConjoiningClauses {
  }

  EmptyBecause::TypeMismatch {
- var,
- existing,
+ var: var,
+ existing: existing,
  desired: types,
  }
  }
@@ -678,7 +684,7 @@ impl ConjoiningClauses {
  panic!("Uh oh: we failed this pattern, probably because {:?} couldn't match, but now we're broadening its type.",
  e.key());
  }
- new = existing_types.union(new_types);
+ new = existing_types.union(&new_types);
  }
  e.insert(new);
  }
@@ -704,11 +710,11 @@ impl ConjoiningClauses {
  e.insert(types);
  }
  Entry::Occupied(mut e) => {
- let intersected: ValueTypeSet = types.intersection(*e.get());
+ let intersected: ValueTypeSet = types.intersection(e.get());
  if intersected.is_empty() {
  let reason = EmptyBecause::TypeMismatch {
  var: e.key().clone(),
- existing: *e.get(),
+ existing: e.get().clone(),
  desired: types,
  };
  empty_because = Some(reason);
@@ -745,7 +751,7 @@ impl ConjoiningClauses {
  // If it's a variable, record that it has the right type.
  // Ident or attribute resolution errors (the only other check we need to do) will be done
  // by the caller.
- if let EvolvedNonValuePlace::Variable(ref v) = value {
+ if let &EvolvedNonValuePlace::Variable(ref v) = value {
  self.constrain_var_to_type(v.clone(), ValueType::Ref)
  }
  }
@@ -778,12 +784,12 @@ impl ConjoiningClauses {
  ) -> ::std::result::Result<DatomsTable, EmptyBecause> {
  if attribute.fulltext {
  match value {
- EvolvedValuePlace::Placeholder => Ok(DatomsTable::Datoms), // We don't need the value.
+ &EvolvedValuePlace::Placeholder => Ok(DatomsTable::Datoms), // We don't need the value.

  // TODO: an existing non-string binding can cause this pattern to fail.
- EvolvedValuePlace::Variable(_) => Ok(DatomsTable::FulltextDatoms),
+ &EvolvedValuePlace::Variable(_) => Ok(DatomsTable::FulltextDatoms),

- EvolvedValuePlace::Value(TypedValue::String(_)) => Ok(DatomsTable::FulltextDatoms),
+ &EvolvedValuePlace::Value(TypedValue::String(_)) => Ok(DatomsTable::FulltextDatoms),

  _ => {
  // We can't succeed if there's a non-string constant value for a fulltext
@@ -796,9 +802,9 @@ impl ConjoiningClauses {
  }
  }

- fn table_for_unknown_attribute(
+ fn table_for_unknown_attribute<'s, 'a>(
  &self,
- value: &EvolvedValuePlace,
+ value: &'a EvolvedValuePlace,
  ) -> ::std::result::Result<DatomsTable, EmptyBecause> {
  // If the value is known to be non-textual, we can simply use the regular datoms
  // table (TODO: and exclude on `index_fulltext`!).
@@ -811,7 +817,7 @@ impl ConjoiningClauses {
  Ok(match value {
  // TODO: see if the variable is projected, aggregated, or compared elsewhere in
  // the query. If it's not, we don't need to use all_datoms here.
- EvolvedValuePlace::Variable(ref v) => {
+ &EvolvedValuePlace::Variable(ref v) => {
  // If `required_types` and `known_types` don't exclude strings,
  // we need to query `all_datoms`.
  if self
@@ -828,7 +834,7 @@ impl ConjoiningClauses {
  DatomsTable::Datoms
  }
  }
- EvolvedValuePlace::Value(TypedValue::String(_)) => DatomsTable::AllDatoms,
+ &EvolvedValuePlace::Value(TypedValue::String(_)) => DatomsTable::AllDatoms,
  _ => DatomsTable::Datoms,
  })
  }
@@ -844,14 +850,14 @@ impl ConjoiningClauses {
  value: &'a EvolvedValuePlace,
  ) -> ::std::result::Result<DatomsTable, EmptyBecause> {
  match attribute {
- EvolvedNonValuePlace::Entid(id) => schema
- .attribute_for_entid(*id)
- .ok_or_else(|| EmptyBecause::InvalidAttributeEntid(*id))
+ &EvolvedNonValuePlace::Entid(id) => schema
+ .attribute_for_entid(id)
+ .ok_or_else(|| EmptyBecause::InvalidAttributeEntid(id))
  .and_then(|attribute| self.table_for_attribute_and_value(attribute, value)),
  // TODO: In a prepared context, defer this decision until a second algebrizing phase.
  // #278.
- EvolvedNonValuePlace::Placeholder => self.table_for_unknown_attribute(value),
- EvolvedNonValuePlace::Variable(ref v) => {
+ &EvolvedNonValuePlace::Placeholder => self.table_for_unknown_attribute(value),
+ &EvolvedNonValuePlace::Variable(ref v) => {
  // See if we have a binding for the variable.
  match self.bound_value(v) {
  // TODO: In a prepared context, defer this decision until a second algebrizing phase.
@@ -877,7 +883,7 @@ impl ConjoiningClauses {
  // attribute place.
  Err(EmptyBecause::InvalidBinding(
  Column::Fixed(DatomsColumn::Attribute),
- v,
+ v.clone(),
  ))
  }
  }
@@ -916,8 +922,8 @@ impl ConjoiningClauses {
  ) -> Option<&'s Attribute> {
  match value {
  // We know this one is known if the attribute lookup succeeds…
- TypedValue::Ref(id) => schema.attribute_for_entid(*id),
- TypedValue::Keyword(ref kw) => schema.attribute_for_ident(kw).map(|(a, _id)| a),
+ &TypedValue::Ref(id) => schema.attribute_for_entid(id),
+ &TypedValue::Keyword(ref kw) => schema.attribute_for_ident(kw).map(|(a, _id)| a),
  _ => None,
  }
  }
@@ -975,7 +981,7 @@ impl ConjoiningClauses {
  pub(crate) fn expand_column_bindings(&mut self) {
  for cols in self.column_bindings.values() {
  if cols.len() > 1 {
- let primary = &cols[0];
+ let ref primary = cols[0];
  let secondaries = cols.iter().skip(1);
  for secondary in secondaries {
  // TODO: if both primary and secondary are .v, should we make sure
@@ -1023,18 +1029,18 @@ impl ConjoiningClauses {
  let mut empty_because: Option<EmptyBecause> = None;
  for (var, types) in self.required_types.clone().into_iter() {
  if let Some(already_known) = self.known_types.get(&var) {
- if already_known.is_disjoint(types) {
+ if already_known.is_disjoint(&types) {
  // If we know the constraint can't be one of the types
  // the variable could take, then we know we're empty.
  empty_because = Some(EmptyBecause::TypeMismatch {
- var,
+ var: var,
  existing: *already_known,
  desired: types,
  });
  break;
  }

- if already_known.is_subset(types) {
+ if already_known.is_subset(&types) {
  // TODO: I'm not convinced that we can do nothing here.
  //
  // Consider `[:find ?x ?v :where [_ _ ?v] [(> ?v 10)] [?x :foo/long ?v]]`.
@@ -1065,7 +1071,7 @@ impl ConjoiningClauses {
  let qa = self
  .extracted_types
  .get(&var)
- .ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))?;
+ .ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()))?;
  self.wheres.add_intersection(ColumnConstraint::HasTypes {
  value: qa.0.clone(),
  value_types: types,
@@ -1123,7 +1129,7 @@ impl ConjoiningClauses {
  }

  fn mark_as_ref(&mut self, pos: &PatternNonValuePlace) {
- if let PatternNonValuePlace::Variable(ref var) = pos {
+ if let &PatternNonValuePlace::Variable(ref var) = pos {
  self.constrain_var_to_type(var.clone(), ValueType::Ref)
  }
  }
@@ -1136,13 +1142,13 @@ impl ConjoiningClauses {
  // We apply (top level) type predicates first as an optimization.
  for clause in where_clauses.iter() {
  match clause {
- WhereClause::TypeAnnotation(ref anno) => {
+ &WhereClause::TypeAnnotation(ref anno) => {
  self.apply_type_anno(anno)?;
  }

  // Patterns are common, so let's grab as much type information from
  // them as we can.
- WhereClause::Pattern(ref p) => {
+ &WhereClause::Pattern(ref p) => {
  self.mark_as_ref(&p.entity);
  self.mark_as_ref(&p.attribute);
  self.mark_as_ref(&p.tx);
@@ -1161,7 +1167,7 @@ impl ConjoiningClauses {
  let mut patterns: VecDeque<EvolvedPattern> = VecDeque::with_capacity(remaining);
  for clause in where_clauses {
  remaining -= 1;
- if let WhereClause::TypeAnnotation(_) = &clause {
+ if let &WhereClause::TypeAnnotation(_) = &clause {
  continue;
  }
  match clause {
@@ -1227,7 +1233,7 @@ impl PushComputed for Vec<ComputedTable> {
  #[cfg(test)]
  fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
  schema.entid_map.insert(e, i.clone());
- schema.ident_map.insert(i, e);
+ schema.ident_map.insert(i.clone(), e);
  }

  #[cfg(test)]
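A second recurring pattern above is that the ValueTypeSet operations are called with borrowed arguments (`intersection(&types)`, `union(&new_types)`, `is_disjoint(&types)`) instead of copied values, and the stored set is `clone()`d rather than dereferenced. The sketch below uses a stand-in set type, not the real core_traits::ValueTypeSet, to show the by-reference call shape:

    use std::collections::BTreeSet;

    // Stand-in for ValueTypeSet: the point is only that the set operations borrow
    // their argument, so call sites pass `&other` instead of moving or copying it.
    #[derive(Clone, Debug, PartialEq)]
    struct TypeSet(BTreeSet<&'static str>);

    impl TypeSet {
        fn of(types: &[&'static str]) -> TypeSet {
            TypeSet(types.iter().cloned().collect())
        }
        fn intersection(&self, other: &TypeSet) -> TypeSet {
            TypeSet(self.0.intersection(&other.0).cloned().collect())
        }
        fn is_disjoint(&self, other: &TypeSet) -> bool {
            self.0.is_disjoint(&other.0)
        }
    }

    fn main() {
        let known = TypeSet::of(&["Ref"]);
        let required = TypeSet::of(&["Ref", "String"]);
        assert_eq!(known.intersection(&required), TypeSet::of(&["Ref"]));
        assert!(!known.is_disjoint(&required));
    }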
@@ -10,13 +10,13 @@

  use edn::query::{ContainsVariables, NotJoin, UnifyVars};

- use crate::clauses::ConjoiningClauses;
+ use clauses::ConjoiningClauses;

- use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+ use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

- use crate::types::{ColumnConstraint, ComputedTable};
+ use types::{ColumnConstraint, ComputedTable};

- use crate::Known;
+ use Known;

  impl ConjoiningClauses {
  pub(crate) fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
@@ -35,7 +35,7 @@ impl ConjoiningClauses {
  let col = self.column_bindings.get(&v).unwrap()[0].clone();
  template.column_bindings.insert(v.clone(), vec![col]);
  } else {
- bail!(AlgebrizerError::UnboundVariable(v.name()));
+ bail!(AlgebrizerErrorKind::UnboundVariable(v.name()));
  }
  }

@@ -66,7 +66,7 @@ impl ConjoiningClauses {
  return Ok(());
  }

- let subquery = ComputedTable::Subquery(Box::new(template));
+ let subquery = ComputedTable::Subquery(template);

  self.wheres
  .add_intersection(ColumnConstraint::NotExists(subquery));
@@ -87,16 +87,16 @@ mod testing {

  use edn::query::{Keyword, PlainSymbol, Variable};

- use crate::clauses::{add_attribute, associate_ident, QueryInputs};
+ use clauses::{add_attribute, associate_ident, QueryInputs};

- use query_algebrizer_traits::errors::AlgebrizerError;
+ use query_algebrizer_traits::errors::AlgebrizerErrorKind;

- use crate::types::{
+ use types::{
  ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
  DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
  };

- use crate::{algebrize, algebrize_with_inputs, parse_find_string};
+ use {algebrize, algebrize_with_inputs, parse_find_string};

  fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
  let known = Known::for_schema(schema);
@@ -216,17 +216,26 @@ mod testing {
  .column_bindings
  .insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
  subquery.wheres = ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, parent)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows.clone())),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e.clone(),
- QueryValue::Column(d1e),
+ d1a.clone(),
+ parent,
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), ambar)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d2a.clone(),
+ knows.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d2v.clone(),
+ daphne,
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
  d0e.clone(),
- QueryValue::Column(d2e),
+ QueryValue::Column(d1e.clone()),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0e.clone(),
+ QueryValue::Column(d2e.clone()),
  )),
  ]);

@@ -238,10 +247,16 @@ mod testing {
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, john)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ knows.clone()
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0v.clone(),
+ john
+ )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
- ComputedTable::Subquery(Box::new(subquery))
+ ComputedTable::Subquery(subquery)
  )),
  ])
  );
@@ -302,14 +317,17 @@ mod testing {
  .column_bindings
  .insert(vy.clone(), vec![d0v.clone(), d3v.clone()]);
  subquery.wheres = ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d3a, parent)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e.clone(),
- QueryValue::Column(d3e),
+ d3a.clone(),
+ parent,
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0v,
- QueryValue::Column(d3v),
+ d0e.clone(),
+ QueryValue::Column(d3e.clone()),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0v.clone(),
+ QueryValue::Column(d3v.clone()),
  )),
  ]);

@@ -318,17 +336,26 @@ mod testing {
  .insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
  subquery
  .known_types
- .insert(vy, ValueTypeSet::of_one(ValueType::String));
+ .insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));

  assert!(!cc.is_known_empty());
  let expected_wheres = ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, age)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, eleven)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, name)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, john)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a.clone(), knows)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d1a.clone(),
+ age.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d1v.clone(),
+ eleven,
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d2a.clone(),
+ name.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v.clone(), john)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
- ComputedTable::Subquery(Box::new(subquery)),
+ ComputedTable::Subquery(subquery),
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
  d0e.clone(),
@@ -396,17 +423,29 @@ mod testing {
  .column_bindings
  .insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
  subquery.wheres = ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e.clone(),
- QueryValue::Column(d1e),
+ d1a.clone(),
+ knows.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d1v.clone(),
+ john.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d2a.clone(),
+ knows.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d2v.clone(),
+ daphne.clone(),
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
  d0e.clone(),
- QueryValue::Column(d2e),
+ QueryValue::Column(d1e.clone()),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0e.clone(),
+ QueryValue::Column(d2e.clone()),
  )),
  ]);

@@ -418,14 +457,17 @@ mod testing {
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ age.clone()
+ )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
  operator: Inequality::LessThan,
- left: QueryValue::Column(d0v),
+ left: QueryValue::Column(d0v.clone()),
  right: QueryValue::TypedValue(TypedValue::Long(30)),
  }),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
- ComputedTable::Subquery(Box::new(subquery))
+ ComputedTable::Subquery(subquery)
  )),
  ])
  );
@@ -448,7 +490,7 @@ mod testing {
  let d0 = "datoms00".to_string();
  let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
  let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
- let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
+ let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);

  let d1 = "datoms01".to_string();
  let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
@@ -492,36 +534,51 @@ mod testing {
  ]),
  ColumnIntersection(vec![
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d1a,
+ d1a.clone(),
  knows.clone(),
  )),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d1v.clone(),
+ ambar,
+ )),
  ]),
  ])),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, parent)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e.clone(),
- QueryValue::Column(d1e),
+ d2a.clone(),
+ parent,
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e,
- QueryValue::Column(d2e),
+ d2v.clone(),
+ daphne,
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0e.clone(),
+ QueryValue::Column(d1e.clone()),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0e.clone(),
+ QueryValue::Column(d2e.clone()),
  )),
  ]);

  subquery
  .known_types
- .insert(vx, ValueTypeSet::of_one(ValueType::Ref));
+ .insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));

  assert!(!cc.is_known_empty());
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ knows
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0v.clone(),
+ bill
+ )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
- ComputedTable::Subquery(Box::new(subquery))
+ ComputedTable::Subquery(subquery)
  )),
  ])
  );
@@ -554,7 +611,7 @@ mod testing {
  let d0 = "datoms00".to_string();
  let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
  let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
- let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
+ let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);

  let d1 = "datoms01".to_string();
  let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
@@ -567,17 +624,20 @@ mod testing {
  .column_bindings
  .insert(vx.clone(), vec![d0e.clone(), d1e.clone()]);
  subquery.wheres = ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0e,
- QueryValue::Column(d1e),
+ d1a.clone(),
+ knows.clone(),
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), john)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0e.clone(),
+ QueryValue::Column(d1e.clone()),
  )),
  ]);

  subquery
  .known_types
- .insert(vx, ValueTypeSet::of_one(ValueType::Ref));
+ .insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
  subquery
  .known_types
  .insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));
@@ -587,16 +647,22 @@ mod testing {
  subquery.input_variables = input_vars;
  subquery
  .value_bindings
- .insert(vy, TypedValue::typed_string("John"));
+ .insert(vy.clone(), TypedValue::typed_string("John"));

  assert!(!cc.is_known_empty());
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ knows
+ )),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0v.clone(),
+ bill
+ )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
- ComputedTable::Subquery(Box::new(subquery))
+ ComputedTable::Subquery(subquery)
  )),
  ])
  );
@@ -648,7 +714,7 @@ mod testing {
  let parsed = parse_find_string(query).expect("parse failed");
  let err = algebrize(known, parsed).expect_err("algebrization should have failed");
  match err {
- AlgebrizerError::UnboundVariable(var) => {
+ AlgebrizerErrorKind::UnboundVariable(var) => {
  assert_eq!(var, PlainSymbol("?x".to_string()));
  }
  x => panic!("expected Unbound Variable error, got {:?}", x),
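The not-join hunks above also drop the `Box::new` around the subquery template, passing it straight to `ComputedTable::Subquery`. That only typechecks if the `Subquery` variant stores the clauses directly (or boxes them internally); the enum below is a stand-in used to show the two call shapes, not the crate's actual definition:

    // Stand-in types for illustration; the real ComputedTable and ConjoiningClauses
    // are defined in the query-algebrizer crates.
    #[derive(Debug)]
    struct Clauses {
        wheres: Vec<String>,
    }

    #[derive(Debug)]
    enum ComputedTable {
        // If this held Box<Clauses>, call sites would need Subquery(Box::new(template)).
        Subquery(Clauses),
    }

    fn main() {
        let template = Clauses {
            wheres: vec!["datoms01.a = 66".to_string()],
        };
        // Matches the new call shape in the diff: no explicit boxing at the call site.
        let table = ComputedTable::Subquery(template);
        println!("{:?}", table);
    }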
@@ -18,17 +18,17 @@ use edn::query::{
  WhereClause,
  };

- use crate::clauses::{ConjoiningClauses, PushComputed};
+ use clauses::{ConjoiningClauses, PushComputed};

  use query_algebrizer_traits::errors::Result;

- use crate::types::{
+ use types::{
  ColumnAlternation, ColumnConstraintOrAlternation, ColumnIntersection, ComputedTable,
  DatomsTable, EmptyBecause, EvolvedPattern, PlaceOrEmpty, QualifiedAlias, SourceAlias,
  VariableColumn,
  };

- use crate::Known;
+ use Known;

  /// Return true if both left and right are the same variable or both are non-variable.
  fn _simply_matches_place(left: &PatternNonValuePlace, right: &PatternNonValuePlace) -> bool {
@@ -642,7 +642,7 @@ impl ConjoiningClauses {
  // For any variable which has an imprecise type anywhere in the UNION, add it to the
  // set that needs type extraction. All UNION arms must project the same columns.
  for var in projection.iter() {
- if acc.iter().any(|cc| cc.known_type(var).is_none()) {
+ if acc.iter().any(|cc| !cc.known_type(var).is_some()) {
  type_needed.insert(var.clone());
  }
  }
@@ -672,7 +672,7 @@ impl ConjoiningClauses {
  }

  let union = ComputedTable::Union {
- projection,
+ projection: projection,
  type_extraction: type_needed,
  arms: acc,
  };
@@ -727,10 +727,10 @@ fn union_types(
  for (var, new_types) in additional_types {
  match into.entry(var.clone()) {
  Entry::Vacant(e) => {
- e.insert(*new_types);
+ e.insert(new_types.clone());
  }
  Entry::Occupied(mut e) => {
- let new = e.get().union(*new_types);
+ let new = e.get().union(&new_types);
  e.insert(new);
  }
  }
@@ -750,14 +750,14 @@ mod testing {

  use edn::query::{Keyword, Variable};

- use crate::clauses::{add_attribute, associate_ident};
+ use clauses::{add_attribute, associate_ident};

- use crate::types::{
+ use types::{
  ColumnConstraint, DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue,
  SourceAlias,
  };

- use crate::{algebrize, algebrize_with_counter, parse_find_string};
+ use {algebrize, algebrize_with_counter, parse_find_string};

  fn alg(known: Known, input: &str) -> ConjoiningClauses {
  let parsed = parse_find_string(input).expect("parse failed");
@@ -920,10 +920,12 @@ mod testing {
  ]),
  ColumnIntersection(vec![
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0a, knows
+ d0a.clone(),
+ knows
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d0v, daphne
+ d0v.clone(),
+ daphne
  ))
  ]),
  ])
@@ -965,7 +967,10 @@ mod testing {
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, name)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ name.clone()
+ )),
  ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
  ColumnIntersection(vec![
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
@@ -989,10 +994,12 @@ mod testing {
  ]),
  ColumnIntersection(vec![
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d1a, knows
+ d1a.clone(),
+ knows
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d1v, daphne
+ d1v.clone(),
+ daphne
  ))
  ]),
  ])),
@@ -1044,10 +1051,13 @@ mod testing {
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ age.clone()
+ )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
  operator: Inequality::LessThan,
- left: QueryValue::Column(d0v),
+ left: QueryValue::Column(d0v.clone()),
  right: QueryValue::TypedValue(TypedValue::Long(30)),
  }),
  ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
@@ -1063,10 +1073,12 @@ mod testing {
  ]),
  ColumnIntersection(vec![
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d1a, knows
+ d1a.clone(),
+ knows
  )),
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
- d1v, daphne
+ d1v.clone(),
+ daphne
  ))
  ]),
  ])),
@@ -1112,7 +1124,10 @@ mod testing {
  assert_eq!(
  cc.wheres,
  ColumnIntersection(vec![
- ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
+ ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
+ d0a.clone(),
+ knows.clone()
+ )),
  // The outer pattern joins against the `or` on the entity, but not value -- ?y means
  // different things in each place.
  ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
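The pattern-handling hunks that follow rewrite matches over references from `Variant(ref v)` arms to explicit `&Variant(ref v)` arms. Both compile against a `&T` scrutinee; the first relies on match ergonomics, the second destructures the reference itself. A small sketch with a stand-in enum, not the algebrizer's real place types:

    // Stand-in for the algebrizer's place enums.
    enum Place {
        Placeholder,
        Variable(String),
    }

    fn describe(place: &Place) -> String {
        match place {
            // Explicit reference patterns, matching the style used in the new code.
            &Place::Placeholder => "placeholder".to_string(),
            &Place::Variable(ref v) => format!("variable {}", v),
        }
    }

    fn main() {
        assert_eq!(describe(&Place::Placeholder), "placeholder");
        assert_eq!(describe(&Place::Variable("?x".to_string())), "variable ?x");
    }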
@@ -8,8 +8,6 @@
  // CONDITIONS OF ANY KIND, either express or implied. See the License for the
  // specific language governing permissions and limitations under the License.

- #![allow(clippy::single_match)]
-
  use core_traits::{Entid, TypedValue, ValueType, ValueTypeSet};

  use mentat_core::{Cloned, HasSchema};
@@ -18,18 +16,18 @@ use edn::query::{
  NonIntegerConstant, Pattern, PatternNonValuePlace, PatternValuePlace, SrcVar, Variable,
  };

- use crate::clauses::ConjoiningClauses;
+ use clauses::ConjoiningClauses;

- use crate::types::{
+ use types::{
  ColumnConstraint, DatomsColumn, EmptyBecause, EvolvedNonValuePlace, EvolvedPattern,
  EvolvedValuePlace, PlaceOrEmpty, SourceAlias,
  };

- use crate::Known;
+ use Known;

  pub fn into_typed_value(nic: NonIntegerConstant) -> TypedValue {
  match nic {
- NonIntegerConstant::BigInteger(_) => unimplemented!(), // TODO(gburd): #280.
+ NonIntegerConstant::BigInteger(_) => unimplemented!(), // TODO: #280.
  NonIntegerConstant::Boolean(v) => TypedValue::Boolean(v),
  NonIntegerConstant::Float(v) => TypedValue::Double(v),
  NonIntegerConstant::Text(v) => v.into(),
@@ -95,15 +93,17 @@ impl ConjoiningClauses {
  self.constrain_to_ref(&pattern.entity);
  self.constrain_to_ref(&pattern.attribute);

- let col = &alias.1;
+ let ref col = alias.1;

  let schema = known.schema;
  match pattern.entity {
  EvolvedNonValuePlace::Placeholder =>
  // Placeholders don't contribute any column bindings, nor do
  // they constrain the query -- there's no need to produce
  // IS NOT NULL, because we don't store nulls in our schema.
- {}
+ {
+ ()
+ }
  EvolvedNonValuePlace::Variable(ref v) => {
  self.bind_column_to_var(schema, col.clone(), DatomsColumn::Entity, v.clone())
  }
@@ -287,7 +287,7 @@ impl ConjoiningClauses {
  None => {
  self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
  value: val.clone(),
- attr,
+ attr: attr,
  });
  true
  }
@@ -301,7 +301,7 @@ impl ConjoiningClauses {
  None => {
  self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
  value: val.clone(),
- attr,
+ attr: attr,
  });
  true
  }
@@ -403,8 +403,8 @@ impl ConjoiningClauses {
  None => {
  self.mark_known_empty(
  EmptyBecause::CachedAttributeHasNoValues {
- entity,
- attr,
+ entity: entity,
+ attr: attr,
  },
  );
  return true;
@@ -416,7 +416,7 @@ impl ConjoiningClauses {
  }
  }
  }
- _ => {} // TODO: check constant values against the cache.
+ _ => {} // TODO: check constant values against cache.
  }
  }
  _ => {}
@@ -591,7 +591,7 @@ impl ConjoiningClauses {
  entity: e,
  attribute: a,
  value: v,
- tx,
+ tx: tx,
  }),
  },
  },
@@ -612,7 +612,7 @@ impl ConjoiningClauses {
  let mut new_value: Option<EvolvedValuePlace> = None;

  match &pattern.entity {
- EvolvedNonValuePlace::Variable(ref var) => {
+ &EvolvedNonValuePlace::Variable(ref var) => {
  // See if we have it yet!
  match self.bound_value(&var) {
  None => (),
@@ -631,12 +631,12 @@ impl ConjoiningClauses {
  _ => (),
  }
  match &pattern.value {
- EvolvedValuePlace::Variable(ref var) => {
+ &EvolvedValuePlace::Variable(ref var) => {
  // See if we have it yet!
  match self.bound_value(&var) {
  None => (),
  Some(tv) => {
- new_value = Some(EvolvedValuePlace::Value(tv));
+ new_value = Some(EvolvedValuePlace::Value(tv.clone()));
  }
  };
  }
@@ -679,6 +679,7 @@ impl ConjoiningClauses {
  // between an attribute and a value.
  // We know we cannot return a result, so we short-circuit here.
  self.mark_known_empty(EmptyBecause::AttributeLookupFailed);
+ return;
  }
  }
  }
@@ -696,13 +697,11 @@ mod testing {

  use edn::query::{Keyword, Variable};

- use crate::clauses::{add_attribute, associate_ident, ident, QueryInputs};
+ use clauses::{add_attribute, associate_ident, ident, QueryInputs};

- use crate::types::{
- Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias,
- };
+ use types::{Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias};

- use crate::{algebrize, parse_find_string};
+ use {algebrize, parse_find_string};

  fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
  let parsed = parse_find_string(input).expect("parse failed");
@@ -797,7 +796,7 @@ mod testing {
  assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

  // ?x is bound to datoms0.e.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);

  // Our 'where' clauses are two:
  // - datoms0.a = 99
@@ -846,7 +845,7 @@ mod testing {
  assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

  // ?x is bound to datoms0.e.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);

  // Our 'where' clauses are two:
  // - datoms0.v = true
@@ -890,7 +889,7 @@ mod testing {
  Pattern {
  source: None,
  entity: PatternNonValuePlace::Variable(x.clone()),
- attribute: PatternNonValuePlace::Variable(a),
+ attribute: PatternNonValuePlace::Variable(a.clone()),
  value: PatternValuePlace::Variable(v.clone()),
  tx: PatternNonValuePlace::Placeholder,
  },
@@ -915,7 +914,7 @@ mod testing {
  assert_eq!(cc.known_type(&v), Some(ValueType::Boolean));

  // ?x is bound to datoms0.e.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
  assert_eq!(
  cc.wheres,
  vec![ColumnConstraint::Equals(d0_a, QueryValue::Entid(99)),].into()
@@ -940,9 +939,9 @@ mod testing {
  known,
  Pattern {
  source: None,
- entity: PatternNonValuePlace::Variable(x),
- attribute: PatternNonValuePlace::Variable(a),
- value: PatternValuePlace::Variable(v),
+ entity: PatternNonValuePlace::Variable(x.clone()),
+ attribute: PatternNonValuePlace::Variable(a.clone()),
+ value: PatternValuePlace::Variable(v.clone()),
  tx: PatternNonValuePlace::Placeholder,
  },
  );
@@ -969,8 +968,8 @@ mod testing {
  Pattern {
  source: None,
  entity: PatternNonValuePlace::Variable(x.clone()),
- attribute: PatternNonValuePlace::Variable(a),
- value: PatternValuePlace::Variable(v),
+ attribute: PatternNonValuePlace::Variable(a.clone()),
+ value: PatternValuePlace::Variable(v.clone()),
  tx: PatternNonValuePlace::Placeholder,
  },
  );
@@ -992,7 +991,7 @@ mod testing {
  assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

  // ?x is bound to datoms0.e.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
  assert_eq!(cc.wheres, vec![].into());
  }

@@ -1033,7 +1032,7 @@ mod testing {
  assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

  // ?x is bound to datoms0.e.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);

  // Our 'where' clauses are two:
  // - datoms0.v = 'hello'
@@ -1095,7 +1094,7 @@ mod testing {
  source: None,
  entity: PatternNonValuePlace::Variable(x.clone()),
  attribute: ident("foo", "bar"),
- value: PatternValuePlace::Variable(y),
+ value: PatternValuePlace::Variable(y.clone()),
  tx: PatternNonValuePlace::Placeholder,
  },
  );
@@ -1204,7 +1203,7 @@ mod testing {
  assert!(!cc.column_bindings.contains_key(&y));

  // ?x is bound to the entity.
- assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+ assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
  }

  #[test]
@@ -1239,9 +1238,9 @@ mod testing {
  known,
  Pattern {
  source: None,
- entity: PatternNonValuePlace::Variable(x),
+ entity: PatternNonValuePlace::Variable(x.clone()),
  attribute: ident("foo", "bar"),
- value: PatternValuePlace::Variable(y),
+ value: PatternValuePlace::Variable(y.clone()),
  tx: PatternNonValuePlace::Placeholder,
  },
  );
@@ -1284,9 +1283,9 @@ mod testing {
  known,
  Pattern {
  source: None,
- entity: PatternNonValuePlace::Variable(x),
+ entity: PatternNonValuePlace::Variable(x.clone()),
  attribute: ident("foo", "bar"),
|
attribute: ident("foo", "bar"),
|
||||||
value: PatternValuePlace::Variable(y),
|
value: PatternValuePlace::Variable(y.clone()),
|
||||||
tx: PatternNonValuePlace::Placeholder,
|
tx: PatternNonValuePlace::Placeholder,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
@ -1340,7 +1339,7 @@ mod testing {
|
||||||
known,
|
known,
|
||||||
Pattern {
|
Pattern {
|
||||||
source: None,
|
source: None,
|
||||||
entity: PatternNonValuePlace::Variable(x),
|
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||||
attribute: ident("foo", "bar"),
|
attribute: ident("foo", "bar"),
|
||||||
value: PatternValuePlace::Variable(y.clone()),
|
value: PatternValuePlace::Variable(y.clone()),
|
||||||
tx: PatternNonValuePlace::Placeholder,
|
tx: PatternNonValuePlace::Placeholder,
|
||||||
|
@ -1354,7 +1353,7 @@ mod testing {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
cc.empty_because.unwrap(),
|
cc.empty_because.unwrap(),
|
||||||
EmptyBecause::TypeMismatch {
|
EmptyBecause::TypeMismatch {
|
||||||
var: y,
|
var: y.clone(),
|
||||||
existing: ValueTypeSet::of_one(ValueType::String),
|
existing: ValueTypeSet::of_one(ValueType::String),
|
||||||
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
||||||
}
|
}
|
||||||
|
@ -1391,8 +1390,8 @@ mod testing {
|
||||||
known,
|
known,
|
||||||
Pattern {
|
Pattern {
|
||||||
source: None,
|
source: None,
|
||||||
entity: PatternNonValuePlace::Variable(z),
|
entity: PatternNonValuePlace::Variable(z.clone()),
|
||||||
attribute: PatternNonValuePlace::Variable(y),
|
attribute: PatternNonValuePlace::Variable(y.clone()),
|
||||||
value: PatternValuePlace::Variable(x.clone()),
|
value: PatternValuePlace::Variable(x.clone()),
|
||||||
tx: PatternNonValuePlace::Placeholder,
|
tx: PatternNonValuePlace::Placeholder,
|
||||||
},
|
},
|
||||||
|
@ -1405,7 +1404,7 @@ mod testing {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
cc.empty_because.unwrap(),
|
cc.empty_because.unwrap(),
|
||||||
EmptyBecause::TypeMismatch {
|
EmptyBecause::TypeMismatch {
|
||||||
var: x,
|
var: x.clone(),
|
||||||
existing: ValueTypeSet::of_one(ValueType::Ref),
|
existing: ValueTypeSet::of_one(ValueType::Ref),
|
||||||
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
||||||
}
|
}
|
||||||
|
|
|
@@ -14,15 +14,15 @@ use mentat_core::Schema;

 use edn::query::{FnArg, PlainSymbol, Predicate, TypeAnnotation};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

-use crate::clauses::convert::ValueTypes;
+use clauses::convert::ValueTypes;

-use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

-use crate::types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};
+use types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};

-use crate::Known;
+use Known;

 /// Application of predicates.
 impl ConjoiningClauses {
@@ -38,13 +38,13 @@ impl ConjoiningClauses {
         if let Some(op) = Inequality::from_datalog_operator(predicate.operator.0.as_str()) {
             self.apply_inequality(known, op, predicate)
         } else {
-            bail!(AlgebrizerError::UnknownFunction(predicate.operator.clone()))
+            bail!(AlgebrizerErrorKind::UnknownFunction(predicate.operator.clone()))
         }
     }

     fn potential_types(&self, schema: &Schema, fn_arg: &FnArg) -> Result<ValueTypeSet> {
         match fn_arg {
-            FnArg::Variable(ref v) => Ok(self.known_type_set(v)),
+            &FnArg::Variable(ref v) => Ok(self.known_type_set(v)),
             _ => fn_arg.potential_types(schema),
         }
     }
@@ -56,7 +56,7 @@ impl ConjoiningClauses {
             Some(value_type) => {
                 self.add_type_requirement(anno.variable.clone(), ValueTypeSet::of_one(value_type))
             }
-            None => bail!(AlgebrizerError::InvalidArgumentType(
+            None => bail!(AlgebrizerErrorKind::InvalidArgumentType(
                 PlainSymbol::plain("type"),
                 ValueTypeSet::any(),
                 2
@@ -76,7 +76,7 @@ impl ConjoiningClauses {
         predicate: Predicate,
     ) -> Result<()> {
         if predicate.args.len() != 2 {
-            bail!(AlgebrizerError::InvalidNumberOfArguments(
+            bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
                 predicate.operator.clone(),
                 predicate.args.len(),
                 2
@@ -95,10 +95,10 @@ impl ConjoiningClauses {
         let supported_types = comparison.supported_types();
         let mut left_types = self
             .potential_types(known.schema, &left)?
-            .intersection(supported_types);
+            .intersection(&supported_types);
         if left_types.is_empty() {
-            bail!(AlgebrizerError::InvalidArgumentType(
-                predicate.operator,
+            bail!(AlgebrizerErrorKind::InvalidArgumentType(
+                predicate.operator.clone(),
                 supported_types,
                 0
             ));
@@ -106,10 +106,10 @@ impl ConjoiningClauses {

         let mut right_types = self
             .potential_types(known.schema, &right)?
-            .intersection(supported_types);
+            .intersection(&supported_types);
         if right_types.is_empty() {
-            bail!(AlgebrizerError::InvalidArgumentType(
-                predicate.operator,
+            bail!(AlgebrizerErrorKind::InvalidArgumentType(
+                predicate.operator.clone(),
                 supported_types,
                 1
             ));
@@ -125,7 +125,7 @@ impl ConjoiningClauses {
             left_types.insert(ValueType::Double);
         }

-        let shared_types = left_types.intersection(right_types);
+        let shared_types = left_types.intersection(&right_types);
         if shared_types.is_empty() {
             // In isolation these are both valid inputs to the operator, but the query cannot
             // succeed because the types don't match.
@@ -160,8 +160,8 @@ impl ConjoiningClauses {
             left_v = self.resolve_ref_argument(known.schema, &predicate.operator, 0, left)?;
             right_v = self.resolve_ref_argument(known.schema, &predicate.operator, 1, right)?;
         } else {
-            bail!(AlgebrizerError::InvalidArgumentType(
-                predicate.operator,
+            bail!(AlgebrizerErrorKind::InvalidArgumentType(
+                predicate.operator.clone(),
                 supported_types,
                 0
             ));
@@ -176,8 +176,8 @@ impl ConjoiningClauses {
    }
 }

 impl Inequality {
-    fn to_constraint(self, left: QueryValue, right: QueryValue) -> ColumnConstraint {
-        match self {
+    fn to_constraint(&self, left: QueryValue, right: QueryValue) -> ColumnConstraint {
+        match *self {
             Inequality::TxAfter | Inequality::TxBefore => {
                 // TODO: both ends of the range must be inside the tx partition!
                 // If we know the partition map -- and at this point we do, it's just
@@ -188,9 +188,9 @@ impl Inequality {
            }

         ColumnConstraint::Inequality {
-            operator: self,
-            left,
-            right,
+            operator: *self,
+            left: left,
+            right: right,
         }
     }
 }
@@ -206,9 +206,9 @@ mod testing {
         FnArg, Keyword, Pattern, PatternNonValuePlace, PatternValuePlace, PlainSymbol, Variable,
     };

-    use crate::clauses::{add_attribute, associate_ident, ident};
+    use clauses::{add_attribute, associate_ident, ident};

-    use crate::types::{ColumnConstraint, EmptyBecause, QueryValue};
+    use types::{ColumnConstraint, EmptyBecause, QueryValue};

     #[test]
     /// Apply two patterns: a pattern and a numeric predicate.
@@ -235,7 +235,7 @@ mod testing {
             known,
             Pattern {
                 source: None,
-                entity: PatternNonValuePlace::Variable(x),
+                entity: PatternNonValuePlace::Variable(x.clone()),
                 attribute: PatternNonValuePlace::Placeholder,
                 value: PatternValuePlace::Variable(y.clone()),
                 tx: PatternNonValuePlace::Placeholder,
@@ -348,7 +348,7 @@ mod testing {
             known,
             Pattern {
                 source: None,
-                entity: PatternNonValuePlace::Variable(x),
+                entity: PatternNonValuePlace::Variable(x.clone()),
                 attribute: ident("foo", "roz"),
                 value: PatternValuePlace::Variable(y.clone()),
                 tx: PatternNonValuePlace::Placeholder,
@@ -362,7 +362,7 @@ mod testing {
         assert_eq!(
             cc.empty_because.unwrap(),
             EmptyBecause::TypeMismatch {
-                var: y,
+                var: y.clone(),
                 existing: ValueTypeSet::of_numeric_types(),
                 desired: ValueTypeSet::of_one(ValueType::String),
             }

@@ -14,11 +14,11 @@ use mentat_core::{HasSchema, Schema};

 use edn::query::{FnArg, NonIntegerConstant, PlainSymbol};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

-use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

-use crate::types::{EmptyBecause, QueryValue};
+use types::{EmptyBecause, QueryValue};

 /// Argument resolution.
 impl ConjoiningClauses {
@@ -41,14 +41,14 @@ impl ConjoiningClauses {
                     if v.value_type().is_numeric() {
                         Ok(QueryValue::TypedValue(v))
                     } else {
-                        bail!(AlgebrizerError::InputTypeDisagreement(var.name(), ValueType::Long, v.value_type()))
+                        bail!(AlgebrizerErrorKind::InputTypeDisagreement(var.name().clone(), ValueType::Long, v.value_type()))
                     }
                 } else {
                     self.constrain_var_to_numeric(var.clone());
                     self.column_bindings
                         .get(&var)
                         .and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
-                        .ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
+                        .ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
                 }
             },
             // Can't be an entid.
@@ -62,7 +62,7 @@ impl ConjoiningClauses {
             Constant(NonIntegerConstant::BigInteger(_)) |
             Vector(_) => {
                 self.mark_known_empty(EmptyBecause::NonNumericArgument);
-                bail!(AlgebrizerError::InvalidArgument(function.clone(), "numeric", position))
+                bail!(AlgebrizerErrorKind::InvalidArgument(function.clone(), "numeric", position))
             },
             Constant(NonIntegerConstant::Float(f)) => Ok(QueryValue::TypedValue(TypedValue::Double(f))),
         }
@@ -79,8 +79,8 @@ impl ConjoiningClauses {
         match arg {
             FnArg::Variable(var) => match self.bound_value(&var) {
                 Some(TypedValue::Instant(v)) => Ok(QueryValue::TypedValue(TypedValue::Instant(v))),
-                Some(v) => bail!(AlgebrizerError::InputTypeDisagreement(
-                    var.name(),
+                Some(v) => bail!(AlgebrizerErrorKind::InputTypeDisagreement(
+                    var.name().clone(),
                     ValueType::Instant,
                     v.value_type()
                 )),
@@ -89,7 +89,7 @@ impl ConjoiningClauses {
                     self.column_bindings
                         .get(&var)
                         .and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
-                        .ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
+                        .ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
                 }
             },
             Constant(NonIntegerConstant::Instant(v)) => {
@@ -107,7 +107,7 @@ impl ConjoiningClauses {
             | Constant(NonIntegerConstant::BigInteger(_))
             | Vector(_) => {
                 self.mark_known_empty(EmptyBecause::NonInstantArgument);
-                bail!(AlgebrizerError::InvalidArgumentType(
+                bail!(AlgebrizerErrorKind::InvalidArgumentType(
                     function.clone(),
                     ValueType::Instant.into(),
                     position
@@ -136,14 +136,14 @@ impl ConjoiningClauses {
                     self.column_bindings
                         .get(&var)
                         .and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
-                        .ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
+                        .ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
                 }
             }
             EntidOrInteger(i) => Ok(QueryValue::TypedValue(TypedValue::Ref(i))),
             IdentOrKeyword(i) => schema
                 .get_entid(&i)
                 .map(|known_entid| QueryValue::Entid(known_entid.into()))
-                .ok_or_else(|| AlgebrizerError::UnrecognizedIdent(i.to_string())),
+                .ok_or_else(|| AlgebrizerErrorKind::UnrecognizedIdent(i.to_string()).into()),
             Constant(NonIntegerConstant::Boolean(_))
             | Constant(NonIntegerConstant::Float(_))
             | Constant(NonIntegerConstant::Text(_))
@@ -153,7 +153,7 @@ impl ConjoiningClauses {
             | SrcVar(_)
             | Vector(_) => {
                 self.mark_known_empty(EmptyBecause::NonEntityArgument);
-                bail!(AlgebrizerError::InvalidArgumentType(
+                bail!(AlgebrizerErrorKind::InvalidArgumentType(
                     function.clone(),
                     ValueType::Ref.into(),
                     position
@@ -188,7 +188,7 @@ impl ConjoiningClauses {
                 .column_bindings
                 .get(&var)
                 .and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
-                .ok_or_else(|| AlgebrizerError::UnboundVariable(var.name())),
+                .ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into()),
             },
             EntidOrInteger(i) => Ok(QueryValue::PrimitiveLong(i)),
             IdentOrKeyword(_) => unimplemented!(), // TODO

@@ -12,16 +12,16 @@ use core_traits::ValueType;

 use edn::query::{Binding, FnArg, SrcVar, VariableOrPlaceholder, WhereFn};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

-use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};

-use crate::types::{
+use types::{
     Column, ColumnConstraint, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
     TransactionsColumn,
 };

-use crate::Known;
+use Known;

 impl ConjoiningClauses {
     // Log in Query: tx-ids and tx-data
@@ -40,7 +40,7 @@ impl ConjoiningClauses {
     // transactions that impact one of the given attributes.
     pub(crate) fn apply_tx_ids(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
         if where_fn.args.len() != 3 {
-            bail!(AlgebrizerError::InvalidNumberOfArguments(
+            bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
                 where_fn.operator.clone(),
                 where_fn.args.len(),
                 3
@@ -49,7 +49,7 @@ impl ConjoiningClauses {

         if where_fn.binding.is_empty() {
             // The binding must introduce at least one bound variable.
-            bail!(AlgebrizerError::InvalidBinding(
+            bail!(AlgebrizerErrorKind::InvalidBinding(
                 where_fn.operator.clone(),
                 BindingError::NoBoundVariable
             ));
@@ -57,7 +57,7 @@ impl ConjoiningClauses {

         if !where_fn.binding.is_valid() {
             // The binding must not duplicate bound variables.
-            bail!(AlgebrizerError::InvalidBinding(
+            bail!(AlgebrizerErrorKind::InvalidBinding(
                 where_fn.operator.clone(),
                 BindingError::RepeatedBoundVariable
             ));
@@ -68,7 +68,7 @@ impl ConjoiningClauses {
             Binding::BindRel(bindings) => {
                 let bindings_count = bindings.len();
                 if bindings_count != 1 {
-                    bail!(AlgebrizerError::InvalidBinding(
+                    bail!(AlgebrizerErrorKind::InvalidBinding(
                         where_fn.operator.clone(),
                         BindingError::InvalidNumberOfBindings {
                             number: bindings_count,
@@ -83,7 +83,7 @@ impl ConjoiningClauses {
             }
             Binding::BindColl(v) => v,
             Binding::BindScalar(_) | Binding::BindTuple(_) => {
-                bail!(AlgebrizerError::InvalidBinding(
+                bail!(AlgebrizerErrorKind::InvalidBinding(
                     where_fn.operator.clone(),
                     BindingError::ExpectedBindRelOrBindColl
                 ))
@@ -95,7 +95,7 @@ impl ConjoiningClauses {
         // TODO: process source variables.
         match args.next().unwrap() {
             FnArg::SrcVar(SrcVar::DefaultSrc) => {}
-            _ => bail!(AlgebrizerError::InvalidArgument(
+            _ => bail!(AlgebrizerErrorKind::InvalidArgument(
                 where_fn.operator.clone(),
                 "source variable",
                 0
@@ -122,7 +122,7 @@ impl ConjoiningClauses {
             known.schema,
             transactions.clone(),
             TransactionsColumn::Tx,
-            tx_var,
+            tx_var.clone(),
         );

         let after_constraint = ColumnConstraint::Inequality {
@@ -138,7 +138,7 @@ impl ConjoiningClauses {
         let before_constraint = ColumnConstraint::Inequality {
             operator: Inequality::LessThan,
             left: QueryValue::Column(QualifiedAlias(
-                transactions,
+                transactions.clone(),
                 Column::Transactions(TransactionsColumn::Tx),
             )),
             right: tx2,
@@ -150,7 +150,7 @@ impl ConjoiningClauses {

     pub(crate) fn apply_tx_data(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
         if where_fn.args.len() != 2 {
-            bail!(AlgebrizerError::InvalidNumberOfArguments(
+            bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
                 where_fn.operator.clone(),
                 where_fn.args.len(),
                 2
@@ -159,7 +159,7 @@ impl ConjoiningClauses {

         if where_fn.binding.is_empty() {
             // The binding must introduce at least one bound variable.
-            bail!(AlgebrizerError::InvalidBinding(
+            bail!(AlgebrizerErrorKind::InvalidBinding(
                 where_fn.operator.clone(),
                 BindingError::NoBoundVariable
             ));
@@ -167,7 +167,7 @@ impl ConjoiningClauses {

         if !where_fn.binding.is_valid() {
             // The binding must not duplicate bound variables.
-            bail!(AlgebrizerError::InvalidBinding(
+            bail!(AlgebrizerErrorKind::InvalidBinding(
                 where_fn.operator.clone(),
                 BindingError::RepeatedBoundVariable
             ));
@@ -178,7 +178,7 @@ impl ConjoiningClauses {
             Binding::BindRel(bindings) => {
                 let bindings_count = bindings.len();
                 if bindings_count < 1 || bindings_count > 5 {
-                    bail!(AlgebrizerError::InvalidBinding(
+                    bail!(AlgebrizerErrorKind::InvalidBinding(
                         where_fn.operator.clone(),
                         BindingError::InvalidNumberOfBindings {
                             number: bindings.len(),
@@ -189,7 +189,7 @@ impl ConjoiningClauses {
                 bindings
             }
             Binding::BindScalar(_) | Binding::BindTuple(_) | Binding::BindColl(_) => {
-                bail!(AlgebrizerError::InvalidBinding(
+                bail!(AlgebrizerErrorKind::InvalidBinding(
                     where_fn.operator.clone(),
                     BindingError::ExpectedBindRel
                 ))
@@ -217,7 +217,7 @@ impl ConjoiningClauses {
         // TODO: process source variables.
         match args.next().unwrap() {
             FnArg::SrcVar(SrcVar::DefaultSrc) => {}
-            _ => bail!(AlgebrizerError::InvalidArgument(
+            _ => bail!(AlgebrizerErrorKind::InvalidArgument(
                 where_fn.operator.clone(),
                 "source variable",
                 0
@@ -306,7 +306,7 @@ impl ConjoiningClauses {

         self.bind_column_to_var(
             known.schema,
-            transactions,
+            transactions.clone(),
             TransactionsColumn::Added,
             var.clone(),
         );

@@ -10,11 +10,11 @@

 use edn::query::WhereFn;

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

-use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

-use crate::Known;
+use Known;

 /// Application of `where` functions.
 impl ConjoiningClauses {
@@ -32,7 +32,7 @@ impl ConjoiningClauses {
             "ground" => self.apply_ground(known, where_fn),
             "tx-data" => self.apply_tx_data(known, where_fn),
             "tx-ids" => self.apply_tx_ids(known, where_fn),
-            _ => bail!(AlgebrizerError::UnknownFunction(where_fn.operator.clone())),
+            _ => bail!(AlgebrizerErrorKind::UnknownFunction(where_fn.operator.clone())),
         }
     }
 }

@@ -32,11 +32,11 @@ use mentat_core::counter::RcCounter;

 use edn::query::{Element, FindSpec, Limit, Order, ParsedQuery, SrcVar, Variable, WhereClause};

-use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

-pub use crate::clauses::{QueryInputs, VariableBindings};
+pub use clauses::{QueryInputs, VariableBindings};

-pub use crate::types::{EmptyBecause, FindQuery};
+pub use types::{EmptyBecause, FindQuery};

 /// A convenience wrapper around things known in memory: the schema and caches.
 /// We use a trait object here to avoid making dozens of functions generic over the type
@@ -229,7 +229,7 @@ fn validate_and_simplify_order(

     // Fail if the var isn't bound by the query.
     if !cc.column_bindings.contains_key(&var) {
-        bail!(AlgebrizerError::UnboundVariable(var.name()))
+        bail!(AlgebrizerErrorKind::UnboundVariable(var.name()))
     }

     // Otherwise, determine if we also need to order by type…
@@ -263,7 +263,7 @@ fn simplify_limit(mut query: AlgebraicQuery) -> Result<AlgebraicQuery> {
         Some(TypedValue::Long(n)) => {
             if n <= 0 {
                 // User-specified limits should always be natural numbers (> 0).
-                bail!(AlgebrizerError::InvalidLimit(
+                bail!(AlgebrizerErrorKind::InvalidLimit(
                     n.to_string(),
                     ValueType::Long
                 ))
@@ -273,7 +273,7 @@ fn simplify_limit(mut query: AlgebraicQuery) -> Result<AlgebraicQuery> {
         }
         Some(val) => {
             // Same.
-            bail!(AlgebrizerError::InvalidLimit(
+            bail!(AlgebrizerErrorKind::InvalidLimit(
                 format!("{:?}", val),
                 val.value_type()
             ))
@@ -312,7 +312,7 @@ pub fn algebrize_with_inputs(
     cc.derive_types_from_find_spec(&parsed.find_spec);

     // Do we have a variable limit? If so, tell the CC that the var must be numeric.
-    if let Limit::Variable(ref var) = parsed.limit {
+    if let &Limit::Variable(ref var) = &parsed.limit {
         cc.constrain_var_to_long(var.clone());
     }

@@ -338,18 +338,18 @@ pub fn algebrize_with_inputs(
         has_aggregates: false, // TODO: we don't parse them yet.
         with: parsed.with,
         named_projection: extra_vars,
-        order,
-        limit,
-        cc,
+        order: order,
+        limit: limit,
+        cc: cc,
     };

     // Substitute in any fixed values and fail if they're out of range.
     simplify_limit(q)
 }

-pub use crate::clauses::ConjoiningClauses;
+pub use clauses::ConjoiningClauses;

-pub use crate::types::{
+pub use types::{
     Column, ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
     ColumnName, ComputedTable, DatomsColumn, DatomsTable, FulltextColumn, OrderBy, QualifiedAlias,
     QueryValue, SourceAlias, TableAlias, VariableColumn,
@@ -364,7 +364,7 @@ impl FindQuery {
             in_vars: BTreeSet::default(),
             in_sources: BTreeSet::default(),
             limit: Limit::None,
-            where_clauses,
+            where_clauses: where_clauses,
             order: None,
         }
     }
@@ -375,7 +375,7 @@ impl FindQuery {

         for var in parsed.in_vars.into_iter() {
             if !set.insert(var.clone()) {
-                bail!(AlgebrizerError::DuplicateVariableError(var.name(), ":in"));
+                bail!(AlgebrizerErrorKind::DuplicateVariableError(var.name(), ":in"));
             }
         }

@@ -387,7 +387,7 @@ impl FindQuery {

         for var in parsed.with.into_iter() {
             if !set.insert(var.clone()) {
-                bail!(AlgebrizerError::DuplicateVariableError(var.name(), ":with"));
+                bail!(AlgebrizerErrorKind::DuplicateVariableError(var.name(), ":with"));
             }
         }

@@ -397,7 +397,7 @@ impl FindQuery {
         // Make sure that if we have `:limit ?x`, `?x` appears in `:in`.
         if let Limit::Variable(ref v) = parsed.limit {
             if !in_vars.contains(v) {
-                bail!(AlgebrizerError::UnknownLimitVar(v.name()));
+                bail!(AlgebrizerErrorKind::UnknownLimitVar(v.name()));
             }
         }

@@ -417,5 +417,5 @@ impl FindQuery {
 pub fn parse_find_string(string: &str) -> Result<FindQuery> {
     parse_query(string)
         .map_err(|e| e.into())
-        .and_then(FindQuery::from_parsed_query)
+        .and_then(|parsed| FindQuery::from_parsed_query(parsed))
 }

@@ -32,11 +32,11 @@ pub enum DatomsTable {
 /// A source of rows that isn't a named table -- typically a subquery or union.
 #[derive(PartialEq, Eq, Debug)]
 pub enum ComputedTable {
-    Subquery(Box<crate::clauses::ConjoiningClauses>),
+    Subquery(::clauses::ConjoiningClauses),
     Union {
         projection: BTreeSet<Variable>,
         type_extraction: BTreeSet<Variable>,
-        arms: Vec<crate::clauses::ConjoiningClauses>,
+        arms: Vec<::clauses::ConjoiningClauses>,
     },
     NamedValues {
         names: Vec<Variable>,
@@ -153,8 +153,8 @@ impl ColumnName for DatomsColumn {
 impl ColumnName for VariableColumn {
     fn column_name(&self) -> String {
         match self {
-            VariableColumn::Variable(ref v) => v.to_string(),
-            VariableColumn::VariableTypeTag(ref v) => format!("{}_value_type_tag", v.as_str()),
+            &VariableColumn::Variable(ref v) => v.to_string(),
+            &VariableColumn::VariableTypeTag(ref v) => format!("{}_value_type_tag", v.as_str()),
         }
     }
 }
@@ -163,8 +163,8 @@ impl Debug for VariableColumn {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         match self {
             // These should agree with VariableColumn::column_name.
-            VariableColumn::Variable(ref v) => write!(f, "{}", v.as_str()),
-            VariableColumn::VariableTypeTag(ref v) => write!(f, "{}_value_type_tag", v.as_str()),
+            &VariableColumn::Variable(ref v) => write!(f, "{}", v.as_str()),
+            &VariableColumn::VariableTypeTag(ref v) => write!(f, "{}_value_type_tag", v.as_str()),
         }
     }
 }
@@ -178,10 +178,10 @@ impl Debug for DatomsColumn {
 impl Debug for Column {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         match self {
-            Column::Fixed(ref c) => c.fmt(f),
-            Column::Fulltext(ref c) => c.fmt(f),
-            Column::Variable(ref v) => v.fmt(f),
-            Column::Transactions(ref t) => t.fmt(f),
+            &Column::Fixed(ref c) => c.fmt(f),
+            &Column::Fulltext(ref c) => c.fmt(f),
+            &Column::Variable(ref v) => v.fmt(f),
+            &Column::Transactions(ref t) => t.fmt(f),
         }
     }
 }
@@ -298,10 +298,10 @@ impl Debug for QueryValue {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         use self::QueryValue::*;
         match self {
-            Column(ref qa) => write!(f, "{:?}", qa),
-            Entid(ref entid) => write!(f, "entity({:?})", entid),
-            TypedValue(ref typed_value) => write!(f, "value({:?})", typed_value),
-            PrimitiveLong(value) => write!(f, "primitive({:?})", value),
+            &Column(ref qa) => write!(f, "{:?}", qa),
+            &Entid(ref entid) => write!(f, "entity({:?})", entid),
+            &TypedValue(ref typed_value) => write!(f, "value({:?})", typed_value),
+            &PrimitiveLong(value) => write!(f, "primitive({:?})", value),
         }
     }
 }
@@ -375,15 +375,15 @@ impl Inequality {
     }

     // The built-in inequality operators apply to Long, Double, and Instant.
-    pub fn supported_types(self) -> ValueTypeSet {
+    pub fn supported_types(&self) -> ValueTypeSet {
         use self::Inequality::*;
         match self {
-            LessThan | LessThanOrEquals | GreaterThan | GreaterThanOrEquals | NotEquals => {
+            &LessThan | &LessThanOrEquals | &GreaterThan | &GreaterThanOrEquals | &NotEquals => {
                 let mut ts = ValueTypeSet::of_numeric_types();
                 ts.insert(ValueType::Instant);
                 ts
             }
-            Unpermute | Differ | TxAfter | TxBefore => ValueTypeSet::of_one(ValueType::Ref),
+            &Unpermute | &Differ | &TxAfter | &TxBefore => ValueTypeSet::of_one(ValueType::Ref),
         }
     }
 }
@@ -392,17 +392,17 @@ impl Debug for Inequality {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         use self::Inequality::*;
         f.write_str(match self {
-            LessThan => "<",
-            LessThanOrEquals => "<=",
-            GreaterThan => ">",
-            GreaterThanOrEquals => ">=",
-            NotEquals => "!=", // Datalog uses !=. SQL uses <>.
+            &LessThan => "<",
+            &LessThanOrEquals => "<=",
+            &GreaterThan => ">",
+            &GreaterThanOrEquals => ">=",
+            &NotEquals => "!=", // Datalog uses !=. SQL uses <>.

-            Unpermute => "<",
-            Differ => "<>",
+            &Unpermute => "<",
+            &Differ => "<>",

-            TxAfter => ">",
-            TxBefore => "<",
+            &TxAfter => ">",
+            &TxBefore => "<",
         })
     }
 }
@@ -534,17 +534,17 @@ impl Debug for ColumnConstraint {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         use self::ColumnConstraint::*;
         match self {
-            Equals(ref qa1, ref thing) => write!(f, "{:?} = {:?}", qa1, thing),
+            &Equals(ref qa1, ref thing) => write!(f, "{:?} = {:?}", qa1, thing),

-            Inequality {
+            &Inequality {
                 operator,
                 ref left,
                 ref right,
             } => write!(f, "{:?} {:?} {:?}", left, operator, right),

-            Matches(ref qa, ref thing) => write!(f, "{:?} MATCHES {:?}", qa, thing),
+            &Matches(ref qa, ref thing) => write!(f, "{:?} MATCHES {:?}", qa, thing),

-            HasTypes {
+            &HasTypes {
                 ref value,
                 ref value_types,
                 check_value,
@@ -553,7 +553,7 @@ impl Debug for ColumnConstraint {
                 write!(f, "(")?;
                 for value_type in value_types.iter() {
                     write!(f, "({:?}.value_type_tag = {:?}", value, value_type)?;
-                    if *check_value && value_type == ValueType::Double
+                    if check_value && value_type == ValueType::Double
                         || value_type == ValueType::Long
                     {
                         write!(
@@ -573,7 +573,7 @@ impl Debug for ColumnConstraint {
                 }
                 write!(f, "1)")
             }
-            NotExists(ref ct) => write!(f, "NOT EXISTS {:?}", ct),
+            &NotExists(ref ct) => write!(f, "NOT EXISTS {:?}", ct),
         }
     }
 }
@@ -625,15 +625,15 @@ impl Debug for EmptyBecause {
     fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
         use self::EmptyBecause::*;
         match self {
-            CachedAttributeHasNoEntity {
+            &CachedAttributeHasNoEntity {
                 ref value,
                 ref attr,
             } => write!(f, "(?e, {}, {:?}, _) not present in store", attr, value),
-            CachedAttributeHasNoValues {
+            &CachedAttributeHasNoValues {
                 ref entity,
                 ref attr,
             } => write!(f, "({}, {}, ?v, _) not present in store", entity, attr),
-            ConflictingBindings {
+            &ConflictingBindings {
                 ref var,
                 ref existing,
                 ref desired,
@@ -642,7 +642,7 @@ impl Debug for EmptyBecause {
                 "Var {:?} can't be {:?} because it's already bound to {:?}",
                 var, desired, existing
             ),
-            TypeMismatch {
+            &TypeMismatch {
                 ref var,
                 ref existing,
                 ref desired,
@@ -651,7 +651,7 @@ impl Debug for EmptyBecause {
                 "Type mismatch: {:?} can't be {:?}, because it's already {:?}",
                 var, desired, existing
             ),
-            KnownTypeMismatch {
+            &KnownTypeMismatch {
                 ref left,
                 ref right,
             } => write!(
@@ -659,25 +659,25 @@ impl Debug for EmptyBecause {
                 "Type mismatch: {:?} can't be compared to {:?}",
                 left, right
             ),
-            NoValidTypes(ref var) => write!(f, "Type mismatch: {:?} has no valid types", var),
-            NonAttributeArgument => write!(f, "Non-attribute argument in attribute place"),
-            NonInstantArgument => write!(f, "Non-instant argument in instant place"),
-            NonEntityArgument => write!(f, "Non-entity argument in entity place"),
-            NonNumericArgument => write!(f, "Non-numeric argument in numeric place"),
-            NonStringFulltextValue => write!(f, "Non-string argument for fulltext attribute"),
-            UnresolvedIdent(ref kw) => write!(f, "Couldn't resolve keyword {}", kw),
-            InvalidAttributeIdent(ref kw) => write!(f, "{} does not name an attribute", kw),
-            InvalidAttributeEntid(entid) => write!(f, "{} is not an attribute", entid),
-            NonFulltextAttribute(entid) => write!(f, "{} is not a fulltext attribute", entid),
-            InvalidBinding(ref column, ref tv) => {
+            &NoValidTypes(ref var) => write!(f, "Type mismatch: {:?} has no valid types", var),
+            &NonAttributeArgument => write!(f, "Non-attribute argument in attribute place"),
+            &NonInstantArgument => write!(f, "Non-instant argument in instant place"),
+            &NonEntityArgument => write!(f, "Non-entity argument in entity place"),
+            &NonNumericArgument => write!(f, "Non-numeric argument in numeric place"),
+            &NonStringFulltextValue => write!(f, "Non-string argument for fulltext attribute"),
+            &UnresolvedIdent(ref kw) => write!(f, "Couldn't resolve keyword {}", kw),
+            &InvalidAttributeIdent(ref kw) => write!(f, "{} does not name an attribute", kw),
+            &InvalidAttributeEntid(entid) => write!(f, "{} is not an attribute", entid),
+            &NonFulltextAttribute(entid) => write!(f, "{} is not a fulltext attribute", entid),
+            &InvalidBinding(ref column, ref tv) => {
                 write!(f, "{:?} cannot name column {:?}", tv, column)
             }
-            ValueTypeMismatch(value_type, ref typed_value) => write!(
+            &ValueTypeMismatch(value_type, ref typed_value) => write!(
                 f,
                 "Type mismatch: {:?} doesn't match attribute type {:?}",
                 typed_value, value_type
             ),
-            AttributeLookupFailed => write!(f, "Attribute lookup failed"),
+            &AttributeLookupFailed => write!(f, "Attribute lookup failed"),
         }
     }
 }

@@ -12,7 +12,7 @@ use std::collections::BTreeSet;

 use edn::query::{ContainsVariables, NotJoin, OrJoin, UnifyVars, Variable};

-use query_algebrizer_traits::errors::{AlgebrizerError, Result};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};

 /// In an `or` expression, every mentioned var is considered 'free'.
 /// In an `or-join` expression, every var in the var list is 'required'.
@@ -47,7 +47,7 @@ pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> {
             let template = clauses.next().unwrap().collect_mentioned_variables();
             for clause in clauses {
                 if template != clause.collect_mentioned_variables() {
-                    bail!(AlgebrizerError::NonMatchingVariablesInOrClause)
+                    bail!(AlgebrizerErrorKind::NonMatchingVariablesInOrClause)
                 }
             }
             Ok(())
@@ -58,7 +58,7 @@ pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> {
             let var_set: BTreeSet<Variable> = vars.iter().cloned().collect();
             for clause in &or_join.clauses {
                 if !var_set.is_subset(&clause.collect_mentioned_variables()) {
-                    bail!(AlgebrizerError::NonMatchingVariablesInOrClause)
+                    bail!(AlgebrizerErrorKind::NonMatchingVariablesInOrClause)
                 }
             }
             Ok(())
@@ -74,7 +74,7 @@ pub(crate) fn validate_not_join(not_join: &NotJoin) -> Result<()> {
     // The joined vars must each appear somewhere in the clause's mentioned variables.
     let var_set: BTreeSet<Variable> = vars.iter().cloned().collect();
     if !var_set.is_subset(&not_join.collect_mentioned_variables()) {
-        bail!(AlgebrizerError::NonMatchingVariablesInNotClause)
+        bail!(AlgebrizerErrorKind::NonMatchingVariablesInNotClause)
     }
     Ok(())
 }
@@ -91,11 +91,11 @@ mod tests {
         Variable, WhereClause,
     };

-    use crate::clauses::ident;
+    use clauses::ident;

     use super::*;
-    use crate::parse_find_string;
-    use crate::types::FindQuery;
+    use parse_find_string;
+    use types::FindQuery;

     fn value_ident(ns: &str, name: &str) -> PatternValuePlace {
         Keyword::namespaced(ns, name).into()
@@ -112,7 +112,7 @@ mod tests {
         match clause {
             WhereClause::OrJoin(or_join) => {
                 // It's valid: the variables are the same in each branch.
-                validate_or_join(&or_join).unwrap();
+                assert_eq!((), validate_or_join(&or_join).unwrap());
                 assert_eq!(expected_unify, or_join.unify_vars);
                 or_join.clauses
             }
@@ -254,10 +254,10 @@ mod tests {
     /// Tests that the top-level form is a valid `not`, returning the clauses.
     fn valid_not_join(parsed: FindQuery, expected_unify: UnifyVars) -> Vec<WhereClause> {
         // Filter out all the clauses that are not `not`s.
-        let mut nots = parsed
-            .where_clauses
-            .into_iter()
-            .filter(|x| matches!(x, WhereClause::NotJoin(_)));
+        let mut nots = parsed.where_clauses.into_iter().filter(|x| match x {
+            &WhereClause::NotJoin(_) => true,
+            _ => false,
+        });

         // There should be only one not clause.
         let clause = nots.next().unwrap();
@@ -266,7 +266,7 @@ mod tests {
         match clause {
             WhereClause::NotJoin(not_join) => {
                 // It's valid: the variables are the same in each branch.
-                validate_not_join(&not_join).unwrap();
+                assert_eq!((), validate_not_join(&not_join).unwrap());
                 assert_eq!(expected_unify, not_join.unify_vars);
                 not_join.clauses
             }
@@ -368,10 +368,11 @@ mod tests {
                          [?release :release/artists "Pink Floyd"]
                          [?release :release/year 1970])]"#;
         let parsed = parse_find_string(query).expect("expected successful parse");
-        let mut nots = parsed
-            .where_clauses
-            .iter()
-            .filter(|&x| matches!(*x, WhereClause::NotJoin(_)));
+        let mut nots = parsed.where_clauses.iter().filter(|&x| match *x {
+            WhereClause::NotJoin(_) => true,
+            _ => false,
+        });

         let clause = nots.next().unwrap().clone();
         assert_eq!(None, nots.next());
+

@@ -22,7 +22,7 @@ use mentat_core::Schema;

 use edn::query::Keyword;

-use crate::utils::{add_attribute, alg, associate_ident};
+use utils::{add_attribute, alg, associate_ident};

 use mentat_query_algebrizer::Known;

@@ -24,11 +24,11 @@ use mentat_core::Schema;

 use edn::query::{Keyword, PlainSymbol, Variable};

-use query_algebrizer_traits::errors::{AlgebrizerError, BindingError};
+use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError};

 use mentat_query_algebrizer::{ComputedTable, Known, QueryInputs};

-use crate::utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};
+use utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};

 fn prepopulated_schema() -> Schema {
     let mut schema = Schema::default();
@@ -297,7 +297,7 @@ fn test_ground_coll_heterogeneous_types() {
     let q = r#"[:find ?x :where [?x _ ?v] [(ground [false 8.5]) [?v ...]]]"#;
     let schema = prepopulated_schema();
     let known = Known::for_schema(&schema);
-    assert_eq!(bails(known, &q), AlgebrizerError::InvalidGroundConstant);
+    assert_eq!(bails(known, &q), AlgebrizerErrorKind::InvalidGroundConstant);
 }

 #[test]
@@ -305,7 +305,7 @@ fn test_ground_rel_heterogeneous_types() {
     let q = r#"[:find ?x :where [?x _ ?v] [(ground [[false] [5]]) [[?v]]]]"#;
     let schema = prepopulated_schema();
     let known = Known::for_schema(&schema);
-    assert_eq!(bails(known, &q), AlgebrizerError::InvalidGroundConstant);
+    assert_eq!(bails(known, &q), AlgebrizerErrorKind::InvalidGroundConstant);
 }

 #[test]
@@ -315,7 +315,7 @@ fn test_ground_tuple_duplicate_vars() {
     let known = Known::for_schema(&schema);
     assert_eq!(
         bails(known, &q),
-        AlgebrizerError::InvalidBinding(
+        AlgebrizerErrorKind::InvalidBinding(
             PlainSymbol::plain("ground"),
             BindingError::RepeatedBoundVariable
         )
@@ -329,7 +329,7 @@ fn test_ground_rel_duplicate_vars() {
     let known = Known::for_schema(&schema);
     assert_eq!(
         bails(known, &q),
-        AlgebrizerError::InvalidBinding(
+        AlgebrizerErrorKind::InvalidBinding(
            PlainSymbol::plain("ground"),
            BindingError::RepeatedBoundVariable
        )
@@ -343,7 +343,7 @@ fn test_ground_nonexistent_variable_invalid() {
     let known = Known::for_schema(&schema);
     assert_eq!(
         bails(known, &q),
-        AlgebrizerError::UnboundVariable(PlainSymbol::plain("?v"))
+        AlgebrizerErrorKind::UnboundVariable(PlainSymbol::plain("?v"))
     );
 }

@@ -362,6 +362,6 @@ fn test_unbound_input_variable_invalid() {

     assert_eq!(
         bails_with_inputs(known, &q, i),
-        AlgebrizerError::UnboundVariable(PlainSymbol::plain("?x"))
+        AlgebrizerErrorKind::UnboundVariable(PlainSymbol::plain("?x"))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,11 +22,11 @@ use mentat_core::{DateTime, Schema, Utc};
|
||||||
|
|
||||||
use edn::query::{Keyword, PlainSymbol, Variable};
|
use edn::query::{Keyword, PlainSymbol, Variable};
|
||||||
|
|
||||||
use query_algebrizer_traits::errors::AlgebrizerError;
|
use query_algebrizer_traits::errors::AlgebrizerErrorKind;
|
||||||
|
|
||||||
use mentat_query_algebrizer::{EmptyBecause, Known, QueryInputs};
|
use mentat_query_algebrizer::{EmptyBecause, Known, QueryInputs};
|
||||||
|
|
||||||
use crate::utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};
|
use utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};
|
||||||
|
|
||||||
fn prepopulated_schema() -> Schema {
|
fn prepopulated_schema() -> Schema {
|
||||||
let mut schema = Schema::default();
|
let mut schema = Schema::default();
|
||||||
|
@ -75,7 +75,7 @@ fn test_instant_predicates_require_instants() {
|
||||||
[(> ?t "2017-06-16T00:56:41.257Z")]]"#;
|
[(> ?t "2017-06-16T00:56:41.257Z")]]"#;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
bails(known, query),
|
bails(known, query),
|
||||||
AlgebrizerError::InvalidArgumentType(
|
AlgebrizerErrorKind::InvalidArgumentType(
|
||||||
PlainSymbol::plain(">"),
|
PlainSymbol::plain(">"),
|
||||||
ValueTypeSet::of_numeric_and_instant_types(),
|
ValueTypeSet::of_numeric_and_instant_types(),
|
||||||
1
|
1
|
||||||
|
@ -88,7 +88,7 @@ fn test_instant_predicates_require_instants() {
|
||||||
[(> "2017-06-16T00:56:41.257Z", ?t)]]"#;
|
[(> "2017-06-16T00:56:41.257Z", ?t)]]"#;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
bails(known, query),
|
bails(known, query),
|
||||||
AlgebrizerError::InvalidArgumentType(
|
AlgebrizerErrorKind::InvalidArgumentType(
|
||||||
PlainSymbol::plain(">"),
|
PlainSymbol::plain(">"),
|
||||||
ValueTypeSet::of_numeric_and_instant_types(),
|
ValueTypeSet::of_numeric_and_instant_types(),
|
||||||
0
|
0
|
||||||
|
@ -162,7 +162,7 @@ fn test_instant_predicates_accepts_var() {
|
||||||
let cc = alg_with_inputs(
|
let cc = alg_with_inputs(
|
||||||
known,
|
known,
|
||||||
query,
|
query,
|
||||||
QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value)]),
|
QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value.clone())]),
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
cc.known_type(&instant_var).expect("?time is known"),
|
cc.known_type(&instant_var).expect("?time is known"),
|
||||||
|
@ -202,7 +202,7 @@ fn test_numeric_predicates_accepts_var() {
|
||||||
let cc = alg_with_inputs(
|
let cc = alg_with_inputs(
|
||||||
known,
|
known,
|
||||||
query,
|
query,
|
||||||
QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value)]),
|
QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value.clone())]),
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
cc.known_type(&numeric_var).expect("?long is known"),
|
cc.known_type(&numeric_var).expect("?long is known"),
|
||||||
|
|
|
@ -16,7 +16,7 @@ extern crate query_algebrizer_traits;
|
||||||
|
|
||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
use crate::utils::{alg, bails, SchemaBuilder};
|
use utils::{alg, bails, SchemaBuilder};
|
||||||
|
|
||||||
use core_traits::ValueType;
|
use core_traits::ValueType;
|
||||||
|
|
||||||
|
@ -34,7 +34,6 @@ fn prepopulated_schema() -> Schema {
|
||||||
.define_simple_attr("test", "uuid", ValueType::Uuid, false)
|
.define_simple_attr("test", "uuid", ValueType::Uuid, false)
|
||||||
.define_simple_attr("test", "instant", ValueType::Instant, false)
|
.define_simple_attr("test", "instant", ValueType::Instant, false)
|
||||||
.define_simple_attr("test", "ref", ValueType::Ref, false)
|
.define_simple_attr("test", "ref", ValueType::Ref, false)
|
||||||
.define_simple_attr("test", "bytes", ValueType::Bytes, false)
|
|
||||||
.schema
|
.schema
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -30,7 +30,7 @@ use mentat_query_algebrizer::{
|
||||||
// These are helpers that tests use to build Schema instances.
|
// These are helpers that tests use to build Schema instances.
|
||||||
pub fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
pub fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
||||||
schema.entid_map.insert(e, i.clone());
|
schema.entid_map.insert(e, i.clone());
|
||||||
schema.ident_map.insert(i, e);
|
schema.ident_map.insert(i.clone(), e);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
pub fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "query_projector_traits"
|
name = "query_projector_traits"
|
||||||
version = "0.0.2"
|
version = "0.0.1"
|
||||||
workspace = ".."
|
workspace = ".."
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
|
@ -11,12 +11,12 @@ path = "lib.rs"
|
||||||
sqlcipher = ["rusqlite/sqlcipher"]
|
sqlcipher = ["rusqlite/sqlcipher"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
failure = "~0.1"
|
failure = "0.1"
|
||||||
failure_derive = "~0.1"
|
failure_derive = "0.1"
|
||||||
|
|
||||||
[dependencies.rusqlite]
|
[dependencies.rusqlite]
|
||||||
version = "~0.29"
|
version = "0.21"
|
||||||
features = ["limits", "bundled"]
|
features = ["limits"]
|
||||||
|
|
||||||
[dependencies.edn]
|
[dependencies.edn]
|
||||||
path = "../edn"
|
path = "../edn"
|
||||||
|
|
|
@ -16,7 +16,7 @@ use mentat_query_algebrizer::{ColumnName, ConjoiningClauses, VariableColumn};
|
||||||
|
|
||||||
use mentat_query_sql::{ColumnOrExpression, Expression, Name, ProjectedColumn};
|
use mentat_query_sql::{ColumnOrExpression, Expression, Name, ProjectedColumn};
|
||||||
|
|
||||||
use crate::errors::{ProjectorError, Result};
|
use errors::{ProjectorErrorKind, Result};
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||||
pub enum SimpleAggregationOp {
|
pub enum SimpleAggregationOp {
|
||||||
|
@ -28,14 +28,14 @@ pub enum SimpleAggregationOp {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SimpleAggregationOp {
|
impl SimpleAggregationOp {
|
||||||
pub fn to_sql(self) -> &'static str {
|
pub fn to_sql(&self) -> &'static str {
|
||||||
use self::SimpleAggregationOp::*;
|
use self::SimpleAggregationOp::*;
|
||||||
match self {
|
match self {
|
||||||
Avg => "avg",
|
&Avg => "avg",
|
||||||
Count => "count",
|
&Count => "count",
|
||||||
Max => "max",
|
&Max => "max",
|
||||||
Min => "min",
|
&Min => "min",
|
||||||
Sum => "sum",
|
&Sum => "sum",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -57,29 +57,29 @@ impl SimpleAggregationOp {
|
||||||
/// but invalid to take `Max` of `{Uuid, String}`.
|
/// but invalid to take `Max` of `{Uuid, String}`.
|
||||||
///
|
///
|
||||||
/// The returned type is the type of the result of the aggregation.
|
/// The returned type is the type of the result of the aggregation.
|
||||||
pub fn is_applicable_to_types(self, possibilities: ValueTypeSet) -> Result<ValueType> {
|
pub fn is_applicable_to_types(&self, possibilities: ValueTypeSet) -> Result<ValueType> {
|
||||||
use self::SimpleAggregationOp::*;
|
use self::SimpleAggregationOp::*;
|
||||||
if possibilities.is_empty() {
|
if possibilities.is_empty() {
|
||||||
bail!(ProjectorError::CannotProjectImpossibleBinding(self))
|
bail!(ProjectorErrorKind::CannotProjectImpossibleBinding(*self))
|
||||||
}
|
}
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
// One can always count results.
|
// One can always count results.
|
||||||
Count => Ok(ValueType::Long),
|
&Count => Ok(ValueType::Long),
|
||||||
|
|
||||||
// Only numeric types can be averaged or summed.
|
// Only numeric types can be averaged or summed.
|
||||||
Avg => {
|
&Avg => {
|
||||||
if possibilities.is_only_numeric() {
|
if possibilities.is_only_numeric() {
|
||||||
// The mean of a set of numeric values will always, for our purposes, be a double.
|
// The mean of a set of numeric values will always, for our purposes, be a double.
|
||||||
Ok(ValueType::Double)
|
Ok(ValueType::Double)
|
||||||
} else {
|
} else {
|
||||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||||
self,
|
*self,
|
||||||
possibilities
|
possibilities
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Sum => {
|
&Sum => {
|
||||||
if possibilities.is_only_numeric() {
|
if possibilities.is_only_numeric() {
|
||||||
if possibilities.contains(ValueType::Double) {
|
if possibilities.contains(ValueType::Double) {
|
||||||
Ok(ValueType::Double)
|
Ok(ValueType::Double)
|
||||||
|
@ -88,19 +88,19 @@ impl SimpleAggregationOp {
|
||||||
Ok(ValueType::Long)
|
Ok(ValueType::Long)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||||
self,
|
*self,
|
||||||
possibilities
|
possibilities
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Max | Min => {
|
&Max | &Min => {
|
||||||
if possibilities.is_unit() {
|
if possibilities.is_unit() {
|
||||||
use self::ValueType::*;
|
use self::ValueType::*;
|
||||||
let the_type = possibilities.exemplar().expect("a type");
|
let the_type = possibilities.exemplar().expect("a type");
|
||||||
match the_type {
|
match the_type {
|
||||||
// Numerically ordered types.
|
// These types are numerically ordered.
|
||||||
Double | Long | Instant => Ok(the_type),
|
Double | Long | Instant => Ok(the_type),
|
||||||
|
|
||||||
// Boolean: false < true.
|
// Boolean: false < true.
|
||||||
|
@ -109,10 +109,10 @@ impl SimpleAggregationOp {
|
||||||
// String: lexicographic order.
|
// String: lexicographic order.
|
||||||
String => Ok(the_type),
|
String => Ok(the_type),
|
||||||
|
|
||||||
// Unordered types.
|
// These types are unordered.
|
||||||
Keyword | Ref | Uuid | Bytes => {
|
Keyword | Ref | Uuid => {
|
||||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||||
self,
|
*self,
|
||||||
possibilities
|
possibilities
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
@ -129,8 +129,8 @@ impl SimpleAggregationOp {
|
||||||
Ok(ValueType::Long)
|
Ok(ValueType::Long)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||||
self,
|
*self,
|
||||||
possibilities
|
possibilities
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
|
@ -15,14 +15,60 @@ use rusqlite;
|
||||||
use core_traits::ValueTypeSet;
|
use core_traits::ValueTypeSet;
|
||||||
use db_traits::errors::DbError;
|
use db_traits::errors::DbError;
|
||||||
use edn::query::PlainSymbol;
|
use edn::query::PlainSymbol;
|
||||||
|
use failure::{ Backtrace, Context, Fail, };
|
||||||
|
use std::fmt;
|
||||||
use query_pull_traits::errors::PullError;
|
use query_pull_traits::errors::PullError;
|
||||||
|
|
||||||
use crate::aggregates::SimpleAggregationOp;
|
use aggregates::SimpleAggregationOp;
|
||||||
|
|
||||||
pub type Result<T> = std::result::Result<T, ProjectorError>;
|
pub type Result<T> = std::result::Result<T, ProjectorError>;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct ProjectorError(Box<Context<ProjectorErrorKind>>);
|
||||||
|
|
||||||
|
impl Fail for ProjectorError {
|
||||||
|
#[inline]
|
||||||
|
fn cause(&self) -> Option<&Fail> {
|
||||||
|
self.0.cause()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn backtrace(&self) -> Option<&Backtrace> {
|
||||||
|
self.0.backtrace()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for ProjectorError {
|
||||||
|
#[inline]
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
fmt::Display::fmt(&*self.0, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ProjectorError {
|
||||||
|
#[inline]
|
||||||
|
pub fn kind(&self) -> &ProjectorErrorKind {
|
||||||
|
&*self.0.get_context()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ProjectorErrorKind> for ProjectorError {
|
||||||
|
#[inline]
|
||||||
|
fn from(kind: ProjectorErrorKind) -> ProjectorError {
|
||||||
|
ProjectorError(Box::new(Context::new(kind)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Context<ProjectorErrorKind>> for ProjectorError {
|
||||||
|
#[inline]
|
||||||
|
fn from(inner: Context<ProjectorErrorKind>) -> ProjectorError {
|
||||||
|
ProjectorError(Box::new(inner))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
#[derive(Debug, Fail)]
|
#[derive(Debug, Fail)]
|
||||||
pub enum ProjectorError {
|
pub enum ProjectorErrorKind {
|
||||||
/// We're just not done yet. Message that the feature is recognized but not yet
|
/// We're just not done yet. Message that the feature is recognized but not yet
|
||||||
/// implemented.
|
/// implemented.
|
||||||
#[fail(display = "not yet implemented: {}", _0)]
|
#[fail(display = "not yet implemented: {}", _0)]
|
||||||
|
@ -70,6 +116,24 @@ pub enum ProjectorError {
|
||||||
PullError(#[cause] PullError),
|
PullError(#[cause] PullError),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<rusqlite::Error> for ProjectorErrorKind {
|
||||||
|
fn from(error: rusqlite::Error) -> ProjectorErrorKind {
|
||||||
|
ProjectorErrorKind::from(error).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<mentat_db::DbError> for ProjectorErrorKind {
|
||||||
|
fn from(error: mentat_db::DbError) -> ProjectorErrorKind {
|
||||||
|
ProjectorErrorKind::from(error).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<mentat_query_pull::PullError> for ProjectorErrorKind {
|
||||||
|
fn from(error: mentat_query_pull::PullError) -> ProjectorErrorKind {
|
||||||
|
ProjectorErrorKind::from(error).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl From<rusqlite::Error> for ProjectorError {
|
impl From<rusqlite::Error> for ProjectorError {
|
||||||
fn from(error: rusqlite::Error) -> ProjectorError {
|
fn from(error: rusqlite::Error) -> ProjectorError {
|
||||||
ProjectorError::RusqliteError(error.to_string())
|
ProjectorError::RusqliteError(error.to_string())
|
||||||
|
|
|
@ -28,7 +28,7 @@ use mentat_query_projector::query_projection;
|
||||||
// These are helpers that tests use to build Schema instances.
|
// These are helpers that tests use to build Schema instances.
|
||||||
fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
||||||
schema.entid_map.insert(e, i.clone());
|
schema.entid_map.insert(e, i.clone());
|
||||||
schema.ident_map.insert(i, e);
|
schema.ident_map.insert(i.clone(), e);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
||||||
|
@ -101,9 +101,9 @@ fn test_the_without_max_or_min() {
|
||||||
// … when we look at the projection list, we cannot reconcile the types.
|
// … when we look at the projection list, we cannot reconcile the types.
|
||||||
let projection = query_projection(&schema, &algebrized);
|
let projection = query_projection(&schema, &algebrized);
|
||||||
assert!(projection.is_err());
|
assert!(projection.is_err());
|
||||||
use query_projector_traits::errors::ProjectorError;
|
use query_projector_traits::errors::ProjectorErrorKind;
|
||||||
match projection.err().expect("expected failure") {
|
match projection.err().expect("expected failure") {
|
||||||
ProjectorError::InvalidProjection(s) => {
|
ProjectorErrorKind::InvalidProjection(s) => {
|
||||||
assert_eq!(s.as_str(), "Warning: used `the` without `min` or `max`.");
|
assert_eq!(s.as_str(), "Warning: used `the` without `min` or `max`.");
|
||||||
}
|
}
|
||||||
_ => panic!(),
|
_ => panic!(),
|
||||||
|
|
|
@ -1,18 +1,18 @@
|
||||||
[package]
|
[package]
|
||||||
name = "mentat_query_projector"
|
name = "mentat_query_projector"
|
||||||
version = "0.0.2"
|
version = "0.0.1"
|
||||||
workspace = ".."
|
workspace = ".."
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
sqlcipher = ["rusqlite/sqlcipher"]
|
sqlcipher = ["rusqlite/sqlcipher"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
failure = "~0.1"
|
failure = "0.1"
|
||||||
indexmap = "~1.9"
|
indexmap = "1.3"
|
||||||
|
|
||||||
[dependencies.rusqlite]
|
[dependencies.rusqlite]
|
||||||
version = "~0.29"
|
version = "0.21"
|
||||||
features = ["limits", "bundled"]
|
features = ["limits"]
|
||||||
|
|
||||||
[dependencies.core_traits]
|
[dependencies.core_traits]
|
||||||
path = "../core-traits"
|
path = "../core-traits"
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
|
|
||||||
use core_traits::Binding;
|
use core_traits::Binding;
|
||||||
|
|
||||||
use query_projector_traits::errors::{ProjectorError, Result};
|
use query_projector_traits::errors::{ProjectorErrorKind, Result};
|
||||||
|
|
||||||
/// A `BindingTuple` is any type that can accommodate a Mentat tuple query result of fixed length.
|
/// A `BindingTuple` is any type that can accommodate a Mentat tuple query result of fixed length.
|
||||||
///
|
///
|
||||||
|
@ -27,7 +27,7 @@ impl BindingTuple for Vec<Binding> {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -43,13 +43,13 @@ impl BindingTuple for Vec<Binding> {
|
||||||
impl BindingTuple for (Binding,) {
|
impl BindingTuple for (Binding,) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 1 {
|
if expected != 1 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(1, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(1, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -65,13 +65,13 @@ impl BindingTuple for (Binding,) {
|
||||||
impl BindingTuple for (Binding, Binding) {
|
impl BindingTuple for (Binding, Binding) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 2 {
|
if expected != 2 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(2, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(2, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -87,13 +87,13 @@ impl BindingTuple for (Binding, Binding) {
|
||||||
impl BindingTuple for (Binding, Binding, Binding) {
|
impl BindingTuple for (Binding, Binding, Binding) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 3 {
|
if expected != 3 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(3, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(3, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -113,13 +113,13 @@ impl BindingTuple for (Binding, Binding, Binding) {
|
||||||
impl BindingTuple for (Binding, Binding, Binding, Binding) {
|
impl BindingTuple for (Binding, Binding, Binding, Binding) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 4 {
|
if expected != 4 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(4, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(4, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -140,13 +140,13 @@ impl BindingTuple for (Binding, Binding, Binding, Binding) {
|
||||||
impl BindingTuple for (Binding, Binding, Binding, Binding, Binding) {
|
impl BindingTuple for (Binding, Binding, Binding, Binding, Binding) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 5 {
|
if expected != 5 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(5, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(5, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
@ -170,13 +170,13 @@ impl BindingTuple for (Binding, Binding, Binding, Binding, Binding) {
|
||||||
impl BindingTuple for (Binding, Binding, Binding, Binding, Binding, Binding) {
|
impl BindingTuple for (Binding, Binding, Binding, Binding, Binding, Binding) {
|
||||||
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
|
||||||
if expected != 6 {
|
if expected != 6 {
|
||||||
return Err(ProjectorError::UnexpectedResultsTupleLength(6, expected));
|
return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(6, expected));
|
||||||
}
|
}
|
||||||
match vec {
|
match vec {
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
if expected != vec.len() {
|
if expected != vec.len() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
|
||||||
expected,
|
expected,
|
||||||
vec.len(),
|
vec.len(),
|
||||||
))
|
))
|
||||||
|
|
|
@ -50,26 +50,26 @@ use mentat_query_sql::{GroupBy, Projection};
|
||||||
pub mod translate;
|
pub mod translate;
|
||||||
|
|
||||||
mod binding_tuple;
|
mod binding_tuple;
|
||||||
pub use crate::binding_tuple::BindingTuple;
|
pub use binding_tuple::BindingTuple;
|
||||||
mod project;
|
mod project;
|
||||||
mod projectors;
|
mod projectors;
|
||||||
mod pull;
|
mod pull;
|
||||||
mod relresult;
|
mod relresult;
|
||||||
|
|
||||||
use crate::project::{project_elements, ProjectedElements};
|
use project::{project_elements, ProjectedElements};
|
||||||
|
|
||||||
pub use crate::project::projected_column_for_var;
|
pub use project::projected_column_for_var;
|
||||||
|
|
||||||
pub use crate::projectors::{ConstantProjector, Projector};
|
pub use projectors::{ConstantProjector, Projector};
|
||||||
|
|
||||||
use crate::projectors::{
|
use projectors::{
|
||||||
CollProjector, CollTwoStagePullProjector, RelProjector, RelTwoStagePullProjector,
|
CollProjector, CollTwoStagePullProjector, RelProjector, RelTwoStagePullProjector,
|
||||||
ScalarProjector, ScalarTwoStagePullProjector, TupleProjector, TupleTwoStagePullProjector,
|
ScalarProjector, ScalarTwoStagePullProjector, TupleProjector, TupleTwoStagePullProjector,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub use crate::relresult::{RelResult, StructuredRelResult};
|
pub use relresult::{RelResult, StructuredRelResult};
|
||||||
|
|
||||||
use query_projector_traits::errors::{ProjectorError, Result};
|
use query_projector_traits::errors::{ProjectorErrorKind, Result};
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
pub struct QueryOutput {
|
pub struct QueryOutput {
|
||||||
|
@ -94,11 +94,11 @@ impl From<QueryOutput> for QueryResults {
|
||||||
impl QueryOutput {
|
impl QueryOutput {
|
||||||
pub fn empty_factory(spec: &FindSpec) -> Box<dyn Fn() -> QueryResults> {
|
pub fn empty_factory(spec: &FindSpec) -> Box<dyn Fn() -> QueryResults> {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
match *spec {
|
match spec {
|
||||||
FindScalar(_) => Box::new(|| QueryResults::Scalar(None)),
|
&FindScalar(_) => Box::new(|| QueryResults::Scalar(None)),
|
||||||
FindTuple(_) => Box::new(|| QueryResults::Tuple(None)),
|
&FindTuple(_) => Box::new(|| QueryResults::Tuple(None)),
|
||||||
FindColl(_) => Box::new(|| QueryResults::Coll(vec![])),
|
&FindColl(_) => Box::new(|| QueryResults::Coll(vec![])),
|
||||||
FindRel(ref es) => {
|
&FindRel(ref es) => {
|
||||||
let width = es.len();
|
let width = es.len();
|
||||||
Box::new(move || QueryResults::Rel(RelResult::empty(width)))
|
Box::new(move || QueryResults::Rel(RelResult::empty(width)))
|
||||||
}
|
}
|
||||||
|
@ -115,48 +115,48 @@ impl QueryOutput {
|
||||||
|
|
||||||
pub fn empty(spec: &Rc<FindSpec>) -> QueryOutput {
|
pub fn empty(spec: &Rc<FindSpec>) -> QueryOutput {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
let results = match **spec {
|
let results = match &**spec {
|
||||||
FindScalar(_) => QueryResults::Scalar(None),
|
&FindScalar(_) => QueryResults::Scalar(None),
|
||||||
FindTuple(_) => QueryResults::Tuple(None),
|
&FindTuple(_) => QueryResults::Tuple(None),
|
||||||
FindColl(_) => QueryResults::Coll(vec![]),
|
&FindColl(_) => QueryResults::Coll(vec![]),
|
||||||
FindRel(ref es) => QueryResults::Rel(RelResult::empty(es.len())),
|
&FindRel(ref es) => QueryResults::Rel(RelResult::empty(es.len())),
|
||||||
};
|
};
|
||||||
QueryOutput {
|
QueryOutput {
|
||||||
spec: spec.clone(),
|
spec: spec.clone(),
|
||||||
results,
|
results: results,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_constants(spec: &Rc<FindSpec>, bindings: VariableBindings) -> QueryResults {
|
pub fn from_constants(spec: &Rc<FindSpec>, bindings: VariableBindings) -> QueryResults {
|
||||||
use self::FindSpec::*;
|
use self::FindSpec::*;
|
||||||
match **spec {
|
match &**spec {
|
||||||
FindScalar(Element::Variable(ref var))
|
&FindScalar(Element::Variable(ref var))
|
||||||
| FindScalar(Element::Corresponding(ref var)) => {
|
| &FindScalar(Element::Corresponding(ref var)) => {
|
||||||
let val = bindings.get(var).cloned().map(|v| v.into());
|
let val = bindings.get(var).cloned().map(|v| v.into());
|
||||||
QueryResults::Scalar(val)
|
QueryResults::Scalar(val)
|
||||||
}
|
}
|
||||||
FindScalar(Element::Aggregate(ref _agg)) => {
|
&FindScalar(Element::Aggregate(ref _agg)) => {
|
||||||
// TODO: static aggregates.
|
// TODO: static aggregates.
|
||||||
unimplemented!();
|
unimplemented!();
|
||||||
}
|
}
|
||||||
FindScalar(Element::Pull(ref _pull)) => {
|
&FindScalar(Element::Pull(ref _pull)) => {
|
||||||
// TODO: static pull.
|
// TODO: static pull.
|
||||||
unimplemented!();
|
unimplemented!();
|
||||||
}
|
}
|
||||||
FindTuple(ref elements) => {
|
&FindTuple(ref elements) => {
|
||||||
let values = elements
|
let values = elements
|
||||||
.iter()
|
.iter()
|
||||||
.map(|e| match *e {
|
.map(|e| match e {
|
||||||
Element::Variable(ref var) | Element::Corresponding(ref var) => bindings
|
&Element::Variable(ref var) | &Element::Corresponding(ref var) => bindings
|
||||||
.get(var)
|
.get(var)
|
||||||
.cloned()
|
.cloned()
|
||||||
.expect("every var to have a binding")
|
.expect("every var to have a binding")
|
||||||
.into(),
|
.into(),
|
||||||
Element::Pull(ref _pull) => {
|
&Element::Pull(ref _pull) => {
|
||||||
// TODO: static pull.
|
// TODO: static pull.
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
Element::Aggregate(ref _agg) => {
|
&Element::Aggregate(ref _agg) => {
|
||||||
// TODO: static computation of aggregates, then
|
// TODO: static computation of aggregates, then
|
||||||
// implement the condition in `is_fully_bound`.
|
// implement the condition in `is_fully_bound`.
|
||||||
unreachable!();
|
unreachable!();
|
||||||
|
@ -165,7 +165,7 @@ impl QueryOutput {
|
||||||
.collect();
|
.collect();
|
||||||
QueryResults::Tuple(Some(values))
|
QueryResults::Tuple(Some(values))
|
||||||
}
|
}
|
||||||
FindColl(Element::Variable(ref var)) | FindColl(Element::Corresponding(ref var)) => {
|
&FindColl(Element::Variable(ref var)) | &FindColl(Element::Corresponding(ref var)) => {
|
||||||
let val = bindings
|
let val = bindings
|
||||||
.get(var)
|
.get(var)
|
||||||
.cloned()
|
.cloned()
|
||||||
|
@ -173,32 +173,32 @@ impl QueryOutput {
|
||||||
.into();
|
.into();
|
||||||
QueryResults::Coll(vec![val])
|
QueryResults::Coll(vec![val])
|
||||||
}
|
}
|
||||||
FindColl(Element::Pull(ref _pull)) => {
|
&FindColl(Element::Pull(ref _pull)) => {
|
||||||
// TODO: static pull.
|
// TODO: static pull.
|
||||||
unimplemented!();
|
unimplemented!();
|
||||||
}
|
}
|
||||||
FindColl(Element::Aggregate(ref _agg)) => {
|
&FindColl(Element::Aggregate(ref _agg)) => {
|
||||||
// Does it even make sense to write
|
// Does it even make sense to write
|
||||||
// [:find [(max ?x) ...] :where [_ :foo/bar ?x]]
|
// [:find [(max ?x) ...] :where [_ :foo/bar ?x]]
|
||||||
// ?
|
// ?
|
||||||
// TODO
|
// TODO
|
||||||
unimplemented!();
|
unimplemented!();
|
||||||
}
|
}
|
||||||
FindRel(ref elements) => {
|
&FindRel(ref elements) => {
|
||||||
let width = elements.len();
|
let width = elements.len();
|
||||||
let values = elements
|
let values = elements
|
||||||
.iter()
|
.iter()
|
||||||
.map(|e| match *e {
|
.map(|e| match e {
|
||||||
Element::Variable(ref var) | Element::Corresponding(ref var) => bindings
|
&Element::Variable(ref var) | &Element::Corresponding(ref var) => bindings
|
||||||
.get(var)
|
.get(var)
|
||||||
.cloned()
|
.cloned()
|
||||||
.expect("every var to have a binding")
|
.expect("every var to have a binding")
|
||||||
.into(),
|
.into(),
|
||||||
Element::Pull(ref _pull) => {
|
&Element::Pull(ref _pull) => {
|
||||||
// TODO: static pull.
|
// TODO: static pull.
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
Element::Aggregate(ref _agg) => {
|
&Element::Aggregate(ref _agg) => {
|
||||||
// TODO: static computation of aggregates, then
|
// TODO: static computation of aggregates, then
|
||||||
// implement the condition in `is_fully_bound`.
|
// implement the condition in `is_fully_bound`.
|
||||||
unreachable!();
|
unreachable!();
|
||||||
|
@ -241,77 +241,77 @@ impl QueryOutput {
|
||||||
|
|
||||||
impl QueryResults {
|
impl QueryResults {
|
||||||
pub fn len(&self) -> usize {
|
pub fn len(&self) -> usize {
|
||||||
use crate::QueryResults::*;
|
use QueryResults::*;
|
||||||
match *self {
|
match self {
|
||||||
Scalar(ref o) => {
|
&Scalar(ref o) => {
|
||||||
if o.is_some() {
|
if o.is_some() {
|
||||||
1
|
1
|
||||||
} else {
|
} else {
|
||||||
0
|
0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Tuple(ref o) => {
|
&Tuple(ref o) => {
|
||||||
if o.is_some() {
|
if o.is_some() {
|
||||||
1
|
1
|
||||||
} else {
|
} else {
|
||||||
0
|
0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Coll(ref v) => v.len(),
|
&Coll(ref v) => v.len(),
|
||||||
Rel(ref r) => r.row_count(),
|
&Rel(ref r) => r.row_count(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_empty(&self) -> bool {
|
||||||
use crate::QueryResults::*;
|
use QueryResults::*;
|
||||||
match *self {
|
match self {
|
||||||
Scalar(ref o) => o.is_none(),
|
&Scalar(ref o) => o.is_none(),
|
||||||
Tuple(ref o) => o.is_none(),
|
&Tuple(ref o) => o.is_none(),
|
||||||
Coll(ref v) => v.is_empty(),
|
&Coll(ref v) => v.is_empty(),
|
||||||
Rel(ref r) => r.is_empty(),
|
&Rel(ref r) => r.is_empty(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_scalar(self) -> Result<Option<Binding>> {
|
pub fn into_scalar(self) -> Result<Option<Binding>> {
|
||||||
match self {
|
match self {
|
||||||
QueryResults::Scalar(o) => Ok(o),
|
QueryResults::Scalar(o) => Ok(o),
|
||||||
QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "scalar")),
|
QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "scalar")),
|
||||||
QueryResults::Tuple(_) => {
|
QueryResults::Tuple(_) => {
|
||||||
bail!(ProjectorError::UnexpectedResultsType("tuple", "scalar"))
|
bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "scalar"))
|
||||||
}
|
}
|
||||||
QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "scalar")),
|
QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "scalar")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_coll(self) -> Result<Vec<Binding>> {
|
pub fn into_coll(self) -> Result<Vec<Binding>> {
|
||||||
match self {
|
match self {
|
||||||
QueryResults::Scalar(_) => {
|
QueryResults::Scalar(_) => {
|
||||||
bail!(ProjectorError::UnexpectedResultsType("scalar", "coll"))
|
bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "coll"))
|
||||||
}
|
}
|
||||||
QueryResults::Coll(c) => Ok(c),
|
QueryResults::Coll(c) => Ok(c),
|
||||||
QueryResults::Tuple(_) => bail!(ProjectorError::UnexpectedResultsType("tuple", "coll")),
|
QueryResults::Tuple(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "coll")),
|
||||||
QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "coll")),
|
QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "coll")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_tuple(self) -> Result<Option<Vec<Binding>>> {
|
pub fn into_tuple(self) -> Result<Option<Vec<Binding>>> {
|
||||||
match self {
|
match self {
|
||||||
QueryResults::Scalar(_) => {
|
QueryResults::Scalar(_) => {
|
||||||
bail!(ProjectorError::UnexpectedResultsType("scalar", "tuple"))
|
bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "tuple"))
|
||||||
}
|
}
|
||||||
QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "tuple")),
|
QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "tuple")),
|
||||||
QueryResults::Tuple(t) => Ok(t),
|
QueryResults::Tuple(t) => Ok(t),
|
||||||
QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "tuple")),
|
QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "tuple")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_rel(self) -> Result<RelResult<Binding>> {
|
pub fn into_rel(self) -> Result<RelResult<Binding>> {
|
||||||
match self {
|
match self {
|
||||||
QueryResults::Scalar(_) => {
|
QueryResults::Scalar(_) => {
|
||||||
bail!(ProjectorError::UnexpectedResultsType("scalar", "rel"))
|
bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "rel"))
|
||||||
}
|
}
|
||||||
QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "rel")),
|
QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "rel")),
|
||||||
QueryResults::Tuple(_) => bail!(ProjectorError::UnexpectedResultsType("tuple", "rel")),
|
QueryResults::Tuple(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "rel")),
|
||||||
QueryResults::Rel(r) => Ok(r),
|
QueryResults::Rel(r) => Ok(r),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -339,16 +339,16 @@ impl TypedIndex {
|
||||||
/// This function will return a runtime error if the type tag is unknown, or the value is
|
/// This function will return a runtime error if the type tag is unknown, or the value is
|
||||||
/// otherwise not convertible by the DB layer.
|
/// otherwise not convertible by the DB layer.
|
||||||
fn lookup<'a>(&self, row: &Row<'a>) -> Result<Binding> {
|
fn lookup<'a>(&self, row: &Row<'a>) -> Result<Binding> {
|
||||||
use crate::TypedIndex::*;
|
use TypedIndex::*;
|
||||||
|
|
||||||
match *self {
|
match self {
|
||||||
Known(value_index, value_type) => {
|
&Known(value_index, value_type) => {
|
||||||
let v: rusqlite::types::Value = row.get(value_index).unwrap();
|
let v: rusqlite::types::Value = row.get(value_index).unwrap();
|
||||||
TypedValue::from_sql_value_pair(v, value_type)
|
TypedValue::from_sql_value_pair(v, value_type)
|
||||||
.map(|v| v.into())
|
.map(|v| v.into())
|
||||||
.map_err(|e| e.into())
|
.map_err(|e| e.into())
|
||||||
}
|
}
|
||||||
Unknown(value_index, type_index) => {
|
&Unknown(value_index, type_index) => {
|
||||||
let v: rusqlite::types::Value = row.get(value_index).unwrap();
|
let v: rusqlite::types::Value = row.get(value_index).unwrap();
|
||||||
let value_type_tag: i32 = row.get(type_index).unwrap();
|
let value_type_tag: i32 = row.get(type_index).unwrap();
|
||||||
TypedValue::from_sql_value_pair(v, value_type_tag)
|
TypedValue::from_sql_value_pair(v, value_type_tag)
|
||||||
|
@ -403,7 +403,10 @@ trait IsPull {
|
||||||
|
|
||||||
impl IsPull for Element {
|
impl IsPull for Element {
|
||||||
fn is_pull(&self) -> bool {
|
fn is_pull(&self) -> bool {
|
||||||
matches!(*self, Element::Pull(_))
|
match self {
|
||||||
|
&Element::Pull(_) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -427,16 +430,16 @@ pub fn query_projection(
|
||||||
|
|
||||||
let variables: BTreeSet<Variable> = spec
|
let variables: BTreeSet<Variable> = spec
|
||||||
.columns()
|
.columns()
|
||||||
.map(|e| match *e {
|
.map(|e| match e {
|
||||||
Element::Variable(ref var) | Element::Corresponding(ref var) => var.clone(),
|
&Element::Variable(ref var) | &Element::Corresponding(ref var) => var.clone(),
|
||||||
|
|
||||||
// Pull expressions can never be fully bound.
|
// Pull expressions can never be fully bound.
|
||||||
// TODO: but the interior can be, in which case we
|
// TODO: but the interior can be, in which case we
|
||||||
// can handle this and simply project.
|
// can handle this and simply project.
|
||||||
Element::Pull(_) => {
|
&Element::Pull(_) => {
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
Element::Aggregate(ref _agg) => {
|
&Element::Aggregate(ref _agg) => {
|
||||||
// TODO: static computation of aggregates, then
|
// TODO: static computation of aggregates, then
|
||||||
// implement the condition in `is_fully_bound`.
|
// implement the condition in `is_fully_bound`.
|
||||||
unreachable!();
|
unreachable!();
|
||||||
|
@ -522,14 +525,12 @@ fn test_into_tuple() {
|
||||||
))
|
))
|
||||||
);
|
);
|
||||||
|
|
||||||
match query_output.into_tuple() {
|
match query_output.clone().into_tuple() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(expected, got)) => {
|
||||||
assert_eq!((expected, got), (3, 2));
|
assert_eq!((expected, got), (3, 2));
|
||||||
}
|
}
|
||||||
// This forces the result type.
|
// This forces the result type.
|
||||||
Ok(Some((_, _, _))) => panic!("expected error"),
|
Ok(Some((_, _, _))) | _ => panic!("expected error"),
|
||||||
#[allow(clippy::wildcard_in_or_patterns)]
|
|
||||||
_ => panic!("expected error"),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let query_output = QueryOutput {
|
let query_output = QueryOutput {
|
||||||
|
@ -543,18 +544,14 @@ fn test_into_tuple() {
|
||||||
match query_output.clone().into_tuple() {
|
match query_output.clone().into_tuple() {
|
||||||
Ok(None) => {}
|
Ok(None) => {}
|
||||||
// This forces the result type.
|
// This forces the result type.
|
||||||
Ok(Some((_, _))) => panic!("expected error"),
|
Ok(Some((_, _))) | _ => panic!("expected error"),
|
||||||
#[allow(clippy::wildcard_in_or_patterns)]
|
|
||||||
_ => panic!("expected error"),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
match query_output.into_tuple() {
|
match query_output.clone().into_tuple() {
|
||||||
Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
|
Err(ProjectorErrorKind::UnexpectedResultsTupleLength(expected, got)) => {
|
||||||
assert_eq!((expected, got), (3, 2));
|
assert_eq!((expected, got), (3, 2));
|
||||||
}
|
}
|
||||||
// This forces the result type.
|
// This forces the result type.
|
||||||
Ok(Some((_, _, _))) => panic!("expected error"),
|
Ok(Some((_, _, _))) | _ => panic!("expected error"),
|
||||||
#[allow(clippy::wildcard_in_or_patterns)]
|
|
||||||
_ => panic!("expected error"),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Some files were not shown because too many files have changed in this diff.