Compare commits
118 commits
moz-pr/798...master

Commits (SHA1):
201ec39dd2, 216f078d44, 8ab11d3503, 92eab3692f, 02ebaf5bae, 517b781da1, 6b269a660d, 92f400a553, ff527ad220, 73240913cc,
c10575e04d, 5fdb9a4970, 8f226ca050, aa6b634e64, 986b439fb9, 0d55e6acba, d39f8aad4e, 7cfff34602, 8175b98a7c, b19a994c68,
9a4ba44060, 124bf54385, 3df00eb63a, 8041c704dc, 4aa70567b8, c9a46327bc, d22bf451a4, e73effb7d2, eae76e6f43, bd818ba1f1,
73feb622cd, d3821432bc, 179c123061, 1500d4348c, 479fbc4572, 97628a251f, 903ac24589, 1f6620bf87, e64e2cf2f2, 08694dc45a,
64bb6284d0, 5f376a8664, ad3d7157a5, 46ddac347e, fba46fb1f2, 071a916981, d4736a83e4, 15df38fc8f, 614ce63e2b, 5a7caf7488,
a02570fd5e, 4ec3c3cddc, 8e8e7b9739, abcdad5976, 9d4f328af1, f918dcd915, 7185d5ee13, c8c7dda27a, 2f299fde6c, 3a62dbc122,
0d79eeed8f, ca9d8c0096, ffaba698e0, 8446a1bc4a, 722f7fb782, 75b5a66a91, ac532be358, 44036160d0, c8c1363b14, 32ce6d2129,
380945a655, af9bb1fcfe, c295d82872, c2e39eeb5c, 985fd0bbdf, c02c06ce2b, b138c7e257, 88df3c4d8d, feb9665299, 19cb2870da,
5c2a7261a1, 0f015b2f10, da89cfc797, 9a6ae48d8e, d97e882a4a, 5a65cd38c9, 5e700133f5, 4a63ca98df, 2e28e87af8, 5998ef73fb,
9bcd0955ba, 39219af1ff, 6d88abfb44, 31ec02afd3, 1622978acf, 26cd399e3a, 949386a43f, 5b0cb80b32, 8039183097, 9c472eff41,
324929a02a, 526c9c3928, 4b1583473e, 125306e108, 0e63167aab, 5899bf8624, bf1ac14d32, b428579865, 9eb6bc6220, 41f1ff2393,
5979fa5844, dfb5866174, 58e06742fd, a8223d11c9, b41bcf40f3, 18a0c15320, 6b7343a893, 4f81c4e15b
160 changed files with 3287 additions and 5470 deletions
3 .github/FUNDING.yml vendored Normal file
@@ -0,0 +1,3 @@
liberapay: svartalf
patreon: svartalf
custom: ["https://svartalf.info/donate/", "https://www.buymeacoffee.com/svartalf"]

11 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "cargo" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "daily"

20 .github/workflows/audit.yml vendored Normal file
@@ -0,0 +1,20 @@
name: Security audit

on:
  schedule:
    - cron: '0 0 1 * *'
  push:
    paths:
      - '**/Cargo.toml'
      - '**/Cargo.lock'
  pull_request:

jobs:
  audit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/audit-check@issue-104
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

13 .github/workflows/clippy-ng.yml vendored Normal file
@@ -0,0 +1,13 @@
on: [push, pull_request]
name: Clippy (new version test, don't use it!)
jobs:
  clippy_check_ng:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          components: clippy
          override: true
      - uses: actions-rs/clippy@master

16 .github/workflows/clippy_check.yml vendored Normal file
@@ -0,0 +1,16 @@
on: [push, pull_request]
name: Clippy check
jobs:
  clippy_check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          components: clippy
          override: true
      - uses: actions-rs/clippy-check@v1
        with:
          args: --all-targets --all-features -- -D warnings
          token: ${{ secrets.GITHUB_TOKEN }}

28 .github/workflows/cross_compile.yml vendored Normal file
@@ -0,0 +1,28 @@
# We could use `@actions-rs/cargo` Action ability to automatically install `cross` tool
# in order to compile our application for some unusual targets.

on: [push, pull_request]

name: Cross-compile

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - armv7-unknown-linux-gnueabihf
          - powerpc64-unknown-linux-gnu
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          target: ${{ matrix.target }}
          override: true
      - uses: actions-rs/cargo@v1
        with:
          use-cross: true
          command: build
          args: --release --target=${{ matrix.target }}

66 .github/workflows/grcov.yml vendored Normal file
@@ -0,0 +1,66 @@
on: [push, pull_request]

name: Code coverage with grcov

jobs:
  grcov:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os:
          - ubuntu-latest
          - macOS-latest
          # - windows-latest

    steps:
      - uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
          profile: minimal

      - name: Execute tests
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all
        env:
          CARGO_INCREMENTAL: 0
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests"

      # Note that `actions-rs/grcov` Action can install `grcov` too,
      # but can't use faster installation methods yet.
      # As a temporary experiment `actions-rs/install` Action plugged in here.
      # Consider **NOT** to copy that into your workflow,
      # but use `actions-rs/grcov` only
      - name: Pre-installing grcov
        uses: actions-rs/install@v0.1
        with:
          crate: grcov
          use-tool-cache: true

      - name: Gather coverage data
        id: coverage
        uses: actions-rs/grcov@v0.1
        with:
          coveralls-token: ${{ secrets.COVERALLS_TOKEN }}

      - name: Coveralls upload
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          parallel: true
          path-to-lcov: ${{ steps.coverage.outputs.report }}

  grcov_finalize:
    runs-on: ubuntu-latest
    needs: grcov
    steps:
      - name: Coveralls finalization
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          parallel-finished: true

110 .github/workflows/msrv.yml vendored Normal file
@@ -0,0 +1,110 @@
# Based on https://github.com/actions-rs/meta/blob/master/recipes/msrv.md

on: [push, pull_request]

name: MSRV

jobs:
  check:
    name: Check
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust:
          - stable
          - 1.31.0
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: ${{ matrix.rust }}
          override: true

      - name: Run cargo check
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: check

  test:
    name: Test Suite
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust:
          - stable
          - 1.31.0
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: ${{ matrix.rust }}
          override: true

      - name: Run cargo test
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: test

  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust:
          - stable
          - 1.31.0
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: ${{ matrix.rust }}
          override: true

      - name: Install rustfmt
        run: rustup component add rustfmt

      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: fmt
          args: --all -- --check

  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust:
          - stable
          - 1.31.0
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: ${{ matrix.rust }}
          override: true

      - name: Install clippy
        run: rustup component add clippy

      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: clippy
          args: -- -D warnings

78 .github/workflows/nightly_lints.yml vendored Normal file
@@ -0,0 +1,78 @@
on: [push, pull_request]

name: Nightly lints

jobs:
  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install nightly toolchain with clippy available
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
          components: clippy

      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: clippy
          args: -- -D warnings

  rustfmt:
    name: Format
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install nightly toolchain with rustfmt available
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
          components: rustfmt

      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: fmt
          args: --all -- --check

  combo:
    name: Clippy + rustfmt
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install nightly toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
          components: rustfmt, clippy

      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: fmt
          args: --all -- --check

      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: clippy
          args: -- -D warnings

79 .github/workflows/quickstart.yml vendored Normal file
@@ -0,0 +1,79 @@
# Based on https://github.com/actions-rs/meta/blob/master/recipes/quickstart.md
#
# While our "example" application has the platform-specific code,
# for simplicity we are compiling and testing everything on the Ubuntu environment only.
# For multi-OS testing see the `cross.yml` workflow.

on: [push, pull_request]

name: Quickstart

jobs:
  check:
    name: Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Run cargo check
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: check

  test:
    name: Test Suite
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Run cargo test
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: test

  lints:
    name: Lints
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2

      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
          components: rustfmt, clippy

      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: fmt
          args: --all -- --check

      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        continue-on-error: true # WARNING: only for this example, remove it!
        with:
          command: clippy
          args: -- -D warnings

6 .gitignore vendored
@@ -3,7 +3,7 @@
*.jar
*jar
*~
*.rs.bk
**/*.rs.bk
.s*
.*.sw*
*.rs.bak
@@ -15,6 +15,8 @@
.lein-plugins/
.lein-repl-history
.nrepl-port
.bundle/
docs/vendor/
/.lein-*
/.nrepl-port
Cargo.lock
@@ -22,7 +24,7 @@ Cargo.lock
/classes/
/node_modules/
/out/
target/
/target
pom.xml
pom.xml.asc
/.cljs_node_repl/

55 .travis.yml
@@ -1,11 +1,53 @@
language: rust
env:
  - CARGO_INCREMENTAL=0
# https://bheisler.github.io/post/efficient-use-of-travis-ci-cache-for-rust/
before_cache:
  # Delete loose files in the debug directory
  - find ./target/debug -maxdepth 1 -type f -delete
  # Delete the test and benchmark executables. Finding these all might take some
  # experimentation.
  - rm -rf ./target/debug/deps/criterion*
  - rm -rf ./target/debug/deps/bench*
  # Delete the associated metadata files for those executables
  - rm -rf ./target/debug/.fingerprint/criterion*
  - rm -rf ./target/debug/.fingerprint/bench*
  # Note that all of the above need to be repeated for `release/` instead of
  # `debug/` if your build script builds artifacts in release mode.
  # This is just more metadata
  - rm -f ./target/.rustc_info.json
  # Also delete the saved benchmark data from the test benchmarks. If you
  # have Criterion.rs benchmarks, you'll probably want to do this as well, or set
  # the CRITERION_HOME environment variable to move that data out of the
  # `target/` directory.
  - rm -rf ./target/criterion
  # Also delete cargo's registry index. This is updated on every build, but it's
  # way cheaper to re-download than the whole cache is.
  - rm -rf "$TRAVIS_HOME/.cargo/registry/index/"
  - rm -rf "$TRAVIS_HOME/.cargo/registry/src"
cache:
  directories:
    - ./target
    - $TRAVIS_HOME/.cache/sccache
    - $TRAVIS_HOME/.cargo/
    - $TRAVIS_HOME/.rustup/
before_script:
  - cargo install --force cargo-audit
  - cargo generate-lockfile
  - rustup component add clippy-preview
script:
  - cargo audit
# We use OSX so that we can get a reasonably up to date version of SQLCipher.
# (The version in Travis's default Ubuntu Trusty is much too old).
os: osx
before_install:
  - brew install sqlcipher --with-fts
  - brew install sqlcipher
rust:
  - 1.25.0
  - 1.43.0
  - 1.44.0
  - 1.45.0
  - 1.46.0
  - 1.47.0
  - stable
  - beta
  - nightly
@@ -16,20 +58,21 @@ matrix:
jobs:
  include:
    - stage: "Test iOS"
      rust: 1.25.0
      rust: 1.47.0
      script: ./scripts/test-ios.sh
    - stage: "Docs"
      rust: 1.25.0
      rust: 1.47.0
      script: ./scripts/cargo-doc.sh
script:
  - cargo build --verbose --all
  - cargo clippy --all-targets --all-features -- -D warnings -A clippy::comparison-chain -A clippy::many-single-char-names # Check tests and non-default crate features.
  - cargo test --verbose --all
  - cargo test --features edn/serde_support --verbose --all
  # We can't pick individual features out with `cargo test --all` (At the time of this writing, this
  # works but does the wrong thing because of a bug in cargo, but its fix will be to disallow doing
  # this all-together, see https://github.com/rust-lang/cargo/issues/5364 for more information). To
  # work around this, we run tests individually for subcrates that rely on `rusqlite`.
  # work around this, we run tests individually for sub-crates that rely on `rusqlite`.
  - |
    for crate in "" "db" "db-traits" "ffi" "public-traits" "query-projector" "query-projector-traits" "query-pull" "sql" "tolstoy" "tolstoy-traits" "transaction" "tools/cli"; do
      cargo test --manifest-path ./$crate/Cargo.toml --verbose --no-default-features --features sqlcipher
    done
cache: cargo

42 Cargo.toml
@@ -1,5 +1,5 @@
[package]
edition = "2018"
edition = "2021"
authors = [
    "Richard Newman <rnewman@twinql.com>",
    "Nicholas Alexander <nalexander@mozilla.com>",
@@ -11,9 +11,10 @@ authors = [
    "Kit Cambridge <kit@yakshaving.ninja>",
    "Edouard Oger <eoger@fastmail.com>",
    "Thom Chiovoloni <tchiovoloni@mozilla.com>",
    "Gregory Burd <greg@burd.me>",
]
name = "mentat"
version = "0.11.2"
version = "0.14.0"
build = "build/version.rs"

[features]
@@ -23,24 +24,37 @@ sqlcipher = ["rusqlite/sqlcipher", "mentat_db/sqlcipher"]
syncable = ["mentat_tolstoy", "tolstoy_traits", "mentat_db/syncable"]

[workspace]
members = ["tools/cli", "ffi"]
members = [
    "tools/cli",
    "ffi", "core", "core-traits","db", "db-traits", "edn", "public-traits", "query-algebrizer",
    "query-algebrizer-traits", "query-projector", "query-projector-traits","query-pull",
    "query-sql", "sql", "sql-traits", "tolstoy-traits", "tolstoy", "transaction"
]

[build-dependencies]
rustc_version = "0.2"
rustc_version = "~0.4"

[dev-dependencies]
assert_approx_eq = "~1.1"

#[dev-dependencies.cargo-husky]
#version = "1"
#default-features = false # Disable features which are enabled by default
#features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
#cargo audit
#cargo outdated

[dependencies]
chrono = "0.4"
failure = "0.1.6"
lazy_static = "1.4.0"
time = "0.2"
log = "0.4"
uuid = { version = "0.8", features = ["v4", "serde"] }

chrono = "~0.4"
failure = "~0.1"
lazy_static = "~1.4"
time = "0.3.1"
log = "~0.4"
uuid = { version = "~1", features = ["v4", "serde"] }

[dependencies.rusqlite]
version = "0.21.0"
# System sqlite might be very old.
features = ["limits"]
version = "~0.29"
features = ["limits", "bundled"]

[dependencies.edn]
path = "edn"

11 Makefile Normal file
@@ -0,0 +1,11 @@
.PHONY: outdated fix

outdated:
	for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo outdated -R); done


fix:
	$(for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo fix --allow-dirty --broken-code --edition-idioms); done)

upgrades:
	cargo upgrades

29 NOTES Normal file
@@ -0,0 +1,29 @@
* sqlite -> monetdb-lite-c + fts5 + bayesdb
* fts5 + regex + tre/fuzzy + codesearch/trigram filters, streaming bloom filters https://arxiv.org/abs/2001.03147
* datalog to "goblin relational engine" (gtk)
* branching distributed wal (chain replication) and CRDTs
* alf:fn query language
* datatypes via bit syntax+some code?
* pure lang?

* https://github.com/dahjelle/pouch-datalog
* https://github.com/edn-query-language/eql
* https://github.com/borkdude/jet
* https://github.com/walmartlabs/dyn-edn
* https://github.com/go-edn/edn
* https://github.com/smothers/cause
* https://github.com/oscaro/eq
* https://github.com/clojure-emacs/parseedn
* https://github.com/exoscale/seql
* https://github.com/axboe/liburing

* (EAVtf) - entity attribute value type flags

* distributed, replicated WAL
  * https://github.com/mirage/irmin

* What if facts had "confidence" [0-1)?
* entity attribute value type flags
* https://github.com/probcomp/BayesDB
* https://github.com/probcomp/bayeslite
* http://probcomp.csail.mit.edu/software/bayesdb/

22 README.md
@@ -1,17 +1,13 @@
# Project Mentat
[![Build Status](https://travis-ci.org/mozilla/mentat.svg?branch=master)](https://travis-ci.org/mozilla/mentat)

**Project Mentat is [no longer being developed or actively maintained by Mozilla](https://mail.mozilla.org/pipermail/firefox-dev/2018-September/006780.html).** This repository will be marked read-only in the near future. You are, of course, welcome to fork the repository and use the existing code.
[![Build Status](https://travis-ci.org/qpdb/mentat.svg?branch=master)](https://travis-ci.org/qpdb/mentat)

Project Mentat is a persistent, embedded knowledge base. It draws heavily on [DataScript](https://github.com/tonsky/datascript) and [Datomic](http://datomic.com).

Mentat is implemented in Rust.
This project was started by Mozilla, but [is no longer being developed or actively maintained by them](https://mail.mozilla.org/pipermail/firefox-dev/2018-September/006780.html). [Their repository](https://github.com/mozilla/mentat) was marked read-only, [this fork](https://github.com/qpdb/mentat) is an attempt to revive and continue that interesting work. We owe the team at Mozilla more than words can express for inspiring us all and for this project in particular.

The first version of Project Mentat, named Datomish, [was written in ClojureScript](https://github.com/mozilla/mentat/tree/clojure), targeting both Node (on top of `promise_sqlite`) and Firefox (on top of `Sqlite.jsm`). It also worked in pure Clojure on the JVM on top of `jdbc-sqlite`. The name was changed to avoid confusion with [Datomic](http://datomic.com).
*Thank you*.

The Rust implementation gives us a smaller compiled output, better performance, more type safety, better tooling, and easier deployment into Firefox and mobile platforms.

[Documentation](https://mozilla.github.io/mentat)
[Documentation](https://docs.rs/mentat)

---
@@ -77,9 +73,11 @@ We've observed that data storage is a particular area of difficulty for software
DataScript asks the question: "What if creating a database were as cheap as creating a Hashmap?"

Mentat is not interested in that. Instead, it's strongly interested in persistence and performance, with very little interest in immutable databases/databases as values or throwaway use.
Mentat is not interested in that. Instead, it's focused on persistence and performance, with very little interest in immutable databases/databases as values or throwaway use.

One might say that Mentat's question is: "What if an SQLite database could store arbitrary relations, for arbitrary consumers, without them having to coordinate an up-front storage-level schema?"
One might say that Mentat's question is: "What if a database could store arbitrary relations, for arbitrary consumers, without them having to coordinate an up-front storage-level schema?"

Consider this a practical approach to facts, to knowledge its storage and access, much like SQLite is a practical RDBMS.

(Note that [domain-level schemas are very valuable](http://martinfowler.com/articles/schemaless/).)
@@ -89,7 +87,7 @@ Some thought has been given to how databases as values — long-term references
Just like DataScript, Mentat speaks Datalog for querying and takes additions and retractions as input to a transaction.

Unlike DataScript, Mentat exposes free-text indexing, thanks to SQLite.
Unlike DataScript, Mentat exposes free-text indexing, thanks to SQLite/FTS.
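For readers unfamiliar with the query style referenced in the README text above, the sketch below (not part of this changeset) shows the general shape of a Datalog query and of a transaction of additions and retractions, written as EDN held in Rust string constants; the `:person/*` attributes and entity ids are hypothetical.

```rust
// Illustrative only: a Mentat/DataScript-style Datalog query and a transaction,
// expressed as EDN text. The :person/* attributes and the entid 12345 are made up.
const EXAMPLE_QUERY: &str = r#"
[:find ?name ?age
 :where [?p :person/name ?name]
        [?p :person/age ?age]]"#;

const EXAMPLE_TRANSACTION: &str = r#"
[[:db/add "person-temp-id" :person/name "Alice"]
 [:db/retract 12345 :person/age 29]]"#;
```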
## Comparison to Datomic
@@ -98,8 +96,6 @@ Datomic is a server-side, enterprise-grade data storage system. Datomic has a be
Many of these design decisions are inapplicable to deployed desktop software; indeed, the use of multiple JVM processes makes Datomic's use in a small desktop app, or a mobile device, prohibitive.

Mentat was designed for embedding, initially in an experimental Electron app ([Tofino](https://github.com/mozilla/tofino)). It is less concerned with exposing consistent database states outside transaction boundaries, because that's less important here, and dropping some of these requirements allows us to leverage SQLite itself.

## Comparison to SQLite

@@ -14,7 +14,7 @@ use std::process::exit;
/// MIN_VERSION should be changed when there's a new minimum version of rustc required
/// to build the project.
static MIN_VERSION: &'static str = "1.41.0";
static MIN_VERSION: &str = "1.69.0";

fn main() {
    let ver = version().unwrap();

@@ -1,6 +1,6 @@
[package]
name = "core_traits"
version = "0.0.1"
version = "0.0.2"
workspace = ".."

[lib]
@@ -8,14 +8,15 @@ name = "core_traits"
path = "lib.rs"

[dependencies]
chrono = { version = "0.4", features = ["serde"] }
enum-set = "0.0.8"
lazy_static = "1.4.0"
indexmap = "1.3.1"
ordered-float = { version = "1.0.2", features = ["serde"] }
uuid = { version = "0.8", features = ["v4", "serde"] }
serde = { version = "1.0", features = ["rc"] }
serde_derive = "1.0"
chrono = { version = "~0.4", features = ["serde"] }
enum-set = "~0.0.8"
lazy_static = "~1.4"
indexmap = "~1.9"
ordered-float = { version = "~2.8", features = ["serde"] }
uuid = { version = "~1", features = ["v4", "serde"] }
serde = { version = "~1.0", features = ["rc"] }
serde_derive = "~1.0"
bytes = { version = "1.0.1", features = ["serde"] }

[dependencies.edn]
path = "../edn"

@@ -14,6 +14,7 @@ extern crate indexmap;
extern crate ordered_float;
#[macro_use]
extern crate serde_derive;
extern crate bytes;
extern crate edn;
extern crate uuid;
#[macro_use]
@@ -33,6 +34,7 @@ use std::sync::Arc;
use std::collections::BTreeMap;

use bytes::Bytes;
use indexmap::IndexMap;

use enum_set::EnumSet;
@@ -52,7 +54,7 @@ use edn::entities::{
mod value_type_set;
pub mod values;

pub use value_type_set::ValueTypeSet;
pub use crate::value_type_set::ValueTypeSet;

#[macro_export]
macro_rules! bail {
@@ -102,14 +104,14 @@ impl<V: TransactableValueMarker> Into<ValuePlace<V>> for KnownEntid {
/// When moving to a more concrete table, such as `datoms`, they are expanded out
/// via these flags and put into their own column rather than a bit field.
pub enum AttributeBitFlags {
    IndexAVET = 1 << 0,
    IndexAVET = 1,
    IndexVAET = 1 << 1,
    IndexFulltext = 1 << 2,
    UniqueValue = 1 << 3,
}

pub mod attribute {
    use TypedValue;
    use crate::TypedValue;

    #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
    pub enum Unique {
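As a quick aside on the flag values touched in the hunk above (a sketch, not part of the diff): each variant is a distinct power of two, so several flags can be packed into one integer and tested with bitwise AND, which is what the doc comment means by a bit field.

```rust
// Illustrative only (not from the Mentat sources): how power-of-two flags like
// the ones above are packed into a single integer and queried with bitwise ops.
fn main() {
    const INDEX_AVET: u8 = 1; // 1 << 0
    const INDEX_VAET: u8 = 1 << 1;
    const INDEX_FULLTEXT: u8 = 1 << 2;
    const UNIQUE_VALUE: u8 = 1 << 3;

    let flags = INDEX_AVET | UNIQUE_VALUE; // pack two flags into one bit field
    assert_eq!(flags, 0b1001);
    assert_eq!(flags & INDEX_VAET, 0); // this flag is not set
    assert_eq!(flags & INDEX_FULLTEXT, 0); // neither is this one
    println!("flags = {:#06b}", flags);
}
```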
@@ -280,6 +282,7 @@ pub enum ValueType {
    String,
    Keyword,
    Uuid,
    Bytes,
}

impl ValueType {
@@ -294,6 +297,7 @@ impl ValueType {
        s.insert(ValueType::String);
        s.insert(ValueType::Keyword);
        s.insert(ValueType::Uuid);
        s.insert(ValueType::Bytes);
        s
    }
}
@@ -321,16 +325,16 @@ impl ValueType {
                ValueType::String => "string",
                ValueType::Keyword => "keyword",
                ValueType::Uuid => "uuid",
                ValueType::Bytes => "bytes",
            },
        )
    }

    pub fn from_keyword(keyword: &Keyword) -> Option<Self> {
        if keyword.namespace() != Some("db.type") {
            return None;
        }

        return match keyword.name() {
            None
        } else {
            match keyword.name() {
                "ref" => Some(ValueType::Ref),
                "boolean" => Some(ValueType::Boolean),
                "instant" => Some(ValueType::Instant),
@@ -339,8 +343,10 @@ impl ValueType {
                "string" => Some(ValueType::String),
                "keyword" => Some(ValueType::Keyword),
                "uuid" => Some(ValueType::Uuid),
                "bytes" => Some(ValueType::Bytes),
                _ => None,
            };
        }
    }
    }

    pub fn into_typed_value(self) -> TypedValue {
@@ -355,6 +361,7 @@ impl ValueType {
            ValueType::String => "string",
            ValueType::Keyword => "keyword",
            ValueType::Uuid => "uuid",
            ValueType::Bytes => "bytes",
        },
    )
    }
@@ -369,14 +376,12 @@ impl ValueType {
            ValueType::String => values::DB_TYPE_STRING.clone(),
            ValueType::Keyword => values::DB_TYPE_KEYWORD.clone(),
            ValueType::Uuid => values::DB_TYPE_UUID.clone(),
            ValueType::Bytes => values::DB_TYPE_BYTES.clone(),
        }
    }

    pub fn is_numeric(&self) -> bool {
        match self {
            &ValueType::Long | &ValueType::Double => true,
            _ => false,
        }
    pub fn is_numeric(self) -> bool {
        matches!(self, ValueType::Long | ValueType::Double)
    }
}

@@ -394,6 +399,7 @@ impl fmt::Display for ValueType {
            ValueType::String => ":db.type/string",
            ValueType::Keyword => ":db.type/keyword",
            ValueType::Uuid => ":db.type/uuid",
            ValueType::Bytes => ":db.type/bytes",
        }
    )
    }
@@ -417,6 +423,7 @@ pub enum TypedValue {
    String(ValueRc<String>),
    Keyword(ValueRc<Keyword>),
    Uuid(Uuid), // It's only 128 bits, so this should be acceptable to clone.
    Bytes(Bytes),
}

impl From<KnownEntid> for TypedValue {
@@ -440,14 +447,15 @@ impl TypedValue {

    pub fn value_type(&self) -> ValueType {
        match self {
            &TypedValue::Ref(_) => ValueType::Ref,
            &TypedValue::Boolean(_) => ValueType::Boolean,
            &TypedValue::Long(_) => ValueType::Long,
            &TypedValue::Instant(_) => ValueType::Instant,
            &TypedValue::Double(_) => ValueType::Double,
            &TypedValue::String(_) => ValueType::String,
            &TypedValue::Keyword(_) => ValueType::Keyword,
            &TypedValue::Uuid(_) => ValueType::Uuid,
            TypedValue::Ref(_) => ValueType::Ref,
            TypedValue::Boolean(_) => ValueType::Boolean,
            TypedValue::Long(_) => ValueType::Long,
            TypedValue::Instant(_) => ValueType::Instant,
            TypedValue::Double(_) => ValueType::Double,
            TypedValue::String(_) => ValueType::String,
            TypedValue::Keyword(_) => ValueType::Keyword,
            TypedValue::Uuid(_) => ValueType::Uuid,
            TypedValue::Bytes(_) => ValueType::Bytes,
        }
    }

@@ -574,7 +582,7 @@ impl TypedValue {
        match self {
            TypedValue::Uuid(v) => {
                // Get an independent copy of the string.
                let s: String = v.to_hyphenated().to_string();
                let s: String = v.hyphenated().to_string();

                // Make a CString out of the new bytes.
                let c: CString = CString::new(s).expect("String conversion failed!");
@@ -595,7 +603,14 @@ impl TypedValue {

    pub fn into_uuid_string(self) -> Option<String> {
        match self {
            TypedValue::Uuid(v) => Some(v.to_hyphenated().to_string()),
            TypedValue::Uuid(v) => Some(v.hyphenated().to_string()),
            _ => None,
        }
    }

    pub fn into_bytes(self) -> Option<Bytes> {
        match self {
            TypedValue::Bytes(b) => Some(b),
            _ => None,
        }
    }
@@ -689,6 +704,12 @@ impl From<f64> for TypedValue {
    }
}

impl From<&[u8]> for TypedValue {
    fn from(bslice: &[u8]) -> Self {
        TypedValue::Bytes(Bytes::copy_from_slice(bslice))
    }
}

trait MicrosecondPrecision {
    /// Truncate the provided `DateTime` to microsecond precision.
    fn microsecond_precision(self) -> Self;
@@ -770,21 +791,21 @@ impl Binding {

    pub fn as_scalar(&self) -> Option<&TypedValue> {
        match self {
            &Binding::Scalar(ref v) => Some(v),
            Binding::Scalar(ref v) => Some(v),
            _ => None,
        }
    }

    pub fn as_vec(&self) -> Option<&Vec<Binding>> {
        match self {
            &Binding::Vec(ref v) => Some(v),
            Binding::Vec(ref v) => Some(v),
            _ => None,
        }
    }

    pub fn as_map(&self) -> Option<&StructuredMap> {
        match self {
            &Binding::Map(ref v) => Some(v),
            Binding::Map(ref v) => Some(v),
            _ => None,
        }
    }
@@ -856,10 +877,10 @@ impl Binding {

    pub fn value_type(&self) -> Option<ValueType> {
        match self {
            &Binding::Scalar(ref v) => Some(v.value_type()),
            Binding::Scalar(ref v) => Some(v.value_type()),

            &Binding::Map(_) => None,
            &Binding::Vec(_) => None,
            Binding::Map(_) => None,
            Binding::Vec(_) => None,
        }
    }
}
@@ -942,7 +963,7 @@ impl Binding {

    pub fn into_uuid_string(self) -> Option<String> {
        match self {
            Binding::Scalar(TypedValue::Uuid(v)) => Some(v.to_hyphenated().to_string()),
            Binding::Scalar(TypedValue::Uuid(v)) => Some(v.hyphenated().to_string()),
            _ => None,
        }
    }
@@ -970,56 +991,56 @@ impl Binding {

    pub fn as_entid(&self) -> Option<&Entid> {
        match self {
            &Binding::Scalar(TypedValue::Ref(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Ref(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_kw(&self) -> Option<&ValueRc<Keyword>> {
        match self {
            &Binding::Scalar(TypedValue::Keyword(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Keyword(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_boolean(&self) -> Option<&bool> {
        match self {
            &Binding::Scalar(TypedValue::Boolean(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Boolean(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_long(&self) -> Option<&i64> {
        match self {
            &Binding::Scalar(TypedValue::Long(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Long(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_double(&self) -> Option<&f64> {
        match self {
            &Binding::Scalar(TypedValue::Double(ref v)) => Some(&v.0),
            Binding::Scalar(TypedValue::Double(ref v)) => Some(&v.0),
            _ => None,
        }
    }

    pub fn as_instant(&self) -> Option<&DateTime<Utc>> {
        match self {
            &Binding::Scalar(TypedValue::Instant(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Instant(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_string(&self) -> Option<&ValueRc<String>> {
        match self {
            &Binding::Scalar(TypedValue::String(ref v)) => Some(v),
            Binding::Scalar(TypedValue::String(ref v)) => Some(v),
            _ => None,
        }
    }

    pub fn as_uuid(&self) -> Option<&Uuid> {
        match self {
            &Binding::Scalar(TypedValue::Uuid(ref v)) => Some(v),
            Binding::Scalar(TypedValue::Uuid(ref v)) => Some(v),
            _ => None,
        }
    }

@@ -10,7 +10,7 @@
use enum_set::EnumSet;

use ValueType;
use crate::ValueType;

trait EnumSetExtensions<T: ::enum_set::CLike + Clone> {
    /// Return a set containing both `x` and `y`.
@@ -92,53 +92,53 @@ impl ValueTypeSet {
        self.0.insert(vt)
    }

    pub fn len(&self) -> usize {
    pub fn len(self) -> usize {
        self.0.len()
    }

    /// Returns a set containing all the types in this set and `other`.
    pub fn union(&self, other: &ValueTypeSet) -> ValueTypeSet {
    pub fn union(self, other: ValueTypeSet) -> ValueTypeSet {
        ValueTypeSet(self.0.union(other.0))
    }

    pub fn intersection(&self, other: &ValueTypeSet) -> ValueTypeSet {
    pub fn intersection(self, other: ValueTypeSet) -> ValueTypeSet {
        ValueTypeSet(self.0.intersection(other.0))
    }

    /// Returns the set difference between `self` and `other`, which is the
    /// set of items in `self` that are not in `other`.
    pub fn difference(&self, other: &ValueTypeSet) -> ValueTypeSet {
    pub fn difference(self, other: ValueTypeSet) -> ValueTypeSet {
        ValueTypeSet(self.0 - other.0)
    }

    /// Return an arbitrary type that's part of this set.
    /// For a set containing a single type, this will be that type.
    pub fn exemplar(&self) -> Option<ValueType> {
    pub fn exemplar(self) -> Option<ValueType> {
        self.0.iter().next()
    }

    pub fn is_subset(&self, other: &ValueTypeSet) -> bool {
    pub fn is_subset(self, other: ValueTypeSet) -> bool {
        self.0.is_subset(&other.0)
    }

    /// Returns true if `self` and `other` contain no items in common.
    pub fn is_disjoint(&self, other: &ValueTypeSet) -> bool {
    pub fn is_disjoint(self, other: ValueTypeSet) -> bool {
        self.0.is_disjoint(&other.0)
    }

    pub fn contains(&self, vt: ValueType) -> bool {
    pub fn contains(self, vt: ValueType) -> bool {
        self.0.contains(&vt)
    }

    pub fn is_empty(&self) -> bool {
    pub fn is_empty(self) -> bool {
        self.0.is_empty()
    }

    pub fn is_unit(&self) -> bool {
    pub fn is_unit(self) -> bool {
        self.0.len() == 1
    }

    pub fn iter(&self) -> ::enum_set::Iter<ValueType> {
    pub fn iter(self) -> ::enum_set::Iter<ValueType> {
        self.0.iter()
    }
}
@@ -150,8 +150,8 @@ impl From<ValueType> for ValueTypeSet {
}

impl ValueTypeSet {
    pub fn is_only_numeric(&self) -> bool {
        self.is_subset(&ValueTypeSet::of_numeric_types())
    pub fn is_only_numeric(self) -> bool {
        self.is_subset(ValueTypeSet::of_numeric_types())
    }
}
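As a side note on the by-value API in the hunks above (a sketch, not part of this changeset): because `ValueTypeSet` wraps a small copyable `EnumSet`, the set operations can be chained directly. The `From<ValueType>` impl and `of_numeric_types` constructor are visible above; the surrounding crate context is assumed.

```rust
// Illustrative only; would live inside the crate where ValueTypeSet is defined.
#[test]
fn value_type_set_algebra_sketch() {
    let longs: ValueTypeSet = ValueType::Long.into(); // From<ValueType>, shown above
    let numeric = ValueTypeSet::of_numeric_types();   // constructor referenced above
    assert!(longs.is_subset(numeric));
    assert!(longs.is_only_numeric());
    assert_eq!(longs.intersection(numeric).exemplar(), Some(ValueType::Long));
}
```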
@@ -58,6 +58,7 @@ lazy_static_namespaced_keyword_value!(DB_TYPE_REF, "db.type", "ref");
lazy_static_namespaced_keyword_value!(DB_TYPE_STRING, "db.type", "string");
lazy_static_namespaced_keyword_value!(DB_TYPE_URI, "db.type", "uri");
lazy_static_namespaced_keyword_value!(DB_TYPE_UUID, "db.type", "uuid");
lazy_static_namespaced_keyword_value!(DB_TYPE_BYTES, "db.type", "bytes");
lazy_static_namespaced_keyword_value!(DB_UNIQUE, "db", "unique");
lazy_static_namespaced_keyword_value!(DB_UNIQUE_IDENTITY, "db.unique", "identity");
lazy_static_namespaced_keyword_value!(DB_UNIQUE_VALUE, "db.unique", "value");

@@ -1,15 +1,15 @@
[package]
name = "mentat_core"
version = "0.0.1"
version = "0.0.2"
workspace = ".."

[dependencies]
chrono = { version = "0.4", features = ["serde"] }
enum-set = "0.0"
failure = "0.1"
indexmap = "1.3"
ordered-float = { version = "1.0", features = ["serde"] }
uuid = { version = "0.8", features = ["v4", "serde"] }
chrono = { version = "~0.4", features = ["serde"] }
enum-set = "~0.0"
failure = "~0.1"
indexmap = "~1.9"
ordered-float = { version = "~2.8", features = ["serde"] }
uuid = { version = "~1", features = ["v4", "serde"] }

[dependencies.core_traits]
path = "../core-traits"

@@ -13,7 +13,7 @@ use std::collections::BTreeSet;
use core_traits::{Entid, TypedValue};

use Schema;
use crate::Schema;

pub trait CachedAttributes {
    fn is_attribute_cached_reverse(&self, entid: Entid) -> bool;

@@ -11,7 +11,7 @@
use std::cell::Cell;
use std::rc::Rc;

#[derive(Clone)]
#[derive(Clone, Default)]
pub struct RcCounter {
    c: Rc<Cell<usize>>,
}

@@ -35,18 +35,18 @@ pub use chrono::{
pub use edn::parse::parse_query;
pub use edn::{Cloned, FromMicros, FromRc, Keyword, ToMicros, Utc, ValueRc};

pub use cache::{CachedAttributes, UpdateableCache};
pub use crate::cache::{CachedAttributes, UpdateableCache};

mod sql_types;
mod tx_report;
/// Core types defining a Mentat knowledge base.
mod types;

pub use tx_report::TxReport;
pub use crate::tx_report::TxReport;

pub use types::ValueTypeTag;
pub use crate::types::ValueTypeTag;

pub use sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};
pub use crate::sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};

/// Map `Keyword` idents (`:db/ident`) to positive integer entids (`1`).
pub type IdentMap = BTreeMap<Keyword, Entid>;
@@ -135,7 +135,7 @@ impl Schema {
    }

    fn get_raw_entid(&self, x: &Keyword) -> Option<Entid> {
        self.ident_map.get(x).map(|x| *x)
        self.ident_map.get(x).copied()
    }

    pub fn update_component_attributes(&mut self) {

@@ -12,7 +12,7 @@ use std::collections::BTreeSet;
use core_traits::{ValueType, ValueTypeSet};

use types::ValueTypeTag;
use crate::types::ValueTypeTag;

/// Type safe representation of the possible return values from SQLite's `typeof`
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -51,6 +51,7 @@ impl SQLValueType for ValueType {
            ValueType::String => (10, None),
            ValueType::Uuid => (11, None),
            ValueType::Keyword => (13, None),
            ValueType::Bytes => (15, Some(SQLTypeAffinity::Blob)),
        }
    }

@@ -62,7 +63,7 @@ impl SQLValueType for ValueType {
    /// Returns true if the provided integer is in the SQLite value space of this type. For
    /// example, `1` is how we encode `true`.
    fn accommodates_integer(&self, int: i64) -> bool {
        use ValueType::*;
        use crate::ValueType::*;
        match *self {
            Instant => false, // Always use #inst.
            Long | Double => true,
@@ -71,6 +72,7 @@ impl SQLValueType for ValueType {
            ValueType::String => false,
            Keyword => false,
            Uuid => false,
            Bytes => false,
        }
    }
}
@@ -123,8 +125,8 @@ impl SQLValueTypeSet for ValueTypeSet {

#[cfg(test)]
mod tests {
    use crate::sql_types::SQLValueType;
    use core_traits::ValueType;
    use sql_types::SQLValueType;

    #[test]
    fn test_accommodates_integer() {

@@ -14,7 +14,7 @@ use std::collections::BTreeMap;
use core_traits::Entid;

use {DateTime, Utc};
use crate::{DateTime, Utc};

/// A transaction report summarizes an applied transaction.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]

@@ -1,6 +1,6 @@
[package]
name = "db_traits"
version = "0.0.1"
version = "0.0.2"
workspace = ".."

[lib]
@@ -11,8 +11,8 @@ path = "lib.rs"
sqlcipher = ["rusqlite/sqlcipher"]

[dependencies]
failure = "0.1"
failure_derive = "0.1"
failure = "~0.1"
failure_derive = "~0.1"

[dependencies.edn]
path = "../edn"
@@ -21,5 +21,5 @@ path = "../edn"
path = "../core-traits"

[dependencies.rusqlite]
version = "0.21"
features = ["limits"]
version = "~0.29"
features = ["limits", "bundled"]

@@ -69,7 +69,7 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        use self::SchemaConstraintViolation::*;
        match self {
            &ConflictingUpserts {
            ConflictingUpserts {
                ref conflicting_upserts,
            } => {
                writeln!(f, "conflicting upserts:")?;
@@ -78,7 +78,7 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
            }
                Ok(())
            }
            &TypeDisagreements {
            TypeDisagreements {
                ref conflicting_datoms,
            } => {
                writeln!(f, "type disagreements:")?;
@@ -91,9 +91,9 @@ impl ::std::fmt::Display for SchemaConstraintViolation {
            }
                Ok(())
            }
            &CardinalityConflicts { ref conflicts } => {
            CardinalityConflicts { ref conflicts } => {
                writeln!(f, "cardinality conflicts:")?;
                for ref conflict in conflicts {
                for conflict in conflicts {
                    writeln!(f, " {:?}", conflict)?;
                }
                Ok(())
@@ -116,12 +116,12 @@ impl ::std::fmt::Display for InputError {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        use self::InputError::*;
        match self {
            &BadDbId => {
            BadDbId => {
                writeln!(f, ":db/id in map notation must either not be present or be an entid, an ident, or a tempid")
            },
            &BadEntityPlace => {
            }
            BadEntityPlace => {
                writeln!(f, "cannot convert value place into entity place")
            },
            }
        }
    }
}
@@ -163,7 +163,7 @@ impl From<DbErrorKind> for DbError {

impl From<Context<DbErrorKind>> for DbError {
    fn from(inner: Context<DbErrorKind>) -> Self {
        DbError { inner: inner }
        DbError { inner }
    }
}

@@ -177,8 +177,7 @@ impl From<rusqlite::Error> for DbError {

#[derive(Clone, PartialEq, Debug, Fail)]
pub enum DbErrorKind {
    /// We're just not done yet. Message that the feature is recognized but not yet
    /// implemented.
    /// We're just not done yet. Recognized a feature that is not yet implemented.
    #[fail(display = "not yet implemented: {}", _0)]
    NotYetImplemented(String),

@@ -1,6 +1,6 @@
[package]
name = "mentat_db"
version = "0.0.1"
version = "0.0.2"
workspace = ".."

[features]
@@ -9,21 +9,21 @@ sqlcipher = ["rusqlite/sqlcipher"]
syncable = ["serde", "serde_json", "serde_derive"]

[dependencies]
failure = "0.1.6"
indexmap = "1.3.1"
itertools = "0.8"
lazy_static = "1.4.0"
log = "0.4"
ordered-float = "1.0.2"
time = "0.2"
petgraph = "0.5"
serde = { version = "1.0", optional = true }
serde_json = { version = "1.0", optional = true }
serde_derive = { version = "1.0", optional = true }
failure = "~0.1"
indexmap = "~1.9"
itertools = "~0.10"
lazy_static = "~1.4"
log = "~0.4"
ordered-float = "~2.8"
time = "~0.3"
petgraph = "~0.6"
serde = { version = "~1.0", optional = true }
serde_json = { version = "~1.0", optional = true }
serde_derive = { version = "~1.0", optional = true }

[dependencies.rusqlite]
version = "0.21"
features = ["limits"]
version = "~0.29"
features = ["limits", "bundled"]

[dependencies.edn]
path = "../edn"
@@ -40,9 +40,10 @@ path = "../db-traits"
[dependencies.mentat_sql]
path = "../sql"

# Should be dev-dependencies.
# TODO: This should be in dev-dependencies.
[dependencies.tabwriter]
version = "1.2.1"
version = "~1.2"

[dev-dependencies]
env_logger = "0.7"
env_logger = "0.9"
#tabwriter = { version = "1.2.1" }

@@ -48,14 +48,12 @@ where
        } else {
            self.asserted.insert(key, value);
        }
        } else {
            if let Some(asserted_value) = self.asserted.remove(&key) {
        } else if let Some(asserted_value) = self.asserted.remove(&key) {
            self.altered.insert(key, (value, asserted_value));
        } else {
            self.retracted.insert(key, value);
        }
            }
        }
    }
}

#[cfg(test)]

|
@ -10,24 +10,24 @@
|
|||
|
||||
#![allow(dead_code)]
|
||||
|
||||
use db::TypedSQLValue;
|
||||
use crate::db::TypedSQLValue;
|
||||
use crate::entids;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use edn;
|
||||
use edn::entities::Entity;
|
||||
use edn::symbols;
|
||||
use edn::types::Value;
|
||||
use entids;
|
||||
|
||||
use core_traits::{values, TypedValue};
|
||||
|
||||
use crate::schema::SchemaBuilding;
|
||||
use crate::types::{Partition, PartitionMap};
|
||||
use mentat_core::{IdentMap, Schema};
|
||||
use schema::SchemaBuilding;
|
||||
use types::{Partition, PartitionMap};
|
||||
|
||||
/// The first transaction ID applied to the knowledge base.
|
||||
///
|
||||
/// This is the start of the :db.part/tx partition.
|
||||
pub const TX0: i64 = 0x10000000;
|
||||
pub const TX0: i64 = 0x1000_0000;
|
||||
|
||||
/// This is the start of the :db.part/user partition.
|
||||
pub const USER0: i64 = 0x10000;
|
||||
|
@ -206,14 +206,14 @@ lazy_static! {
|
|||
/// Convert (ident, entid) pairs into [:db/add IDENT :db/ident IDENT] `Value` instances.
|
||||
fn idents_to_assertions(idents: &[(symbols::Keyword, i64)]) -> Vec<Value> {
|
||||
idents
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|&(ref ident, _)| {
|
||||
let value = Value::Keyword(ident.clone());
|
||||
Value::Vector(vec![
|
||||
values::DB_ADD.clone(),
|
||||
value.clone(),
|
||||
values::DB_IDENT.clone(),
|
||||
value.clone(),
|
||||
value,
|
||||
])
|
||||
})
|
||||
.collect()
|
||||
|
@ -225,7 +225,7 @@ fn schema_attrs_to_assertions(version: u32, idents: &[symbols::Keyword]) -> Vec<
|
|||
let schema_attr = Value::Keyword(ns_keyword!("db.schema", "attribute"));
|
||||
let schema_version = Value::Keyword(ns_keyword!("db.schema", "version"));
|
||||
idents
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|ident| {
|
||||
let value = Value::Keyword(ident.clone());
|
||||
Value::Vector(vec![
|
||||
|
@ -260,7 +260,7 @@ fn symbolic_schema_to_triples(
|
|||
Value::Map(ref m) => {
|
||||
for (ident, mp) in m {
|
||||
let ident = match ident {
|
||||
&Value::Keyword(ref ident) => ident,
|
||||
Value::Keyword(ref ident) => ident,
|
||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||
"Expected namespaced keyword for ident but got '{:?}'",
|
||||
ident
|
||||
|
@ -270,7 +270,7 @@ fn symbolic_schema_to_triples(
|
|||
Value::Map(ref mpp) => {
|
||||
for (attr, value) in mpp {
|
||||
let attr = match attr {
|
||||
&Value::Keyword(ref attr) => attr,
|
||||
Value::Keyword(ref attr) => attr,
|
||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||
"Expected namespaced keyword for attr but got '{:?}'",
|
||||
attr
|
||||
|
@ -289,7 +289,7 @@ fn symbolic_schema_to_triples(
|
|||
Some(TypedValue::Keyword(ref k)) => ident_map
|
||||
.get(k)
|
||||
.map(|entid| TypedValue::Ref(*entid))
|
||||
.ok_or(DbErrorKind::UnrecognizedIdent(k.to_string()))?,
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedIdent(k.to_string()))?,
|
||||
Some(v) => v,
|
||||
_ => bail!(DbErrorKind::BadBootstrapDefinition(format!(
|
||||
"Expected Mentat typed value for value but got '{:?}'",
|
||||
|
@ -377,8 +377,6 @@ pub(crate) fn bootstrap_entities() -> Vec<Entity<edn::ValueAndSpan>> {
|
|||
);
|
||||
|
||||
// Failure here is a coding error (since the inputs are fixed), not a runtime error.
|
||||
// TODO: represent these bootstrap data errors rather than just panicing.
|
||||
let bootstrap_entities: Vec<Entity<edn::ValueAndSpan>> =
|
||||
edn::parse::entities(&bootstrap_assertions.to_string()).expect("bootstrap assertions");
|
||||
return bootstrap_entities;
|
||||
// TODO: represent these bootstrap entity data errors rather than just panicing.
|
||||
edn::parse::entities(&bootstrap_assertions.to_string()).expect("bootstrap assertions")
|
||||
}
|
||||
|
|
|
@ -54,8 +54,6 @@ use std::collections::btree_map::Entry::{Occupied, Vacant};
|
|||
|
||||
use std::iter::once;
|
||||
|
||||
use std::mem;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use std::iter::Peekable;
|
||||
|
@ -63,6 +61,7 @@ use std::iter::Peekable;
|
|||
use failure::ResultExt;
|
||||
|
||||
use rusqlite;
|
||||
use rusqlite::params_from_iter;
|
||||
|
||||
use core_traits::{Binding, Entid, TypedValue};
|
||||
|
||||
|
@ -74,11 +73,11 @@ use mentat_sql::{QueryBuilder, SQLQuery, SQLiteQueryBuilder};
|
|||
|
||||
use edn::entities::OpType;
|
||||
|
||||
use db::TypedSQLValue;
|
||||
use crate::db::TypedSQLValue;
|
||||
|
||||
use db_traits::errors::{DbError, DbErrorKind, Result};
|
||||
|
||||
use watcher::TransactWatcher;
|
||||
use crate::watcher::TransactWatcher;
|
||||
|
||||
// Right now we use BTreeMap, because we expect few cached attributes.
|
||||
pub type CacheMap<K, V> = BTreeMap<K, V>;
|
||||
|
@ -190,7 +189,7 @@ impl AevFactory {
|
|||
return existing;
|
||||
}
|
||||
self.strings.insert(rc.clone());
|
||||
return TypedValue::String(rc);
|
||||
TypedValue::String(rc)
|
||||
}
|
||||
t => t,
|
||||
}
|
||||
|
@ -200,9 +199,7 @@ impl AevFactory {
|
|||
let a: Entid = row.get_unwrap(0);
|
||||
let e: Entid = row.get_unwrap(1);
|
||||
let value_type_tag: i32 = row.get_unwrap(3);
|
||||
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag)
|
||||
.map(|x| x)
|
||||
.unwrap();
|
||||
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag).unwrap();
|
||||
(a, e, self.intern(v))
|
||||
}
|
||||
}
|
||||
|
@@ -377,7 +374,7 @@ impl RemoveFromCache for MultiValAttributeCache {

 impl CardinalityManyCache for MultiValAttributeCache {
 fn acc(&mut self, e: Entid, v: TypedValue) {
-self.e_vs.entry(e).or_insert(vec![]).push(v)
+self.e_vs.entry(e).or_insert_with(Vec::new).push(v)
 }
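or_insert(vec![]) allocates a fresh Vec on every call only to discard it when the key already exists; or_insert_with(Vec::new) defers the allocation to the vacant case. A small self-contained sketch of the same accumulate-into-map pattern, with placeholder types rather than Mentat's Entid and TypedValue:

    use std::collections::BTreeMap;

    fn accumulate(map: &mut BTreeMap<i64, Vec<String>>, key: i64, value: String) {
        // The Vec is only constructed if `key` was absent.
        map.entry(key).or_insert_with(Vec::new).push(value);
    }

    fn main() {
        let mut m = BTreeMap::new();
        accumulate(&mut m, 1, "a".to_string());
        accumulate(&mut m, 1, "b".to_string());
        assert_eq!(m[&1], vec!["a".to_string(), "b".to_string()]);
    }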
|
||||
fn set(&mut self, e: Entid, vs: Vec<TypedValue>) {
|
||||
|
@ -439,7 +436,7 @@ impl UniqueReverseAttributeCache {
|
|||
}
|
||||
|
||||
fn get_e(&self, v: &TypedValue) -> Option<Entid> {
|
||||
self.v_e.get(v).and_then(|o| o.clone())
|
||||
self.v_e.get(v).and_then(|o| *o)
|
||||
}
|
||||
|
||||
fn lookup(&self, v: &TypedValue) -> Option<Option<Entid>> {
|
||||
|
@ -494,7 +491,7 @@ impl RemoveFromCache for NonUniqueReverseAttributeCache {
|
|||
|
||||
impl NonUniqueReverseAttributeCache {
|
||||
fn acc(&mut self, e: Entid, v: TypedValue) {
|
||||
self.v_es.entry(v).or_insert(BTreeSet::new()).insert(e);
|
||||
self.v_es.entry(v).or_insert_with(BTreeSet::new).insert(e);
|
||||
}
|
||||
|
||||
fn get_es(&self, v: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
||||
|
@ -643,9 +640,9 @@ enum AccumulationBehavior {
|
|||
}
|
||||
|
||||
impl AccumulationBehavior {
|
||||
fn is_replacing(&self) -> bool {
|
||||
fn is_replacing(self) -> bool {
|
||||
match self {
|
||||
&AccumulationBehavior::Add { replacing } => replacing,
|
||||
AccumulationBehavior::Add { replacing } => replacing,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -1006,7 +1003,7 @@ impl AttributeCaches {
|
|||
}
|
||||
}
|
||||
|
||||
// We need this block for fallback.
|
||||
// We need this block for fall-back.
|
||||
impl AttributeCaches {
|
||||
fn get_entid_for_value_if_present(
|
||||
&self,
|
||||
|
@ -1075,8 +1072,10 @@ impl AttributeCaches {
|
|||
replacing: bool,
|
||||
) -> Result<()> {
|
||||
let mut aev_factory = AevFactory::new();
|
||||
let rows = statement.query_map(&args, |row| Ok(aev_factory.row_to_aev(row)))?;
|
||||
let aevs = AevRows { rows: rows };
|
||||
let rows = statement.query_map(params_from_iter(&args), |row| {
|
||||
Ok(aev_factory.row_to_aev(row))
|
||||
})?;
|
||||
let aevs = AevRows { rows };
|
||||
self.accumulate_into_cache(
|
||||
None,
|
||||
schema,
|
||||
|
@@ -1132,7 +1131,7 @@ impl AttributeCaches {
 schema: &'s Schema,
 sqlite: &'c rusqlite::Connection,
 attrs: AttributeSpec,
-entities: &Vec<Entid>,
+entities: &[Entid],
 ) -> Result<()> {
 // Mark the attributes as cached as we go. We do this because we're going in through the
 // back door here, and the usual caching API won't have taken care of this for us.
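Changing entities: &Vec<Entid> to entities: &[Entid] is the standard clippy::ptr_arg fix: a slice parameter accepts a borrowed Vec, an array, or a sub-slice equally well, and the callee never needed Vec-specific methods. A sketch with a plain i64 standing in for Entid:

    fn total(entities: &[i64]) -> i64 {
        entities.iter().sum()
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(total(&v), 6);      // &Vec<i64> coerces to &[i64]
        assert_eq!(total(&v[..2]), 3); // sub-slices work too
    }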
|
@ -1229,17 +1228,17 @@ impl AttributeCaches {
|
|||
schema: &'s Schema,
|
||||
sqlite: &'c rusqlite::Connection,
|
||||
mut attrs: AttributeSpec,
|
||||
entities: &Vec<Entid>,
|
||||
entities: &[Entid],
|
||||
) -> Result<()> {
|
||||
// TODO: Exclude any entities for which every attribute is known.
|
||||
// TODO: initialize from an existing (complete) AttributeCache.
|
||||
|
||||
// Exclude any attributes for which every entity's value is already known.
|
||||
match &mut attrs {
|
||||
&mut AttributeSpec::All => {
|
||||
AttributeSpec::All => {
|
||||
// If we're caching all attributes, there's nothing we can exclude.
|
||||
}
|
||||
&mut AttributeSpec::Specified {
|
||||
AttributeSpec::Specified {
|
||||
ref mut non_fts,
|
||||
ref mut fts,
|
||||
} => {
|
||||
|
@ -1285,7 +1284,7 @@ impl AttributeCaches {
|
|||
schema: &'s Schema,
|
||||
sqlite: &'c rusqlite::Connection,
|
||||
attrs: AttributeSpec,
|
||||
entities: &Vec<Entid>,
|
||||
entities: &[Entid],
|
||||
) -> Result<AttributeCaches> {
|
||||
let mut cache = AttributeCaches::default();
|
||||
cache.populate_cache_for_entities_and_attributes(schema, sqlite, attrs, entities)?;
|
||||
|
@ -1450,7 +1449,7 @@ pub struct SQLiteAttributeCache {
|
|||
}
|
||||
|
||||
impl SQLiteAttributeCache {
|
||||
fn make_mut<'s>(&'s mut self) -> &'s mut AttributeCaches {
|
||||
fn make_mut(&mut self) -> &mut AttributeCaches {
|
||||
Arc::make_mut(&mut self.inner)
|
||||
}
|
||||
|
||||
|
@ -1628,7 +1627,7 @@ impl InProgressSQLiteAttributeCache {
|
|||
let overlay = inner.make_override();
|
||||
InProgressSQLiteAttributeCache {
|
||||
inner: inner.inner,
|
||||
overlay: overlay,
|
||||
overlay,
|
||||
unregistered_forward: Default::default(),
|
||||
unregistered_reverse: Default::default(),
|
||||
}
|
||||
|
@@ -1818,9 +1817,7 @@ impl CachedAttributes for InProgressSQLiteAttributeCache {
 .inner
 .forward_cached_attributes
 .iter()
-.filter(|a| !self.unregistered_forward.contains(a))
-.next()
-.is_some()
+.any(|a| !self.unregistered_forward.contains(a))
 {
 return true;
 }
@@ -1828,9 +1825,7 @@ impl CachedAttributes for InProgressSQLiteAttributeCache {
 self.inner
 .reverse_cached_attributes
 .iter()
-.filter(|a| !self.unregistered_reverse.contains(a))
-.next()
-.is_some()
+.any(|a| !self.unregistered_reverse.contains(a))
 }
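The .filter(..).next().is_some() chain builds an intermediate adapter just to ask whether anything survived the predicate; Iterator::any asks the same question directly and short-circuits on the first match. A sketch with plain slices standing in for the cached-attribute sets:

    fn has_registered(cached: &[i64], unregistered: &[i64]) -> bool {
        // Same result as cached.iter().filter(|a| !unregistered.contains(a)).next().is_some()
        cached.iter().any(|a| !unregistered.contains(a))
    }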
|
||||
fn get_entids_for_value(
|
||||
|
@ -1944,7 +1939,7 @@ impl<'a> InProgressCacheTransactWatcher<'a> {
|
|||
let mut w = InProgressCacheTransactWatcher {
|
||||
collected_assertions: Default::default(),
|
||||
collected_retractions: Default::default(),
|
||||
cache: cache,
|
||||
cache,
|
||||
active: true,
|
||||
};
|
||||
|
||||
|
@ -1977,10 +1972,10 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
|||
}
|
||||
Entry::Occupied(mut entry) => {
|
||||
match entry.get_mut() {
|
||||
&mut Either::Left(_) => {
|
||||
Either::Left(_) => {
|
||||
// Nothing to do.
|
||||
}
|
||||
&mut Either::Right(ref mut vec) => {
|
||||
Either::Right(ref mut vec) => {
|
||||
vec.push((e, v.clone()));
|
||||
}
|
||||
}
|
||||
|
@ -1989,14 +1984,12 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
|||
}
|
||||
|
||||
fn done(&mut self, _t: &Entid, schema: &Schema) -> Result<()> {
|
||||
// Oh, I wish we had impl trait. Without it we have a six-line type signature if we
|
||||
// Oh, how I wish we had `impl trait`. Without it we have a six-line type signature if we
|
||||
// try to break this out as a helper function.
|
||||
let collected_retractions =
|
||||
mem::replace(&mut self.collected_retractions, Default::default());
|
||||
let collected_assertions = mem::replace(&mut self.collected_assertions, Default::default());
|
||||
let collected_retractions = std::mem::take(&mut self.collected_retractions);
|
||||
let collected_assertions = std::mem::take(&mut self.collected_assertions);
|
||||
let mut intermediate_expansion = once(collected_retractions)
|
||||
.chain(once(collected_assertions))
|
||||
.into_iter()
|
||||
.map(move |tree| {
|
||||
tree.into_iter()
|
||||
.filter_map(move |(a, evs)| {
|
||||
|
@ -2018,7 +2011,7 @@ impl<'a> TransactWatcher for InProgressCacheTransactWatcher<'a> {
|
|||
}
|
||||
|
||||
impl InProgressSQLiteAttributeCache {
|
||||
pub fn transact_watcher<'a>(&'a mut self) -> InProgressCacheTransactWatcher<'a> {
|
||||
pub fn transact_watcher(&mut self) -> InProgressCacheTransactWatcher {
|
||||
InProgressCacheTransactWatcher::new(self)
|
||||
}
|
||||
}
|
||||
|
|
139
db/src/db.rs
|
@ -22,15 +22,16 @@ use itertools;
|
|||
use itertools::Itertools;
|
||||
use rusqlite;
|
||||
use rusqlite::limits::Limit;
|
||||
use rusqlite::params_from_iter;
|
||||
use rusqlite::types::{ToSql, ToSqlOutput};
|
||||
use rusqlite::TransactionBehavior;
|
||||
|
||||
use bootstrap;
|
||||
use {repeat_values, to_namespaced_keyword};
|
||||
use crate::bootstrap;
|
||||
use crate::{repeat_values, to_namespaced_keyword};
|
||||
|
||||
use edn::{DateTime, Utc, Uuid, Value};
|
||||
|
||||
use entids;
|
||||
use crate::entids;
|
||||
|
||||
use core_traits::{attribute, Attribute, AttributeBitFlags, Entid, TypedValue, ValueType};
|
||||
|
||||
|
@ -38,13 +39,13 @@ use mentat_core::{AttributeMap, FromMicros, IdentMap, Schema, ToMicros, ValueRc}
|
|||
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
|
||||
use metadata;
|
||||
use schema::SchemaBuilding;
|
||||
use tx::transact;
|
||||
use types::{AVMap, AVPair, Partition, PartitionMap, DB};
|
||||
use crate::metadata;
|
||||
use crate::schema::SchemaBuilding;
|
||||
use crate::tx::transact;
|
||||
use crate::types::{AVMap, AVPair, Partition, PartitionMap, DB};
|
||||
|
||||
use crate::watcher::NullWatcher;
|
||||
use std::convert::TryInto;
|
||||
use watcher::NullWatcher;
|
||||
|
||||
// In PRAGMA foo='bar', `'bar'` must be a constant string (it cannot be a
|
||||
// bound parameter), so we need to escape manually. According to
|
||||
|
@ -66,10 +67,9 @@ fn make_connection(
|
|||
let page_size = 32768;
|
||||
|
||||
let initial_pragmas = if let Some(encryption_key) = maybe_encryption_key {
|
||||
assert!(
|
||||
cfg!(feature = "sqlcipher"),
|
||||
"This function shouldn't be called with a key unless we have sqlcipher support"
|
||||
);
|
||||
if !cfg!(feature = "sqlcipher") {
|
||||
panic!("This function shouldn't be called with a key unless we have sqlcipher support");
|
||||
}
|
||||
// Important: The `cipher_page_size` cannot be changed without breaking
|
||||
// the ability to open databases that were written when using a
|
||||
// different `cipher_page_size`. Additionally, it (AFAICT) must be a
|
||||
|
@ -147,10 +147,10 @@ pub const CURRENT_VERSION: i32 = 1;
|
|||
|
||||
/// MIN_SQLITE_VERSION should be changed when there's a new minimum version of sqlite required
|
||||
/// for the project to work.
|
||||
const MIN_SQLITE_VERSION: i32 = 3008000;
|
||||
const MIN_SQLITE_VERSION: i32 = 3_008_000;
|
||||
|
||||
const TRUE: &'static bool = &true;
|
||||
const FALSE: &'static bool = &false;
|
||||
const TRUE: &bool = &true;
|
||||
const FALSE: &bool = &false;
|
||||
|
||||
/// Turn an owned bool into a static reference to a bool.
|
||||
///
|
||||
|
@ -315,7 +315,7 @@ fn create_current_partition_view(conn: &rusqlite::Connection) -> Result<()> {
|
|||
max(e) + 1 AS idx
|
||||
FROM timelined_transactions WHERE timeline = {} GROUP BY part",
|
||||
case.join(" "),
|
||||
::TIMELINE_MAIN
|
||||
crate::TIMELINE_MAIN
|
||||
);
|
||||
|
||||
conn.execute(&view_stmt, rusqlite::params![])?;
|
||||
|
@ -360,9 +360,10 @@ pub fn create_current_version(conn: &mut rusqlite::Connection) -> Result<DB> {
|
|||
// TODO: validate metadata mutations that aren't schema related, like additional partitions.
|
||||
if let Some(next_schema) = next_schema {
|
||||
if next_schema != db.schema {
|
||||
bail!(DbErrorKind::NotYetImplemented(format!(
|
||||
bail!(DbErrorKind::NotYetImplemented(
|
||||
"Initial bootstrap transaction did not produce expected bootstrap schema"
|
||||
)));
|
||||
.to_string()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -396,7 +397,7 @@ pub trait TypedSQLValue {
|
|||
value: rusqlite::types::Value,
|
||||
value_type_tag: i32,
|
||||
) -> Result<TypedValue>;
|
||||
fn to_sql_value_pair<'a>(&'a self) -> (ToSqlOutput<'a>, i32);
|
||||
fn to_sql_value_pair(&self) -> (ToSqlOutput, i32);
|
||||
fn from_edn_value(value: &Value) -> Option<TypedValue>;
|
||||
fn to_edn_value_pair(&self) -> (Value, ValueType);
|
||||
}
|
||||
|
@ -433,6 +434,7 @@ impl TypedSQLValue for TypedValue {
|
|||
Ok(TypedValue::Uuid(u))
|
||||
}
|
||||
(13, rusqlite::types::Value::Text(x)) => to_namespaced_keyword(&x).map(|k| k.into()),
|
||||
(15, rusqlite::types::Value::Blob(x)) => Ok(TypedValue::Bytes(x.into())),
|
||||
(_, value) => bail!(DbErrorKind::BadSQLValuePair(value, value_type_tag)),
|
||||
}
|
||||
}
|
||||
|
@ -446,43 +448,46 @@ impl TypedSQLValue for TypedValue {
|
|||
/// This function is deterministic.
|
||||
fn from_edn_value(value: &Value) -> Option<TypedValue> {
|
||||
match value {
|
||||
&Value::Boolean(x) => Some(TypedValue::Boolean(x)),
|
||||
&Value::Instant(x) => Some(TypedValue::Instant(x)),
|
||||
&Value::Integer(x) => Some(TypedValue::Long(x)),
|
||||
&Value::Uuid(x) => Some(TypedValue::Uuid(x)),
|
||||
&Value::Float(ref x) => Some(TypedValue::Double(x.clone())),
|
||||
&Value::Text(ref x) => Some(x.clone().into()),
|
||||
&Value::Keyword(ref x) => Some(x.clone().into()),
|
||||
Value::Boolean(x) => Some(TypedValue::Boolean(*x)),
|
||||
Value::Instant(x) => Some(TypedValue::Instant(*x)),
|
||||
Value::Integer(x) => Some(TypedValue::Long(*x)),
|
||||
Value::Uuid(x) => Some(TypedValue::Uuid(*x)),
|
||||
Value::Float(ref x) => Some(TypedValue::Double(*x)),
|
||||
Value::Text(ref x) => Some(x.clone().into()),
|
||||
Value::Keyword(ref x) => Some(x.clone().into()),
|
||||
Value::Bytes(b) => Some(TypedValue::Bytes(b.clone())),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the corresponding SQLite `value` and `value_type_tag` pair.
|
||||
fn to_sql_value_pair<'a>(&'a self) -> (ToSqlOutput<'a>, i32) {
|
||||
fn to_sql_value_pair(&self) -> (ToSqlOutput, i32) {
|
||||
match self {
|
||||
&TypedValue::Ref(x) => (x.into(), 0),
|
||||
&TypedValue::Boolean(x) => ((if x { 1 } else { 0 }).into(), 1),
|
||||
&TypedValue::Instant(x) => (x.to_micros().into(), 4),
|
||||
TypedValue::Ref(x) => ((*x).into(), 0),
|
||||
TypedValue::Boolean(x) => ((if *x { 1 } else { 0 }).into(), 1),
|
||||
TypedValue::Instant(x) => (x.to_micros().into(), 4),
|
||||
// SQLite distinguishes integral from decimal types, allowing long and double to share a tag.
|
||||
&TypedValue::Long(x) => (x.into(), 5),
|
||||
&TypedValue::Double(x) => (x.into_inner().into(), 5),
|
||||
&TypedValue::String(ref x) => (x.as_str().into(), 10),
|
||||
&TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
|
||||
&TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
|
||||
TypedValue::Long(x) => ((*x).into(), 5),
|
||||
TypedValue::Double(x) => (x.into_inner().into(), 5),
|
||||
TypedValue::String(ref x) => (x.as_str().into(), 10),
|
||||
TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
|
||||
TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
|
||||
TypedValue::Bytes(b) => (b.to_vec().into(), 15),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the corresponding EDN `value` and `value_type` pair.
|
||||
fn to_edn_value_pair(&self) -> (Value, ValueType) {
|
||||
match self {
|
||||
&TypedValue::Ref(x) => (Value::Integer(x), ValueType::Ref),
|
||||
&TypedValue::Boolean(x) => (Value::Boolean(x), ValueType::Boolean),
|
||||
&TypedValue::Instant(x) => (Value::Instant(x), ValueType::Instant),
|
||||
&TypedValue::Long(x) => (Value::Integer(x), ValueType::Long),
|
||||
&TypedValue::Double(x) => (Value::Float(x), ValueType::Double),
|
||||
&TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
|
||||
&TypedValue::Uuid(ref u) => (Value::Uuid(u.clone()), ValueType::Uuid),
|
||||
&TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
|
||||
TypedValue::Ref(x) => (Value::Integer(*x), ValueType::Ref),
|
||||
TypedValue::Boolean(x) => (Value::Boolean(*x), ValueType::Boolean),
|
||||
TypedValue::Instant(x) => (Value::Instant(*x), ValueType::Instant),
|
||||
TypedValue::Long(x) => (Value::Integer(*x), ValueType::Long),
|
||||
TypedValue::Double(x) => (Value::Float(*x), ValueType::Double),
|
||||
TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
|
||||
TypedValue::Uuid(ref u) => (Value::Uuid(*u), ValueType::Uuid),
|
||||
TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
|
||||
TypedValue::Bytes(b) => (Value::Bytes(b.clone()), ValueType::Bytes),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -510,7 +515,7 @@ pub fn read_partition_map(conn: &rusqlite::Connection) -> Result<PartitionMap> {
|
|||
// First part of the union sprinkles 'allow_excision' into the 'parts' view.
|
||||
// Second part of the union takes care of partitions which are known
|
||||
// but don't have any transactions.
|
||||
let mut stmt: rusqlite::Statement = conn.prepare(
|
||||
conn.prepare(
|
||||
"
|
||||
SELECT
|
||||
known_parts.part,
|
||||
|
@ -536,16 +541,14 @@ pub fn read_partition_map(conn: &rusqlite::Connection) -> Result<PartitionMap> {
|
|||
known_parts
|
||||
WHERE
|
||||
part NOT IN (SELECT part FROM parts)",
|
||||
)?;
|
||||
let m = stmt
|
||||
)?
|
||||
.query_and_then(rusqlite::params![], |row| -> Result<(String, Partition)> {
|
||||
Ok((
|
||||
row.get(0)?,
|
||||
Partition::new(row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?),
|
||||
))
|
||||
})?
|
||||
.collect();
|
||||
m
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Read the ident map materialized view from the given SQL store.
|
||||
|
@ -767,7 +770,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
//
|
||||
// TODO: `collect` into a HashSet so that any (a, v) is resolved at most once.
|
||||
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
||||
let chunks: itertools::IntoChunks<_> = avs.into_iter().enumerate().chunks(max_vars / 4);
|
||||
let chunks: itertools::IntoChunks<_> = avs.iter().enumerate().chunks(max_vars / 4);
|
||||
|
||||
// We'd like to `flat_map` here, but it's not obvious how to `flat_map` across `Result`.
|
||||
// Alternatively, this is a `fold`, and it might be wise to express it as such.
|
||||
|
@ -806,7 +809,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
values);
|
||||
let mut stmt: rusqlite::Statement = self.prepare(s.as_str())?;
|
||||
|
||||
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(¶ms, |row| -> Result<(i64, Entid)> {
|
||||
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(params_from_iter(¶ms), |row| -> Result<(i64, Entid)> {
|
||||
Ok((row.get(0)?, row.get(1)?))
|
||||
})?.collect();
|
||||
m
|
||||
|
@ -900,9 +903,8 @@ impl MentatStoring for rusqlite::Connection {
|
|||
let bindings_per_statement = 6;
|
||||
|
||||
let max_vars = self.limit(Limit::SQLITE_LIMIT_VARIABLE_NUMBER) as usize;
|
||||
let chunks: itertools::IntoChunks<_> = entities
|
||||
.into_iter()
|
||||
.chunks(max_vars / bindings_per_statement);
|
||||
let chunks: itertools::IntoChunks<_> =
|
||||
entities.iter().chunks(max_vars / bindings_per_statement);
|
||||
|
||||
// We'd like to flat_map here, but it's not obvious how to flat_map across Result.
|
||||
let results: Result<Vec<()>> = chunks.into_iter().map(|chunk| -> Result<()> {
|
||||
|
@ -911,6 +913,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
// We must keep these computed values somewhere to reference them later, so we can't
|
||||
// combine this map and the subsequent flat_map.
|
||||
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
||||
#[allow(clippy::type_complexity)]
|
||||
let block: Result<Vec<(i64 /* e */,
|
||||
i64 /* a */,
|
||||
ToSqlOutput<'a> /* value */,
|
||||
|
@@ -950,7 +953,7 @@ impl MentatStoring for rusqlite::Connection {

 // TODO: consider ensuring we inserted the expected number of rows.
 let mut stmt = self.prepare_cached(s.as_str())?;
-stmt.execute(&params)
+stmt.execute(params_from_iter(&params))
 .context(DbErrorKind::NonFtsInsertionIntoTempSearchTableFailed)
 .map_err(|e| e.into())
 .map(|_c| ())
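Several hunks in this file wrap dynamically built parameter lists in rusqlite::params_from_iter instead of passing a borrowed Vec of ToSql trait objects, which is what newer rusqlite versions expect for variable-length bindings. A hedged sketch of the call shape for a dynamic IN list (illustrative table and column names, and it assumes ids is non-empty so the generated SQL is valid):

    use rusqlite::{params_from_iter, Connection, Result};

    fn count_matching(conn: &Connection, ids: &[i64]) -> Result<i64> {
        // One "?" per id; the placeholder count must match the iterator length.
        let placeholders = vec!["?"; ids.len()].join(", ");
        let sql = format!("SELECT count(*) FROM t WHERE id IN ({})", placeholders);
        conn.query_row(&sql, params_from_iter(ids.iter()), |row| row.get(0))
    }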
|
@ -973,9 +976,8 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
let mut outer_searchid = 2000;
|
||||
|
||||
let chunks: itertools::IntoChunks<_> = entities
|
||||
.into_iter()
|
||||
.chunks(max_vars / bindings_per_statement);
|
||||
let chunks: itertools::IntoChunks<_> =
|
||||
entities.iter().chunks(max_vars / bindings_per_statement);
|
||||
|
||||
// From string to (searchid, value_type_tag).
|
||||
let mut seen: HashMap<ValueRc<String>, (i64, i32)> = HashMap::with_capacity(entities.len());
|
||||
|
@ -988,6 +990,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
// We must keep these computed values somewhere to reference them later, so we can't
|
||||
// combine this map and the subsequent flat_map.
|
||||
// (e0, a0, v0, value_type_tag0, added0, flags0)
|
||||
#[allow(clippy::type_complexity)]
|
||||
let block: Result<Vec<(i64 /* e */,
|
||||
i64 /* a */,
|
||||
Option<ToSqlOutput<'a>> /* value */,
|
||||
|
@ -996,7 +999,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
u8 /* flags0 */,
|
||||
i64 /* searchid */)>> = chunk.map(|&(e, a, ref attribute, ref typed_value, added)| {
|
||||
match typed_value {
|
||||
&TypedValue::String(ref rc) => {
|
||||
TypedValue::String(ref rc) => {
|
||||
datom_count += 1;
|
||||
let entry = seen.entry(rc.clone());
|
||||
match entry {
|
||||
|
@ -1044,7 +1047,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// TODO: consider ensuring we inserted the expected number of rows.
|
||||
let mut stmt = self.prepare_cached(fts_s.as_str())?;
|
||||
stmt.execute(&fts_params).context(DbErrorKind::FtsInsertionFailed)?;
|
||||
stmt.execute(params_from_iter(&fts_params)).context(DbErrorKind::FtsInsertionFailed)?;
|
||||
|
||||
// Second, insert searches.
|
||||
// `params` reference computed values in `block`.
|
||||
|
@ -1072,7 +1075,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// TODO: consider ensuring we inserted the expected number of rows.
|
||||
let mut stmt = self.prepare_cached(s.as_str())?;
|
||||
stmt.execute(¶ms).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
||||
stmt.execute(params_from_iter(¶ms)).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
||||
.map_err(|e| e.into())
|
||||
.map(|_c| ())
|
||||
}).collect::<Result<Vec<()>>>();
|
||||
|
@ -1178,7 +1181,7 @@ pub fn update_metadata(
|
|||
new_schema: &Schema,
|
||||
metadata_report: &metadata::MetadataReport,
|
||||
) -> Result<()> {
|
||||
use metadata::AttributeAlteration::*;
|
||||
use crate::metadata::AttributeAlteration::*;
|
||||
|
||||
// Populate the materialized view directly from datoms (and, potentially in the future,
|
||||
// transactions). This might generalize nicely as we expand the set of materialized views.
|
||||
|
@ -1186,7 +1189,10 @@ pub fn update_metadata(
|
|||
// TODO: use concat! to avoid creating String instances.
|
||||
if !metadata_report.idents_altered.is_empty() {
|
||||
// Idents is the materialized view of the [entid :db/ident ident] slice of datoms.
|
||||
conn.execute(format!("DELETE FROM idents").as_str(), rusqlite::params![])?;
|
||||
conn.execute(
|
||||
"DELETE FROM idents".to_string().as_str(),
|
||||
rusqlite::params![],
|
||||
)?;
|
||||
conn.execute(
|
||||
format!(
|
||||
"INSERT INTO idents SELECT e, a, v, value_type_tag FROM datoms WHERE a IN {}",
|
||||
|
@ -1208,7 +1214,10 @@ pub fn update_metadata(
|
|||
|| !metadata_report.attributes_altered.is_empty()
|
||||
|| !metadata_report.idents_altered.is_empty()
|
||||
{
|
||||
conn.execute(format!("DELETE FROM schema").as_str(), rusqlite::params![])?;
|
||||
conn.execute(
|
||||
"DELETE FROM schema".to_string().as_str(),
|
||||
rusqlite::params![],
|
||||
)?;
|
||||
// NB: we're using :db/valueType as a placeholder for the entire schema-defining set.
|
||||
let s = format!(
|
||||
r#"
|
||||
|
@ -1329,12 +1338,12 @@ mod tests {
|
|||
use std::borrow::Borrow;
|
||||
|
||||
use super::*;
|
||||
use crate::debug::{tempids, TestConn};
|
||||
use crate::internal_types::Term;
|
||||
use core_traits::{attribute, KnownEntid};
|
||||
use db_traits::errors;
|
||||
use debug::{tempids, TestConn};
|
||||
use edn::entities::OpType;
|
||||
use edn::{self, InternSet};
|
||||
use internal_types::Term;
|
||||
use mentat_core::util::Either::*;
|
||||
use mentat_core::{HasSchema, Keyword};
|
||||
use std::collections::BTreeMap;
|
||||
|
|
|
@ -66,23 +66,23 @@ use rusqlite::types::ToSql;
|
|||
use rusqlite::TransactionBehavior;
|
||||
use tabwriter::TabWriter;
|
||||
|
||||
use bootstrap;
|
||||
use db::*;
|
||||
use db::{read_attribute_map, read_ident_map};
|
||||
use crate::bootstrap;
|
||||
use crate::db::*;
|
||||
use crate::db::{read_attribute_map, read_ident_map};
|
||||
use crate::entids;
|
||||
use db_traits::errors::Result;
|
||||
use edn;
|
||||
use entids;
|
||||
|
||||
use core_traits::{Entid, TypedValue, ValueType};
|
||||
|
||||
use crate::internal_types::TermWithTempIds;
|
||||
use crate::schema::SchemaBuilding;
|
||||
use crate::tx::{transact, transact_terms};
|
||||
use crate::types::*;
|
||||
use crate::watcher::NullWatcher;
|
||||
use edn::entities::{EntidOrIdent, TempId};
|
||||
use edn::InternSet;
|
||||
use internal_types::TermWithTempIds;
|
||||
use mentat_core::{HasSchema, SQLValueType, TxReport};
|
||||
use schema::SchemaBuilding;
|
||||
use tx::{transact, transact_terms};
|
||||
use types::*;
|
||||
use watcher::NullWatcher;
|
||||
|
||||
/// Represents a *datom* (assertion) in the store.
|
||||
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
|
||||
|
@ -117,7 +117,7 @@ impl Datom {
|
|||
pub fn to_edn(&self) -> edn::Value {
|
||||
let f = |entid: &EntidOrIdent| -> edn::Value {
|
||||
match *entid {
|
||||
EntidOrIdent::Entid(ref y) => edn::Value::Integer(y.clone()),
|
||||
EntidOrIdent::Entid(ref y) => edn::Value::Integer(*y),
|
||||
EntidOrIdent::Ident(ref y) => edn::Value::Keyword(y.clone()),
|
||||
}
|
||||
};
|
||||
|
@ -134,13 +134,13 @@ impl Datom {
|
|||
|
||||
impl Datoms {
|
||||
pub fn to_edn(&self) -> edn::Value {
|
||||
edn::Value::Vector((&self.0).into_iter().map(|x| x.to_edn()).collect())
|
||||
edn::Value::Vector((&self.0).iter().map(|x| x.to_edn()).collect())
|
||||
}
|
||||
}
|
||||
|
||||
impl Transactions {
|
||||
pub fn to_edn(&self) -> edn::Value {
|
||||
edn::Value::Vector((&self.0).into_iter().map(|x| x.to_edn()).collect())
|
||||
edn::Value::Vector((&self.0).iter().map(|x| x.to_edn()).collect())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -148,7 +148,7 @@ impl FulltextValues {
|
|||
pub fn to_edn(&self) -> edn::Value {
|
||||
edn::Value::Vector(
|
||||
(&self.0)
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|&(x, ref y)| {
|
||||
edn::Value::Vector(vec![edn::Value::Integer(x), edn::Value::Text(y.clone())])
|
||||
})
|
||||
|
@ -238,7 +238,7 @@ pub fn datoms_after<S: Borrow<Schema>>(
|
|||
e: EntidOrIdent::Entid(e),
|
||||
a: to_entid(borrowed_schema, a),
|
||||
v: value,
|
||||
tx: tx,
|
||||
tx,
|
||||
added: None,
|
||||
}))
|
||||
})?
|
||||
|
@ -286,7 +286,7 @@ pub fn transactions_after<S: Borrow<Schema>>(
|
|||
e: EntidOrIdent::Entid(e),
|
||||
a: to_entid(borrowed_schema, a),
|
||||
v: value,
|
||||
tx: tx,
|
||||
tx,
|
||||
added: Some(added),
|
||||
})
|
||||
})?
|
||||
|
@ -306,10 +306,9 @@ pub fn transactions_after<S: Borrow<Schema>>(
|
|||
pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
|
||||
let mut stmt: rusqlite::Statement =
|
||||
conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
|
||||
let params: &[i32; 0] = &[];
|
||||
|
||||
let r: Result<Vec<_>> = stmt
|
||||
.query_and_then(params, |row| {
|
||||
.query_and_then([], |row| {
|
||||
let rowid: i64 = row.get(0)?;
|
||||
let text: String = row.get(1)?;
|
||||
Ok((rowid, text))
|
||||
|
@@ -332,20 +331,20 @@ pub fn dump_sql_query(
 let mut stmt: rusqlite::Statement = conn.prepare(sql)?;

 let mut tw = TabWriter::new(Vec::new()).padding(2);
-write!(&mut tw, "{}\n", sql).unwrap();
+writeln!(&mut tw, "{}", sql).unwrap();

 for column_name in stmt.column_names() {
 write!(&mut tw, "{}\t", column_name).unwrap();
 }
-write!(&mut tw, "\n").unwrap();
+writeln!(&mut tw).unwrap();

 let r: Result<Vec<_>> = stmt
 .query_and_then(params, |row| {
-for i in 0..row.column_count() {
+for i in 0..row.as_ref().column_count() {
 let value: rusqlite::types::Value = row.get(i)?;
 write!(&mut tw, "{:?}\t", value).unwrap();
 }
-write!(&mut tw, "\n").unwrap();
+writeln!(&mut tw).unwrap();
 Ok(())
 })?
 .collect();
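The write! vs writeln! changes here are purely cosmetic: writeln! appends the newline itself, and with no format arguments it replaces write!(out, "\n"), which clippy flags. A tiny sketch of the equivalence over any Write sink (the TabWriter used above is just such a sink):

    use std::io::Write;

    fn write_row(out: &mut impl Write, sql: &str) -> std::io::Result<()> {
        // write!(out, "{}\n", sql) and writeln!(out, "{}", sql) produce the same bytes;
        // writeln!(out) with no arguments replaces write!(out, "\n").
        writeln!(out, "{}", sql)?;
        writeln!(out)
    }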
|
@ -381,8 +380,9 @@ impl TestConn {
|
|||
I: Borrow<str>,
|
||||
{
|
||||
// Failure to parse the transaction is a coding error, so we unwrap.
|
||||
let entities = edn::parse::entities(transaction.borrow())
|
||||
.expect(format!("to be able to parse {} into entities", transaction.borrow()).as_str());
|
||||
let entities = edn::parse::entities(transaction.borrow()).unwrap_or_else(|_| {
|
||||
panic!("to be able to parse {} into entities", transaction.borrow())
|
||||
});
|
||||
|
||||
let details = {
|
||||
// The block scopes the borrow of self.sqlite.
|
||||
|
|
|
@@ -63,7 +63,8 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
 if attribute >= DB_DOC {
 return false;
 }
-match attribute {
+matches!(
+attribute,
 // Idents.
 DB_IDENT |
 // Schema.
@@ -72,19 +73,22 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
 DB_INDEX |
 DB_IS_COMPONENT |
 DB_UNIQUE |
-DB_VALUE_TYPE =>
-true,
-_ => false,
-}
+DB_VALUE_TYPE
+)
 }

 /// Return 'false' if the given attribute might be used to describe a schema attribute.
 pub fn is_a_schema_attribute(attribute: Entid) -> bool {
-match attribute {
-DB_IDENT | DB_CARDINALITY | DB_FULLTEXT | DB_INDEX | DB_IS_COMPONENT | DB_UNIQUE
-| DB_VALUE_TYPE => true,
-_ => false,
-}
+matches!(
+attribute,
+DB_IDENT
+| DB_CARDINALITY
+| DB_FULLTEXT
+| DB_INDEX
+| DB_IS_COMPONENT
+| DB_UNIQUE
+| DB_VALUE_TYPE
+)
 }
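matches! (stable since Rust 1.42) turns a "does this value fit one of these patterns" check into a single boolean expression, replacing the match { pattern => true, _ => false } boilerplate the old code used. A minimal sketch with illustrative constants rather than Mentat's real entid values:

    const DB_IDENT: i64 = 1;
    const DB_VALUE_TYPE: i64 = 40;

    fn is_interesting(attribute: i64) -> bool {
        // Equivalent to: match attribute { DB_IDENT | DB_VALUE_TYPE => true, _ => false }
        matches!(attribute, DB_IDENT | DB_VALUE_TYPE)
    }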
|
||||
lazy_static! {
|
||||
|
|
|
@ -23,10 +23,10 @@ use edn::entities;
|
|||
use edn::entities::{EntityPlace, OpType, TempId, TxFunction};
|
||||
use edn::{SpannedValue, ValueAndSpan, ValueRc};
|
||||
|
||||
use crate::schema::SchemaTypeChecking;
|
||||
use crate::types::{AVMap, AVPair, Schema, TransactableValue};
|
||||
use db_traits::errors;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use schema::SchemaTypeChecking;
|
||||
use types::{AVMap, AVPair, Schema, TransactableValue};
|
||||
|
||||
impl TransactableValue for ValueAndSpan {
|
||||
fn into_typed_value(self, schema: &Schema, value_type: ValueType) -> Result<TypedValue> {
|
||||
|
@ -75,18 +75,14 @@ impl TransactableValue for ValueAndSpan {
|
|||
}
|
||||
}
|
||||
Nil | Boolean(_) | Instant(_) | BigInteger(_) | Float(_) | Uuid(_) | PlainSymbol(_)
|
||||
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) => {
|
||||
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) | Bytes(_) => {
|
||||
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn as_tempid(&self) -> Option<TempId> {
|
||||
self.inner
|
||||
.as_text()
|
||||
.cloned()
|
||||
.map(TempId::External)
|
||||
.map(|v| v.into())
|
||||
self.inner.as_text().cloned().map(TempId::External)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -109,7 +105,8 @@ impl TransactableValue for TypedValue {
|
|||
| TypedValue::Long(_)
|
||||
| TypedValue::Double(_)
|
||||
| TypedValue::Instant(_)
|
||||
| TypedValue::Uuid(_) => {
|
||||
| TypedValue::Uuid(_)
|
||||
| TypedValue::Bytes(_) => {
|
||||
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
|
||||
}
|
||||
}
|
||||
|
@ -117,7 +114,7 @@ impl TransactableValue for TypedValue {
|
|||
|
||||
fn as_tempid(&self) -> Option<TempId> {
|
||||
match self {
|
||||
&TypedValue::String(ref s) => Some(TempId::External((**s).clone()).into()),
|
||||
TypedValue::String(ref s) => Some(TempId::External((**s).clone())),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,30 +60,30 @@ mod upsert_resolution;
|
|||
mod watcher;
|
||||
|
||||
// Export these for reference from sync code and tests.
|
||||
pub use bootstrap::{TX0, USER0, V1_PARTS};
|
||||
pub use crate::bootstrap::{TX0, USER0, V1_PARTS};
|
||||
|
||||
pub static TIMELINE_MAIN: i64 = 0;
|
||||
|
||||
pub use schema::{AttributeBuilder, AttributeValidation};
|
||||
pub use crate::schema::{AttributeBuilder, AttributeValidation};
|
||||
|
||||
pub use bootstrap::CORE_SCHEMA_VERSION;
|
||||
pub use crate::bootstrap::CORE_SCHEMA_VERSION;
|
||||
|
||||
use edn::symbols;
|
||||
|
||||
pub use entids::DB_SCHEMA_CORE;
|
||||
pub use crate::entids::DB_SCHEMA_CORE;
|
||||
|
||||
pub use db::{new_connection, TypedSQLValue};
|
||||
pub use crate::db::{new_connection, TypedSQLValue};
|
||||
|
||||
#[cfg(feature = "sqlcipher")]
|
||||
pub use db::{change_encryption_key, new_connection_with_key};
|
||||
|
||||
pub use watcher::TransactWatcher;
|
||||
pub use crate::watcher::TransactWatcher;
|
||||
|
||||
pub use tx::{transact, transact_terms};
|
||||
pub use crate::tx::{transact, transact_terms};
|
||||
|
||||
pub use tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};
|
||||
pub use crate::tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};
|
||||
|
||||
pub use types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};
|
||||
pub use crate::types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};
|
||||
|
||||
pub fn to_namespaced_keyword(s: &str) -> Result<symbols::Keyword> {
|
||||
let splits = [':', '/'];
|
||||
|
@@ -95,7 +95,7 @@ pub fn to_namespaced_keyword(s: &str) -> Result<symbols::Keyword> {
 _ => None,
 };

-nsk.ok_or(DbErrorKind::NotYetImplemented(format!("InvalidKeyword: {}", s)).into())
+nsk.ok_or_else(|| DbErrorKind::NotYetImplemented(format!("InvalidKeyword: {}", s)).into())
 }

 /// Prepare an SQL `VALUES` block, like (?, ?, ?), (?, ?, ?).
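The doc comment above describes repeat_values, the helper that builds the "(?, ?, ?), (?, ?, ?)" placeholder groups used for bulk inserts. Its real signature is not shown in this diff, so the following is only an illustrative reimplementation of the idea under a hypothetical name:

    /// Build `count` comma-separated groups of `width` SQL placeholders,
    /// e.g. repeat_placeholder_groups(2, 3) == "(?, ?, ?), (?, ?, ?)".
    fn repeat_placeholder_groups(count: usize, width: usize) -> String {
        let group = format!("({})", vec!["?"; width].join(", "));
        vec![group; count].join(", ")
    }

    fn main() {
        assert_eq!(repeat_placeholder_groups(2, 3), "(?, ?, ?), (?, ?, ?)");
    }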
|
|
|
@ -29,18 +29,18 @@ use failure::ResultExt;
|
|||
use std::collections::btree_map::Entry;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
use add_retract_alter_set::AddRetractAlterSet;
|
||||
use crate::add_retract_alter_set::AddRetractAlterSet;
|
||||
use crate::entids;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use edn::symbols;
|
||||
use entids;
|
||||
|
||||
use core_traits::{attribute, Entid, TypedValue, ValueType};
|
||||
|
||||
use mentat_core::{AttributeMap, Schema};
|
||||
|
||||
use schema::{AttributeBuilder, AttributeValidation};
|
||||
use crate::schema::{AttributeBuilder, AttributeValidation};
|
||||
|
||||
use types::EAV;
|
||||
use crate::types::EAV;
|
||||
|
||||
/// An alteration to an attribute.
|
||||
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
|
||||
|
@ -111,7 +111,7 @@ fn update_attribute_map_from_schema_retractions(
|
|||
let mut eas = BTreeMap::new();
|
||||
for (e, a, v) in retractions.into_iter() {
|
||||
if entids::is_a_schema_attribute(a) {
|
||||
eas.entry(e).or_insert(vec![]).push(a);
|
||||
eas.entry(e).or_insert_with(Vec::new).push(a);
|
||||
suspect_retractions.push((e, a, v));
|
||||
} else {
|
||||
filtered_retractions.push((e, a, v));
|
||||
|
@ -145,7 +145,7 @@ fn update_attribute_map_from_schema_retractions(
|
|||
// Remove attributes corresponding to retracted attribute.
|
||||
attribute_map.remove(&e);
|
||||
} else {
|
||||
bail!(DbErrorKind::BadSchemaAssertion(format!("Retracting defining attributes of a schema without retracting its :db/ident is not permitted.")));
|
||||
bail!(DbErrorKind::BadSchemaAssertion("Retracting defining attributes of a schema without retracting its :db/ident is not permitted.".to_string()));
|
||||
}
|
||||
} else {
|
||||
filtered_retractions.push((e, a, v));
|
||||
|
@ -172,7 +172,7 @@ pub fn update_attribute_map_from_entid_triples(
|
|||
) -> AttributeBuilder {
|
||||
existing
|
||||
.get(&attribute_id)
|
||||
.map(AttributeBuilder::to_modify_attribute)
|
||||
.map(AttributeBuilder::modify_attribute)
|
||||
.unwrap_or_else(AttributeBuilder::default)
|
||||
}
|
||||
|
||||
|
@ -248,6 +248,7 @@ pub fn update_attribute_map_from_entid_triples(
|
|||
TypedValue::Ref(entids::DB_TYPE_REF) => { builder.value_type(ValueType::Ref); },
|
||||
TypedValue::Ref(entids::DB_TYPE_STRING) => { builder.value_type(ValueType::String); },
|
||||
TypedValue::Ref(entids::DB_TYPE_UUID) => { builder.value_type(ValueType::Uuid); },
|
||||
TypedValue::Ref(entids::DB_TYPE_BYTES) => { builder.value_type(ValueType::Bytes); },
|
||||
_ => bail!(DbErrorKind::BadSchemaAssertion(format!("Expected [... :db/valueType :db.type/*] but got [... :db/valueType {:?}] for entid {} and attribute {}", value, entid, attr)))
|
||||
}
|
||||
},
|
||||
|
@ -337,8 +338,8 @@ pub fn update_attribute_map_from_entid_triples(
|
|||
}
|
||||
|
||||
Ok(MetadataReport {
|
||||
attributes_installed: attributes_installed,
|
||||
attributes_altered: attributes_altered,
|
||||
attributes_installed,
|
||||
attributes_altered,
|
||||
idents_altered: BTreeMap::default(),
|
||||
})
|
||||
}
|
||||
|
@ -439,12 +440,12 @@ where
|
|||
// component_attributes up-to-date: most of the time we'll rebuild it
|
||||
// even though it's not necessary (e.g. a schema attribute that's _not_
|
||||
// a component was removed, or a non-component related attribute changed).
|
||||
if report.attributes_did_change() || ident_set.retracted.len() > 0 {
|
||||
if report.attributes_did_change() || !ident_set.retracted.is_empty() {
|
||||
schema.update_component_attributes();
|
||||
}
|
||||
|
||||
Ok(MetadataReport {
|
||||
idents_altered: idents_altered,
|
||||
idents_altered,
|
||||
..report
|
||||
})
|
||||
}
|
||||
|
|
|
@ -10,16 +10,16 @@
|
|||
|
||||
#![allow(dead_code)]
|
||||
|
||||
use db::TypedSQLValue;
|
||||
use crate::db::TypedSQLValue;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use edn;
|
||||
use edn::symbols;
|
||||
|
||||
use core_traits::{attribute, Attribute, Entid, KnownEntid, TypedValue, ValueType};
|
||||
|
||||
use crate::metadata;
|
||||
use crate::metadata::AttributeAlteration;
|
||||
use mentat_core::{AttributeMap, EntidMap, HasSchema, IdentMap, Schema};
|
||||
use metadata;
|
||||
use metadata::AttributeAlteration;
|
||||
|
||||
pub trait AttributeValidation {
|
||||
fn validate<F>(&self, ident: F) -> Result<()>
|
||||
|
@ -77,7 +77,7 @@ fn validate_attribute_map(entid_map: &EntidMap, attribute_map: &AttributeMap) ->
|
|||
entid_map
|
||||
.get(entid)
|
||||
.map(|ident| ident.to_string())
|
||||
.unwrap_or(entid.to_string())
|
||||
.unwrap_or_else(|| entid.to_string())
|
||||
};
|
||||
attribute.validate(ident)?;
|
||||
}
|
||||
|
@ -108,7 +108,7 @@ impl AttributeBuilder {
|
|||
|
||||
/// Make a new AttributeBuilder from an existing Attribute. This is important to allow
|
||||
/// retraction. Only attributes that we allow to change are duplicated here.
|
||||
pub fn to_modify_attribute(attribute: &Attribute) -> Self {
|
||||
pub fn modify_attribute(attribute: &Attribute) -> Self {
|
||||
let mut ab = AttributeBuilder::default();
|
||||
ab.multival = Some(attribute.multival);
|
||||
ab.unique = Some(attribute.unique);
|
||||
|
@ -116,22 +116,22 @@ impl AttributeBuilder {
|
|||
ab
|
||||
}
|
||||
|
||||
pub fn value_type<'a>(&'a mut self, value_type: ValueType) -> &'a mut Self {
|
||||
pub fn value_type(&mut self, value_type: ValueType) -> &mut Self {
|
||||
self.value_type = Some(value_type);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn multival<'a>(&'a mut self, multival: bool) -> &'a mut Self {
|
||||
pub fn multival(&mut self, multival: bool) -> &mut Self {
|
||||
self.multival = Some(multival);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn non_unique<'a>(&'a mut self) -> &'a mut Self {
|
||||
pub fn non_unique(&mut self) -> &mut Self {
|
||||
self.unique = Some(None);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn unique<'a>(&'a mut self, unique: attribute::Unique) -> &'a mut Self {
|
||||
pub fn unique(&mut self, unique: attribute::Unique) -> &mut Self {
|
||||
if self.helpful && unique == attribute::Unique::Identity {
|
||||
self.index = Some(true);
|
||||
}
|
||||
|
@ -139,12 +139,12 @@ impl AttributeBuilder {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn index<'a>(&'a mut self, index: bool) -> &'a mut Self {
|
||||
pub fn index(&mut self, index: bool) -> &mut Self {
|
||||
self.index = Some(index);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn fulltext<'a>(&'a mut self, fulltext: bool) -> &'a mut Self {
|
||||
pub fn fulltext(&mut self, fulltext: bool) -> &mut Self {
|
||||
self.fulltext = Some(fulltext);
|
||||
if self.helpful && fulltext {
|
||||
self.index = Some(true);
|
||||
|
@ -152,12 +152,12 @@ impl AttributeBuilder {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn component<'a>(&'a mut self, component: bool) -> &'a mut Self {
|
||||
pub fn component(&mut self, component: bool) -> &mut Self {
|
||||
self.component = Some(component);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn no_history<'a>(&'a mut self, no_history: bool) -> &'a mut Self {
|
||||
pub fn no_history(&mut self, no_history: bool) -> &mut Self {
|
||||
self.no_history = Some(no_history);
|
||||
self
|
||||
}
|
||||
|
@ -197,7 +197,7 @@ impl AttributeBuilder {
|
|||
attribute.multival = multival;
|
||||
}
|
||||
if let Some(ref unique) = self.unique {
|
||||
attribute.unique = unique.clone();
|
||||
attribute.unique = *unique;
|
||||
}
|
||||
if let Some(index) = self.index {
|
||||
attribute.index = index;
|
||||
|
@ -223,15 +223,13 @@ impl AttributeBuilder {
|
|||
|
||||
if let Some(ref unique) = self.unique {
|
||||
if *unique != attribute.unique {
|
||||
attribute.unique = unique.clone();
|
||||
attribute.unique = *unique;
|
||||
mutations.push(AttributeAlteration::Unique);
|
||||
}
|
||||
} else {
|
||||
if attribute.unique != None {
|
||||
} else if attribute.unique != None {
|
||||
attribute.unique = None;
|
||||
mutations.push(AttributeAlteration::Unique);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(index) = self.index {
|
||||
if index != attribute.index {
|
||||
|
@ -272,17 +270,17 @@ pub trait SchemaBuilding {
|
|||
impl SchemaBuilding for Schema {
|
||||
fn require_ident(&self, entid: Entid) -> Result<&symbols::Keyword> {
|
||||
self.get_ident(entid)
|
||||
.ok_or(DbErrorKind::UnrecognizedEntid(entid).into())
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedEntid(entid).into())
|
||||
}
|
||||
|
||||
fn require_entid(&self, ident: &symbols::Keyword) -> Result<KnownEntid> {
|
||||
self.get_entid(&ident)
|
||||
.ok_or(DbErrorKind::UnrecognizedIdent(ident.to_string()).into())
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedIdent(ident.to_string()).into())
|
||||
}
|
||||
|
||||
fn require_attribute_for_entid(&self, entid: Entid) -> Result<&Attribute> {
|
||||
self.attribute_for_entid(entid)
|
||||
.ok_or(DbErrorKind::UnrecognizedEntid(entid).into())
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedEntid(entid).into())
|
||||
}
|
||||
|
||||
/// Create a valid `Schema` from the constituent maps.
|
||||
|
@ -290,10 +288,7 @@ impl SchemaBuilding for Schema {
|
|||
ident_map: IdentMap,
|
||||
attribute_map: AttributeMap,
|
||||
) -> Result<Schema> {
|
||||
let entid_map: EntidMap = ident_map
|
||||
.iter()
|
||||
.map(|(k, v)| (v.clone(), k.clone()))
|
||||
.collect();
|
||||
let entid_map: EntidMap = ident_map.iter().map(|(k, v)| (*v, k.clone())).collect();
|
||||
|
||||
validate_attribute_map(&entid_map, &attribute_map)?;
|
||||
Ok(Schema::new(ident_map, entid_map, attribute_map))
|
||||
|
@ -309,10 +304,10 @@ impl SchemaBuilding for Schema {
|
|||
.map(|(symbolic_ident, symbolic_attr, value)| {
|
||||
let ident: i64 = *ident_map
|
||||
.get(&symbolic_ident)
|
||||
.ok_or(DbErrorKind::UnrecognizedIdent(symbolic_ident.to_string()))?;
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedIdent(symbolic_ident.to_string()))?;
|
||||
let attr: i64 = *ident_map
|
||||
.get(&symbolic_attr)
|
||||
.ok_or(DbErrorKind::UnrecognizedIdent(symbolic_attr.to_string()))?;
|
||||
.ok_or_else(|| DbErrorKind::UnrecognizedIdent(symbolic_attr.to_string()))?;
|
||||
Ok((ident, attr, value))
|
||||
})
|
||||
.collect();
|
||||
|
@ -367,6 +362,7 @@ impl SchemaTypeChecking for Schema {
|
|||
(ValueType::Uuid, tv @ TypedValue::Uuid(_)) => Ok(tv),
|
||||
(ValueType::Instant, tv @ TypedValue::Instant(_)) => Ok(tv),
|
||||
(ValueType::Keyword, tv @ TypedValue::Keyword(_)) => Ok(tv),
|
||||
(ValueType::Bytes, tv @ TypedValue::Bytes(_)) => Ok(tv),
|
||||
// Ref coerces a little: we interpret some things depending on the schema as a Ref.
|
||||
(ValueType::Ref, TypedValue::Long(x)) => Ok(TypedValue::Ref(x)),
|
||||
(ValueType::Ref, TypedValue::Keyword(ref x)) => {
|
||||
|
@ -384,6 +380,7 @@ impl SchemaTypeChecking for Schema {
|
|||
| (vt @ ValueType::Uuid, _)
|
||||
| (vt @ ValueType::Instant, _)
|
||||
| (vt @ ValueType::Keyword, _)
|
||||
| (vt @ ValueType::Bytes, _)
|
||||
| (vt @ ValueType::Ref, _) => {
|
||||
bail!(DbErrorKind::BadValuePair(format!("{}", value), vt))
|
||||
}
|
||||
|
@ -399,7 +396,7 @@ mod test {
|
|||
|
||||
fn add_attribute(schema: &mut Schema, ident: Keyword, entid: Entid, attribute: Attribute) {
|
||||
schema.entid_map.insert(entid, ident.clone());
|
||||
schema.ident_map.insert(ident.clone(), entid);
|
||||
schema.ident_map.insert(ident, entid);
|
||||
|
||||
if attribute.component {
|
||||
schema.component_attributes.push(entid);
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
|
||||
use std::ops::RangeFrom;
|
||||
|
||||
use rusqlite;
|
||||
use rusqlite::{self, params_from_iter};
|
||||
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
|
||||
|
@ -22,16 +22,16 @@ use edn::InternSet;
|
|||
|
||||
use edn::entities::OpType;
|
||||
|
||||
use db;
|
||||
use db::TypedSQLValue;
|
||||
use crate::db;
|
||||
use crate::db::TypedSQLValue;
|
||||
|
||||
use tx::{transact_terms_with_action, TransactorAction};
|
||||
use crate::tx::{transact_terms_with_action, TransactorAction};
|
||||
|
||||
use types::PartitionMap;
|
||||
use crate::types::PartitionMap;
|
||||
|
||||
use internal_types::{Term, TermWithoutTempIds};
|
||||
use crate::internal_types::{Term, TermWithoutTempIds};
|
||||
|
||||
use watcher::NullWatcher;
|
||||
use crate::watcher::NullWatcher;
|
||||
|
||||
/// Collects a supplied tx range into an DESC ordered Vec of valid txs,
|
||||
/// ensuring they all belong to the same timeline.
|
||||
|
@ -58,7 +58,7 @@ fn collect_ordered_txs_to_move(
|
|||
None => bail!(DbErrorKind::TimelinesInvalidRange),
|
||||
};
|
||||
|
||||
while let Some(t) = rows.next() {
|
||||
for t in rows {
|
||||
let t = t?;
|
||||
txs.push(t.0);
|
||||
if t.1 != timeline {
|
||||
|
@@ -79,12 +79,9 @@ fn move_transactions_to(
 &format!(
 "UPDATE timelined_transactions SET timeline = {} WHERE tx IN {}",
 new_timeline,
-::repeat_values(tx_ids.len(), 1)
+crate::repeat_values(tx_ids.len(), 1)
 ),
-&(tx_ids
-.iter()
-.map(|x| x as &dyn rusqlite::types::ToSql)
-.collect::<Vec<_>>()),
+params_from_iter(tx_ids.iter()),
 )?;
 Ok(())
 }
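The ::repeat_values to crate::repeat_values change, like the many use crate::... rewrites throughout this set, is the Rust 2018 path style: items in the current crate are named from crate:: instead of bare or ::-prefixed 2015-edition paths. A toy crate layout showing the two spellings:

    // lib.rs of a toy crate
    pub mod util {
        pub fn answer() -> i64 {
            42
        }
    }

    pub mod caller {
        pub fn call() -> i64 {
            // 2015 edition allowed ::util::answer() or util::answer() here;
            // 2018 edition spells the crate-local path explicitly.
            crate::util::answer()
        }
    }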
|
@ -108,12 +105,13 @@ fn reversed_terms_for(
|
|||
tx_id: Entid,
|
||||
) -> Result<Vec<TermWithoutTempIds>> {
|
||||
let mut stmt = conn.prepare("SELECT e, a, v, value_type_tag, tx, added FROM timelined_transactions WHERE tx = ? AND timeline = ? ORDER BY tx DESC")?;
|
||||
let mut rows = stmt.query_and_then(
|
||||
&[&tx_id, &::TIMELINE_MAIN],
|
||||
let rows = stmt.query_and_then(
|
||||
&[&tx_id, &crate::TIMELINE_MAIN],
|
||||
|row| -> Result<TermWithoutTempIds> {
|
||||
let op = match row.get(5)? {
|
||||
true => OpType::Retract,
|
||||
false => OpType::Add,
|
||||
let op = if row.get(5)? {
|
||||
OpType::Retract
|
||||
} else {
|
||||
OpType::Add
|
||||
};
|
||||
Ok(Term::AddOrRetract(
|
||||
op,
|
||||
|
@ -126,7 +124,7 @@ fn reversed_terms_for(
|
|||
|
||||
let mut terms = vec![];
|
||||
|
||||
while let Some(row) = rows.next() {
|
||||
for row in rows {
|
||||
terms.push(row?);
|
||||
}
|
||||
Ok(terms)
|
||||
|
@ -140,10 +138,10 @@ pub fn move_from_main_timeline(
|
|||
txs_from: RangeFrom<Entid>,
|
||||
new_timeline: Entid,
|
||||
) -> Result<(Option<Schema>, PartitionMap)> {
|
||||
if new_timeline == ::TIMELINE_MAIN {
|
||||
bail!(DbErrorKind::NotYetImplemented(format!(
|
||||
"Can't move transactions to main timeline"
|
||||
)));
|
||||
if new_timeline == crate::TIMELINE_MAIN {
|
||||
bail!(DbErrorKind::NotYetImplemented(
|
||||
"Can't move transactions to main timeline".to_string()
|
||||
));
|
||||
}
|
||||
|
||||
// We don't currently ensure that moving transactions onto a non-empty timeline
|
||||
|
@ -153,7 +151,7 @@ pub fn move_from_main_timeline(
|
|||
bail!(DbErrorKind::TimelinesMoveToNonEmpty);
|
||||
}
|
||||
|
||||
let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, ::TIMELINE_MAIN)?;
|
||||
let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, crate::TIMELINE_MAIN)?;
|
||||
|
||||
let mut last_schema = None;
|
||||
for tx_id in &txs_to_move {
|
||||
|
@ -198,16 +196,16 @@ mod tests {
|
|||
|
||||
use std::borrow::Borrow;
|
||||
|
||||
use debug::TestConn;
|
||||
use crate::debug::TestConn;
|
||||
|
||||
use bootstrap;
|
||||
use crate::bootstrap;
|
||||
|
||||
// For convenience during testing.
|
||||
// Real consumers will perform similar operations when appropriate.
|
||||
fn update_conn(conn: &mut TestConn, schema: &Option<Schema>, pmap: &PartitionMap) {
|
||||
match schema {
|
||||
&Some(ref s) => conn.schema = s.clone(),
|
||||
&None => (),
|
||||
Some(ref s) => conn.schema = s.clone(),
|
||||
None => (),
|
||||
};
|
||||
conn.partition_map = pmap.clone();
|
||||
}
|
||||
|
@ -240,7 +238,7 @@ mod tests {
|
|||
assert_matches!(conn.transactions(), "[]");
|
||||
assert_eq!(new_partition_map, partition_map0);
|
||||
|
||||
conn.partition_map = partition_map0.clone();
|
||||
conn.partition_map = partition_map0;
|
||||
let report2 = assert_transact!(conn, t);
|
||||
let partition_map2 = conn.partition_map.clone();
|
||||
|
||||
|
|
80
db/src/tx.rs
|
@ -49,17 +49,17 @@ use std::borrow::Cow;
|
|||
use std::collections::{BTreeMap, BTreeSet, VecDeque};
|
||||
use std::iter::once;
|
||||
|
||||
use db;
|
||||
use db::MentatStoring;
|
||||
use db_traits::errors;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use edn::{InternSet, Keyword};
|
||||
use entids;
|
||||
use internal_types::{
|
||||
use crate::db;
|
||||
use crate::db::MentatStoring;
|
||||
use crate::entids;
|
||||
use crate::internal_types::{
|
||||
replace_lookup_ref, AEVTrie, AddAndRetract, KnownEntidOr, LookupRef, LookupRefOrTempId,
|
||||
TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithTempIdsAndLookupRefs,
|
||||
TermWithoutTempIds, TypedValueOr,
|
||||
};
|
||||
use db_traits::errors;
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
use edn::{InternSet, Keyword};
|
||||
|
||||
use mentat_core::util::Either;
|
||||
|
||||
|
@ -67,15 +67,15 @@ use core_traits::{attribute, now, Attribute, Entid, KnownEntid, TypedValue, Valu
|
|||
|
||||
use mentat_core::{DateTime, Schema, TxReport, Utc};
|
||||
|
||||
use crate::metadata;
|
||||
use crate::schema::SchemaBuilding;
|
||||
use crate::tx_checking;
|
||||
use crate::types::{AVMap, AVPair, PartitionMap, TransactableValue};
|
||||
use crate::upsert_resolution::{FinalPopulations, Generation};
|
||||
use crate::watcher::TransactWatcher;
|
||||
use edn::entities as entmod;
|
||||
use edn::entities::{AttributePlace, Entity, OpType, TempId};
|
||||
use metadata;
|
||||
use rusqlite;
|
||||
use schema::SchemaBuilding;
|
||||
use tx_checking;
|
||||
use types::{AVMap, AVPair, PartitionMap, TransactableValue};
|
||||
use upsert_resolution::{FinalPopulations, Generation};
|
||||
use watcher::TransactWatcher;
|
||||
|
||||
/// Defines transactor's high level behaviour.
|
||||
pub(crate) enum TransactorAction {
|
||||
|
@@ -163,12 +163,12 @@ where
 tx_id: Entid,
 ) -> Tx<'conn, 'a, W> {
 Tx {
-store: store,
-partition_map: partition_map,
+store,
+partition_map,
 schema_for_mutation: Cow::Borrowed(schema_for_mutation),
-schema: schema,
-watcher: watcher,
-tx_id: tx_id,
+schema,
+watcher,
+tx_id,
 }
 }
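The store: store to store rewrites use field init shorthand, which Rust allows whenever a local variable has the same name as the struct field (clippy's redundant_field_names lint). A minimal sketch with placeholder field types:

    struct Tx {
        store: String,
        tx_id: i64,
    }

    fn new_tx(store: String, tx_id: i64) -> Tx {
        // Equivalent to Tx { store: store, tx_id: tx_id }
        Tx { store, tx_id }
    }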
|
||||
|
@ -185,8 +185,8 @@ where
|
|||
|
||||
// Map [a v]->entid.
|
||||
let mut av_pairs: Vec<&AVPair> = vec![];
|
||||
for i in 0..temp_id_avs.len() {
|
||||
av_pairs.push(&temp_id_avs[i].1);
|
||||
for temp_id_av in temp_id_avs {
|
||||
av_pairs.push(&temp_id_av.1);
|
||||
}
|
||||
|
||||
// Lookup in the store.
|
||||
|
@ -208,14 +208,14 @@ where
|
|||
av_map.get(&av_pair)
|
||||
);
|
||||
if let Some(entid) = av_map.get(&av_pair).cloned().map(KnownEntid) {
|
||||
tempids.insert(tempid.clone(), entid).map(|previous| {
|
||||
if let Some(previous) = tempids.insert(tempid.clone(), entid) {
|
||||
if entid != previous {
|
||||
conflicting_upserts
|
||||
.entry((**tempid).clone())
|
||||
.or_insert_with(|| once(previous).collect::<BTreeSet<_>>())
|
||||
.insert(entid);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -340,7 +340,7 @@ where
|
|||
entmod::EntityPlace::TxFunction(ref tx_function) => {
|
||||
match tx_function.op.0.as_str() {
|
||||
"transaction-tx" => Ok(Either::Left(self.tx_id)),
|
||||
unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!(
|
||||
unknown => bail!(DbErrorKind::NotYetImplemented(format!(
|
||||
"Unknown transaction function {}",
|
||||
unknown
|
||||
))),
|
||||
|
@ -372,7 +372,7 @@ where
|
|||
) -> Result<KnownEntidOr<LookupRefOrTempId>> {
|
||||
match backward_a.unreversed() {
|
||||
None => {
|
||||
bail!(DbErrorKind::NotYetImplemented(format!("Cannot explode map notation value in :attr/_reversed notation for forward attribute")));
|
||||
bail!(DbErrorKind::NotYetImplemented("Cannot explode map notation value in :attr/_reversed notation for forward attribute".to_string()));
|
||||
}
|
||||
Some(forward_a) => {
|
||||
let forward_a = self.entity_a_into_term_a(forward_a)?;
|
||||
|
@ -412,7 +412,7 @@ where
|
|||
            entmod::ValuePlace::TxFunction(ref tx_function) => {
                match tx_function.op.0.as_str() {
                    "transaction-tx" => Ok(Either::Left(KnownEntid(self.tx_id.0))),
-                   unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!("Unknown transaction function {}", unknown))),
+                   unknown => bail!(DbErrorKind::NotYetImplemented(format!("Unknown transaction function {}", unknown))),
                }
            },

@@ -456,7 +456,7 @@ where
                     op: OpType::Add,
                     e: db_id.clone(),
                     a: AttributePlace::Entid(a),
-                    v: v,
+                    v,
                 });
             }
         }

@@ -519,7 +519,7 @@ where
             entmod::ValuePlace::TxFunction(ref tx_function) => {
                 let typed_value = match tx_function.op.0.as_str() {
                     "transaction-tx" => TypedValue::Ref(self.tx_id),
-                    unknown @ _ => bail!(DbErrorKind::NotYetImplemented(format!(
+                    unknown => bail!(DbErrorKind::NotYetImplemented(format!(
                         "Unknown transaction function {}",
                         unknown
                     ))),

@@ -546,7 +546,7 @@ where

                 for vv in vs {
                     deque.push_front(Entity::AddOrRetract {
-                        op: op.clone(),
+                        op,
                         e: e.clone(),
                         a: AttributePlace::Entid(entmod::EntidOrIdent::Entid(a)),
                         v: vv,

@@ -667,8 +667,8 @@ where
         |term: TermWithTempIdsAndLookupRefs| -> Result<TermWithTempIds> {
             match term {
                 Term::AddOrRetract(op, e, a, v) => {
-                    let e = replace_lookup_ref(&lookup_ref_map, e, |x| KnownEntid(x))?;
-                    let v = replace_lookup_ref(&lookup_ref_map, v, |x| TypedValue::Ref(x))?;
+                    let e = replace_lookup_ref(&lookup_ref_map, e, KnownEntid)?;
+                    let v = replace_lookup_ref(&lookup_ref_map, v, TypedValue::Ref)?;
                     Ok(Term::AddOrRetract(op, e, a, v))
                 }
             }

@@ -757,14 +757,14 @@ where
         for (tempid, entid) in temp_id_map {
             // Since `UpsertEV` instances always transition to `UpsertE` instances, it might be
             // that a tempid resolves in two generations, and those resolutions might conflict.
-            tempids.insert((*tempid).clone(), entid).map(|previous| {
+            if let Some(previous) = tempids.insert((*tempid).clone(), entid) {
                 if entid != previous {
                     conflicting_upserts
                         .entry((*tempid).clone())
                         .or_insert_with(|| once(previous).collect::<BTreeSet<_>>())
                         .insert(entid);
                 }
-            });
+            }
         }

         if !conflicting_upserts.is_empty() {

@@ -891,10 +891,7 @@ where
             .map(|v| (true, v))
             .chain(ars.retract.into_iter().map(|v| (false, v)))
         {
-            let op = match added {
-                true => OpType::Add,
-                false => OpType::Retract,
-            };
+            let op = if added { OpType::Add } else { OpType::Retract };
             self.watcher.datom(op, e, a, &v);
             queue.push((e, a, attribute, v, added));
         }

@@ -967,7 +964,7 @@ where
         Ok(TxReport {
             tx_id: self.tx_id,
             tx_instant,
-            tempids: tempids,
+            tempids,
         })
     }
 }

@@ -1061,6 +1058,7 @@ where
     )
 }

+#[allow(clippy::too_many_arguments)]
 pub(crate) fn transact_terms_with_action<'conn, 'a, I, W>(
     conn: &'conn rusqlite::Connection,
     partition_map: PartitionMap,

@@ -1093,9 +1091,9 @@ where

         let a_and_r = trie
             .entry((a, attribute))
-            .or_insert(BTreeMap::default())
+            .or_insert_with(BTreeMap::default)
             .entry(e)
-            .or_insert(AddAndRetract::default());
+            .or_insert_with(AddAndRetract::default);

         match op {
             OpType::Add => a_and_r.add.insert(v),

@@ -1136,9 +1134,9 @@ fn get_or_insert_tx_instant<'schema>(
             entids::DB_TX_INSTANT,
             schema.require_attribute_for_entid(entids::DB_TX_INSTANT)?,
         ))
-        .or_insert(BTreeMap::default())
+        .or_insert_with(BTreeMap::default)
         .entry(tx_id)
-        .or_insert(AddAndRetract::default());
+        .or_insert_with(AddAndRetract::default);
     if !ars.retract.is_empty() {
         // Cannot retract :db/txInstant!
     }

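Several of the hunks above swap `.or_insert(Foo::default())` for `.or_insert_with(Foo::default)`. The difference is evaluation order: `or_insert` constructs its argument on every call, even when the key already exists, while `or_insert_with` only runs the constructor when the entry is vacant, which is what clippy's `or_insert_with` lint asks for. A minimal standalone sketch (the AddAndRetract-style struct and the trie key types are invented here for illustration, not the Mentat definitions):

use std::collections::{BTreeMap, BTreeSet};

// Hypothetical stand-in for the per-attribute accumulator built by the transactor.
#[derive(Default, Debug)]
struct AddAndRetract {
    add: BTreeSet<i64>,
    retract: BTreeSet<i64>,
}

fn main() {
    let mut trie: BTreeMap<(i64, i64), BTreeMap<i64, AddAndRetract>> = BTreeMap::new();

    // `or_insert(BTreeMap::default())` would build the default map eagerly on
    // every lookup; `or_insert_with` defers construction until the entry is vacant.
    let a_and_r = trie
        .entry((1, 10))
        .or_insert_with(BTreeMap::default)
        .entry(100)
        .or_insert_with(AddAndRetract::default);

    a_and_r.add.insert(42);
    println!("{:?}", trie);
}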
@@ -14,7 +14,7 @@ use core_traits::{Entid, TypedValue, ValueType};

 use db_traits::errors::CardinalityConflict;

-use internal_types::AEVTrie;
+use crate::internal_types::AEVTrie;

 /// Map from found [e a v] to expected type.
 pub(crate) type TypeDisagreements = BTreeMap<(Entid, Entid, TypedValue), ValueType>;

@@ -24,11 +24,12 @@ use edn::entities::OpType;

 use db_traits::errors::Result;

-use types::AttributeSet;
+use crate::types::AttributeSet;

-use watcher::TransactWatcher;
+use crate::watcher::TransactWatcher;

 pub struct TxObserver {
+    #[allow(clippy::type_complexity)]
     notify_fn: Arc<Box<dyn Fn(&str, IndexMap<&Entid, &AttributeSet>) + Send + Sync>>,
     attributes: AttributeSet,
 }

@@ -82,17 +83,18 @@ impl TxCommand {

 impl Command for TxCommand {
     fn execute(&mut self) {
-        self.observers.upgrade().map(|observers| {
+        if let Some(observers) = self.observers.upgrade() {
             for (key, observer) in observers.iter() {
                 let applicable_reports = observer.applicable_reports(&self.reports);
                 if !applicable_reports.is_empty() {
                     observer.notify(&key, applicable_reports);
                 }
             }
-        });
+        }
     }
 }

+#[derive(Default)]
 pub struct TxObservationService {
     observers: Arc<IndexMap<String, Arc<TxObserver>>>,
     executor: Option<Sender<Box<dyn Command + Send>>>,

@@ -107,7 +109,7 @@ impl TxObservationService {
     }

     // For testing purposes
-    pub fn is_registered(&self, key: &String) -> bool {
+    pub fn is_registered(&self, key: &str) -> bool {
         self.observers.contains_key(key)
     }

@@ -115,7 +117,7 @@ impl TxObservationService {
         Arc::make_mut(&mut self.observers).insert(key, observer);
     }

-    pub fn deregister(&mut self, key: &String) {
+    pub fn deregister(&mut self, key: &str) {
         Arc::make_mut(&mut self.observers).remove(key);
     }

@@ -130,6 +132,7 @@ impl TxObservationService {
     }

     let executor = self.executor.get_or_insert_with(|| {
+        #[allow(clippy::type_complexity)]
         let (tx, rx): (
             Sender<Box<dyn Command + Send>>,
             Receiver<Box<dyn Command + Send>>,

@@ -154,6 +157,7 @@ impl Drop for TxObservationService {
     }
 }

+#[derive(Default)]
 pub struct InProgressObserverTransactWatcher {
     collected_attributes: AttributeSet,
     pub txes: IndexMap<Entid, AttributeSet>,

@@ -174,8 +178,7 @@ impl TransactWatcher for InProgressObserverTransactWatcher {
     }

     fn done(&mut self, t: &Entid, _schema: &Schema) -> Result<()> {
-        let collected_attributes =
-            ::std::mem::replace(&mut self.collected_attributes, Default::default());
+        let collected_attributes = ::std::mem::take(&mut self.collected_attributes);
         self.txes.insert(*t, collected_attributes);
         Ok(())
     }

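The observer changes above apply three small idioms: `Option::map` used only for side effects becomes `if let Some(..)`, parameters go from `&String` to `&str`, and `mem::replace(x, Default::default())` becomes `mem::take(x)`. A standalone sketch of the first two, with invented names that only mimic the shape of the observer service, not the Mentat API:

use std::sync::{Arc, Weak};

struct Registry {
    observers: Arc<Vec<String>>,
}

impl Registry {
    // `&str` accepts both `&String` and string literals; `&String` accepts only the former.
    fn is_registered(&self, key: &str) -> bool {
        self.observers.iter().any(|k| k.as_str() == key)
    }
}

fn notify_all(observers: &Weak<Vec<String>>) {
    // Preferred over `observers.upgrade().map(|o| ...)` when the closure is run
    // purely for its side effects (clippy: `option_map_unit_fn`).
    if let Some(observers) = observers.upgrade() {
        for key in observers.iter() {
            println!("notifying {}", key);
        }
    }
}

fn main() {
    let registry = Registry { observers: Arc::new(vec!["logger".into()]) };
    assert!(registry.is_registered("logger"));
    notify_all(&Arc::downgrade(&registry.observers));
}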
@@ -127,8 +127,8 @@ pub struct DB {
 impl DB {
     pub fn new(partition_map: PartitionMap, schema: Schema) -> DB {
         DB {
-            partition_map: partition_map,
-            schema: schema,
+            partition_map,
+            schema,
         }
     }
 }

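The `partition_map: partition_map` lines above are the clippy `redundant_field_names` lint: when a local variable has the same name as the struct field, the field init shorthand is enough. A tiny sketch with placeholder types:

struct Db {
    partition_map: Vec<u64>,
    schema: String,
}

fn new_db(partition_map: Vec<u64>, schema: String) -> Db {
    // Equivalent to `Db { partition_map: partition_map, schema: schema }`.
    Db { partition_map, schema }
}

fn main() {
    let db = new_db(vec![0, 1], "bootstrap".to_string());
    assert_eq!(db.partition_map.len(), 2);
    assert_eq!(db.schema, "bootstrap");
}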
@@ -18,19 +18,19 @@ use std::collections::{BTreeMap, BTreeSet};
 use indexmap;
 use petgraph::unionfind;

-use db_traits::errors::{DbErrorKind, Result};
-use internal_types::{
+use crate::internal_types::{
     Population, TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithoutTempIds, TypedValueOr,
 };
-use types::AVPair;
+use crate::types::AVPair;
+use db_traits::errors::{DbErrorKind, Result};

 use mentat_core::util::Either::*;

 use core_traits::{attribute, Attribute, Entid, TypedValue};

+use crate::schema::SchemaBuilding;
 use edn::entities::OpType;
 use mentat_core::Schema;
-use schema::SchemaBuilding;

 /// A "Simple upsert" that looks like [:db/add TEMPID a v], where a is :db.unique/identity.
 #[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]

@@ -227,7 +227,7 @@ impl Generation {
     }

     // Collect id->[a v] pairs that might upsert at this evolutionary step.
-    pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
+    pub(crate) fn temp_id_avs(&self) -> Vec<(TempIdHandle, AVPair)> {
         let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
         // TODO: map/collect.
         for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {

@@ -269,32 +269,32 @@ impl Generation {

         for term in self.allocations.iter() {
             match term {
-                &Term::AddOrRetract(OpType::Add, Right(ref t1), a, Right(ref t2)) => {
+                Term::AddOrRetract(OpType::Add, Right(ref t1), a, Right(ref t2)) => {
                     temp_ids.insert(t1.clone());
                     temp_ids.insert(t2.clone());
-                    let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
+                    let attribute: &Attribute = schema.require_attribute_for_entid(*a)?;
                     if attribute.unique == Some(attribute::Unique::Identity) {
                         tempid_avs
-                            .entry((a, Right(t2.clone())))
-                            .or_insert(vec![])
+                            .entry((*a, Right(t2.clone())))
+                            .or_insert_with(Vec::new)
                             .push(t1.clone());
                     }
                 }
-                &Term::AddOrRetract(OpType::Add, Right(ref t), a, ref x @ Left(_)) => {
+                Term::AddOrRetract(OpType::Add, Right(ref t), a, ref x @ Left(_)) => {
                     temp_ids.insert(t.clone());
-                    let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
+                    let attribute: &Attribute = schema.require_attribute_for_entid(*a)?;
                     if attribute.unique == Some(attribute::Unique::Identity) {
                         tempid_avs
-                            .entry((a, x.clone()))
-                            .or_insert(vec![])
+                            .entry((*a, x.clone()))
+                            .or_insert_with(Vec::new)
                             .push(t.clone());
                     }
                 }
-                &Term::AddOrRetract(OpType::Add, Left(_), _, Right(ref t)) => {
+                Term::AddOrRetract(OpType::Add, Left(_), _, Right(ref t)) => {
                     temp_ids.insert(t.clone());
                 }
-                &Term::AddOrRetract(OpType::Add, Left(_), _, Left(_)) => unreachable!(),
-                &Term::AddOrRetract(OpType::Retract, _, _, _) => {
+                Term::AddOrRetract(OpType::Add, Left(_), _, Left(_)) => unreachable!(),
+                Term::AddOrRetract(OpType::Retract, _, _, _) => {
                     // [:db/retract ...] entities never allocate entids; they have to resolve due to
                     // other upserts (or they fail the transaction).
                 }

@@ -319,13 +319,11 @@ impl Generation {
         );

         for vs in tempid_avs.values() {
-            vs.first()
-                .and_then(|first| temp_ids.get(first))
-                .map(|&first_index| {
+            if let Some(&first_index) = vs.first().and_then(|first| temp_ids.get(first)) {
                 for tempid in vs {
                     temp_ids.get(tempid).map(|&i| uf.union(first_index, i));
                 }
-                });
+            }
         }

         debug!("union-find aggregation {:?}", uf.clone().into_labeling());

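The upsert-resolution hunks drop the leading `&` from match arms and read the copied entid out with `*a`. Since the 2018 edition's match ergonomics, matching a reference against non-reference patterns binds fields by reference automatically, and Copy fields can then be dereferenced directly. A standalone sketch with an invented Term-like enum, only meant to show the binding behaviour:

#[derive(Debug)]
enum Term {
    Add(i64, String),
    Retract(i64, String),
}

fn entids(terms: &[Term]) -> Vec<i64> {
    let mut out = Vec::new();
    for term in terms.iter() {
        // Old style would be `&Term::Add(a, ref v) => ...`; with match
        // ergonomics the `&` on the pattern goes away, `a` binds as `&i64`,
        // and the value is copied out with `*a`.
        match term {
            Term::Add(a, _v) => out.push(*a),
            Term::Retract(a, _v) => out.push(*a),
        }
    }
    out
}

fn main() {
    let terms = vec![Term::Add(1, "x".into()), Term::Retract(2, "y".into())];
    assert_eq!(entids(&terms), vec![1, 2]);
}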
@@ -67,6 +67,11 @@ fn test_from_sql_value_pair() {
             .unwrap(),
         TypedValue::typed_ns_keyword("db", "keyword")
     );
+    assert_eq!(
+        TypedValue::from_sql_value_pair(rusqlite::types::Value::Blob(vec![1, 2, 3, 42]), 15)
+            .unwrap(),
+        TypedValue::Bytes((vec![1, 2, 3, 42]).into())
+    );
 }

 #[test]

@@ -11,7 +11,7 @@ source "https://rubygems.org"
 # gem "jekyll", "~> 3.7.3"

 # This is the default theme for new Jekyll sites. You may change this to anything you like.
-gem "minima", "~> 2.0"
+gem "minima", "~> 2.5.1"

 # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
 # uncomment the line below. To upgrade, run `bundle update github-pages`.

@@ -19,9 +19,9 @@ gem "minima", "~> 2.0"

 # If you have any plugins, put them here!
 group :jekyll_plugins do
-  gem "jekyll-feed", "~> 0.9.3"
-  gem "github-pages", "~> 186"
-  gem "jekyll-commonmark-ghpages", "~> 0.1.5"
+  gem "jekyll-feed", "~> 0.15.1"
+  gem "github-pages", "~> 215"
+  gem "jekyll-commonmark-ghpages", "~> 0.1.6"
 end

 # Windows does not include zoneinfo files, so bundle the tzinfo-data gem

@ -1,148 +1,161 @@
|
|||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activesupport (4.2.10)
|
||||
i18n (~> 0.7)
|
||||
activesupport (6.0.4)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
minitest (~> 5.1)
|
||||
thread_safe (~> 0.3, >= 0.3.4)
|
||||
tzinfo (~> 1.1)
|
||||
addressable (2.5.2)
|
||||
public_suffix (>= 2.0.2, < 4.0)
|
||||
zeitwerk (~> 2.2, >= 2.2.2)
|
||||
addressable (2.8.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
coffee-script (2.4.1)
|
||||
coffee-script-source
|
||||
execjs
|
||||
coffee-script-source (1.11.1)
|
||||
colorator (1.1.0)
|
||||
commonmarker (0.17.9)
|
||||
commonmarker (0.17.13)
|
||||
ruby-enum (~> 0.5)
|
||||
concurrent-ruby (1.0.5)
|
||||
dnsruby (1.60.2)
|
||||
em-websocket (0.5.1)
|
||||
concurrent-ruby (1.1.9)
|
||||
dnsruby (1.61.7)
|
||||
simpleidn (~> 0.1)
|
||||
em-websocket (0.5.2)
|
||||
eventmachine (>= 0.12.9)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
ethon (0.11.0)
|
||||
ffi (>= 1.3.0)
|
||||
ethon (0.14.0)
|
||||
ffi (>= 1.15.0)
|
||||
eventmachine (1.2.7)
|
||||
execjs (2.7.0)
|
||||
faraday (0.15.2)
|
||||
execjs (2.8.1)
|
||||
faraday (1.4.3)
|
||||
faraday-em_http (~> 1.0)
|
||||
faraday-em_synchrony (~> 1.0)
|
||||
faraday-excon (~> 1.1)
|
||||
faraday-net_http (~> 1.0)
|
||||
faraday-net_http_persistent (~> 1.1)
|
||||
multipart-post (>= 1.2, < 3)
|
||||
ffi (1.9.25)
|
||||
ruby2_keywords (>= 0.0.4)
|
||||
faraday-em_http (1.0.0)
|
||||
faraday-em_synchrony (1.0.0)
|
||||
faraday-excon (1.1.0)
|
||||
faraday-net_http (1.0.1)
|
||||
faraday-net_http_persistent (1.1.0)
|
||||
ffi (1.15.3)
|
||||
forwardable-extended (2.6.0)
|
||||
gemoji (3.0.0)
|
||||
github-pages (186)
|
||||
activesupport (= 4.2.10)
|
||||
github-pages-health-check (= 1.8.1)
|
||||
jekyll (= 3.7.3)
|
||||
jekyll-avatar (= 0.5.0)
|
||||
gemoji (3.0.1)
|
||||
github-pages (215)
|
||||
github-pages-health-check (= 1.17.2)
|
||||
jekyll (= 3.9.0)
|
||||
jekyll-avatar (= 0.7.0)
|
||||
jekyll-coffeescript (= 1.1.1)
|
||||
jekyll-commonmark-ghpages (= 0.1.5)
|
||||
jekyll-commonmark-ghpages (= 0.1.6)
|
||||
jekyll-default-layout (= 0.1.4)
|
||||
jekyll-feed (= 0.9.3)
|
||||
jekyll-feed (= 0.15.1)
|
||||
jekyll-gist (= 1.5.0)
|
||||
jekyll-github-metadata (= 2.9.4)
|
||||
jekyll-mentions (= 1.3.0)
|
||||
jekyll-optional-front-matter (= 0.3.0)
|
||||
jekyll-github-metadata (= 2.13.0)
|
||||
jekyll-mentions (= 1.6.0)
|
||||
jekyll-optional-front-matter (= 0.3.2)
|
||||
jekyll-paginate (= 1.1.0)
|
||||
jekyll-readme-index (= 0.2.0)
|
||||
jekyll-redirect-from (= 0.13.0)
|
||||
jekyll-relative-links (= 0.5.3)
|
||||
jekyll-remote-theme (= 0.3.1)
|
||||
jekyll-readme-index (= 0.3.0)
|
||||
jekyll-redirect-from (= 0.16.0)
|
||||
jekyll-relative-links (= 0.6.1)
|
||||
jekyll-remote-theme (= 0.4.3)
|
||||
jekyll-sass-converter (= 1.5.2)
|
||||
jekyll-seo-tag (= 2.4.0)
|
||||
jekyll-sitemap (= 1.2.0)
|
||||
jekyll-swiss (= 0.4.0)
|
||||
jekyll-seo-tag (= 2.7.1)
|
||||
jekyll-sitemap (= 1.4.0)
|
||||
jekyll-swiss (= 1.0.0)
|
||||
jekyll-theme-architect (= 0.1.1)
|
||||
jekyll-theme-cayman (= 0.1.1)
|
||||
jekyll-theme-dinky (= 0.1.1)
|
||||
jekyll-theme-hacker (= 0.1.1)
|
||||
jekyll-theme-hacker (= 0.1.2)
|
||||
jekyll-theme-leap-day (= 0.1.1)
|
||||
jekyll-theme-merlot (= 0.1.1)
|
||||
jekyll-theme-midnight (= 0.1.1)
|
||||
jekyll-theme-minimal (= 0.1.1)
|
||||
jekyll-theme-modernist (= 0.1.1)
|
||||
jekyll-theme-primer (= 0.5.3)
|
||||
jekyll-theme-primer (= 0.5.4)
|
||||
jekyll-theme-slate (= 0.1.1)
|
||||
jekyll-theme-tactile (= 0.1.1)
|
||||
jekyll-theme-time-machine (= 0.1.1)
|
||||
jekyll-titles-from-headings (= 0.5.1)
|
||||
jemoji (= 0.9.0)
|
||||
kramdown (= 1.16.2)
|
||||
liquid (= 4.0.0)
|
||||
listen (= 3.1.5)
|
||||
jekyll-titles-from-headings (= 0.5.3)
|
||||
jemoji (= 0.12.0)
|
||||
kramdown (= 2.3.1)
|
||||
kramdown-parser-gfm (= 1.1.0)
|
||||
liquid (= 4.0.3)
|
||||
mercenary (~> 0.3)
|
||||
minima (= 2.4.1)
|
||||
nokogiri (>= 1.8.2, < 2.0)
|
||||
rouge (= 2.2.1)
|
||||
minima (= 2.5.1)
|
||||
nokogiri (>= 1.10.4, < 2.0)
|
||||
rouge (= 3.26.0)
|
||||
terminal-table (~> 1.4)
|
||||
github-pages-health-check (1.8.1)
|
||||
github-pages-health-check (1.17.2)
|
||||
addressable (~> 2.3)
|
||||
dnsruby (~> 1.60)
|
||||
octokit (~> 4.0)
|
||||
public_suffix (~> 2.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
typhoeus (~> 1.3)
|
||||
html-pipeline (2.8.0)
|
||||
html-pipeline (2.14.0)
|
||||
activesupport (>= 2)
|
||||
nokogiri (>= 1.4)
|
||||
http_parser.rb (0.6.0)
|
||||
i18n (0.9.5)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jekyll (3.7.3)
|
||||
jekyll (3.9.0)
|
||||
addressable (~> 2.4)
|
||||
colorator (~> 1.0)
|
||||
em-websocket (~> 0.5)
|
||||
i18n (~> 0.7)
|
||||
jekyll-sass-converter (~> 1.0)
|
||||
jekyll-watch (~> 2.0)
|
||||
kramdown (~> 1.14)
|
||||
kramdown (>= 1.17, < 3)
|
||||
liquid (~> 4.0)
|
||||
mercenary (~> 0.3.3)
|
||||
pathutil (~> 0.9)
|
||||
rouge (>= 1.7, < 4)
|
||||
safe_yaml (~> 1.0)
|
||||
jekyll-avatar (0.5.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-avatar (0.7.0)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll-coffeescript (1.1.1)
|
||||
coffee-script (~> 2.2)
|
||||
coffee-script-source (~> 1.11.1)
|
||||
jekyll-commonmark (1.2.0)
|
||||
jekyll-commonmark (1.3.1)
|
||||
commonmarker (~> 0.14)
|
||||
jekyll (>= 3.0, < 4.0)
|
||||
jekyll-commonmark-ghpages (0.1.5)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-commonmark-ghpages (0.1.6)
|
||||
commonmarker (~> 0.17.6)
|
||||
jekyll-commonmark (~> 1)
|
||||
rouge (~> 2)
|
||||
jekyll-commonmark (~> 1.2)
|
||||
rouge (>= 2.0, < 4.0)
|
||||
jekyll-default-layout (0.1.4)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-feed (0.9.3)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-feed (0.15.1)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-gist (1.5.0)
|
||||
octokit (~> 4.2)
|
||||
jekyll-github-metadata (2.9.4)
|
||||
jekyll (~> 3.1)
|
||||
jekyll-github-metadata (2.13.0)
|
||||
jekyll (>= 3.4, < 5.0)
|
||||
octokit (~> 4.0, != 4.4.0)
|
||||
jekyll-mentions (1.3.0)
|
||||
activesupport (~> 4.0)
|
||||
jekyll-mentions (1.6.0)
|
||||
html-pipeline (~> 2.3)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-optional-front-matter (0.3.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-optional-front-matter (0.3.2)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll-paginate (1.1.0)
|
||||
jekyll-readme-index (0.2.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-redirect-from (0.13.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-relative-links (0.5.3)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-remote-theme (0.3.1)
|
||||
jekyll (~> 3.5)
|
||||
rubyzip (>= 1.2.1, < 3.0)
|
||||
jekyll-readme-index (0.3.0)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll-redirect-from (0.16.0)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-relative-links (0.6.1)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-remote-theme (0.4.3)
|
||||
addressable (~> 2.0)
|
||||
jekyll (>= 3.5, < 5.0)
|
||||
jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0)
|
||||
rubyzip (>= 1.3.0, < 3.0)
|
||||
jekyll-sass-converter (1.5.2)
|
||||
sass (~> 3.4)
|
||||
jekyll-seo-tag (2.4.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-sitemap (1.2.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-swiss (0.4.0)
|
||||
jekyll-seo-tag (2.7.1)
|
||||
jekyll (>= 3.8, < 5.0)
|
||||
jekyll-sitemap (1.4.0)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-swiss (1.0.0)
|
||||
jekyll-theme-architect (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
|
@ -152,8 +165,8 @@ GEM
|
|||
jekyll-theme-dinky (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-hacker (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-theme-hacker (0.1.2)
|
||||
jekyll (> 3.5, < 5.0)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-leap-day (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
|
@ -170,8 +183,8 @@ GEM
|
|||
jekyll-theme-modernist (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-primer (0.5.3)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-theme-primer (0.5.4)
|
||||
jekyll (> 3.5, < 5.0)
|
||||
jekyll-github-metadata (~> 2.9)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-slate (0.1.1)
|
||||
|
@ -183,71 +196,82 @@ GEM
|
|||
jekyll-theme-time-machine (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-titles-from-headings (0.5.1)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-watch (2.0.0)
|
||||
jekyll-titles-from-headings (0.5.3)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-watch (2.2.1)
|
||||
listen (~> 3.0)
|
||||
jemoji (0.9.0)
|
||||
activesupport (~> 4.0, >= 4.2.9)
|
||||
jemoji (0.12.0)
|
||||
gemoji (~> 3.0)
|
||||
html-pipeline (~> 2.2)
|
||||
jekyll (~> 3.0)
|
||||
kramdown (1.16.2)
|
||||
liquid (4.0.0)
|
||||
listen (3.1.5)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
ruby_dep (~> 1.2)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
kramdown (2.3.1)
|
||||
rexml
|
||||
kramdown-parser-gfm (1.1.0)
|
||||
kramdown (~> 2.0)
|
||||
liquid (4.0.3)
|
||||
listen (3.5.1)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
mercenary (0.3.6)
|
||||
mini_portile2 (2.3.0)
|
||||
minima (2.4.1)
|
||||
jekyll (~> 3.5)
|
||||
mini_portile2 (2.6.1)
|
||||
minima (2.5.1)
|
||||
jekyll (>= 3.5, < 5.0)
|
||||
jekyll-feed (~> 0.9)
|
||||
jekyll-seo-tag (~> 2.1)
|
||||
minitest (5.11.3)
|
||||
multipart-post (2.0.0)
|
||||
nokogiri (1.8.3)
|
||||
mini_portile2 (~> 2.3.0)
|
||||
octokit (4.9.0)
|
||||
minitest (5.14.4)
|
||||
multipart-post (2.1.1)
|
||||
nokogiri (1.12.5)
|
||||
mini_portile2 (~> 2.6.1)
|
||||
racc (~> 1.4)
|
||||
octokit (4.21.0)
|
||||
faraday (>= 0.9)
|
||||
sawyer (~> 0.8.0, >= 0.5.3)
|
||||
pathutil (0.16.1)
|
||||
pathutil (0.16.2)
|
||||
forwardable-extended (~> 2.6)
|
||||
public_suffix (2.0.5)
|
||||
rb-fsevent (0.10.3)
|
||||
rb-inotify (0.9.10)
|
||||
ffi (>= 0.5.0, < 2)
|
||||
rouge (2.2.1)
|
||||
ruby-enum (0.7.2)
|
||||
public_suffix (4.0.6)
|
||||
racc (1.5.2)
|
||||
rb-fsevent (0.11.0)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.5)
|
||||
rouge (3.26.0)
|
||||
ruby-enum (0.9.0)
|
||||
i18n
|
||||
ruby_dep (1.5.0)
|
||||
rubyzip (1.2.1)
|
||||
safe_yaml (1.0.4)
|
||||
sass (3.5.6)
|
||||
ruby2_keywords (0.0.4)
|
||||
rubyzip (2.3.0)
|
||||
safe_yaml (1.0.5)
|
||||
sass (3.7.4)
|
||||
sass-listen (~> 4.0.0)
|
||||
sass-listen (4.0.0)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
sawyer (0.8.1)
|
||||
addressable (>= 2.3.5, < 2.6)
|
||||
faraday (~> 0.8, < 1.0)
|
||||
sawyer (0.8.2)
|
||||
addressable (>= 2.3.5)
|
||||
faraday (> 0.8, < 2.0)
|
||||
simpleidn (0.2.1)
|
||||
unf (~> 0.1.4)
|
||||
terminal-table (1.8.0)
|
||||
unicode-display_width (~> 1.1, >= 1.1.1)
|
||||
thread_safe (0.3.6)
|
||||
typhoeus (1.3.0)
|
||||
typhoeus (1.4.0)
|
||||
ethon (>= 0.9.0)
|
||||
tzinfo (1.2.5)
|
||||
tzinfo (1.2.9)
|
||||
thread_safe (~> 0.1)
|
||||
unicode-display_width (1.4.0)
|
||||
unf (0.1.4)
|
||||
unf_ext
|
||||
unf_ext (0.0.7.7)
|
||||
unicode-display_width (1.7.0)
|
||||
zeitwerk (2.4.2)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
github-pages (~> 186)
|
||||
jekyll-commonmark-ghpages (~> 0.1.5)
|
||||
jekyll-feed (~> 0.9.3)
|
||||
minima (~> 2.0)
|
||||
github-pages (~> 215)
|
||||
jekyll-commonmark-ghpages (~> 0.1.6)
|
||||
jekyll-feed (~> 0.15.1)
|
||||
minima (~> 2.5.1)
|
||||
tzinfo-data
|
||||
|
||||
BUNDLED WITH
|
||||
1.16.2
|
||||
2.2.21
|
||||
|
|
|
@@ -10,19 +10,21 @@ description = "EDN parser for Project Mentat"
 readme = "./README.md"

 [dependencies]
-chrono = "0.4"
-itertools = "0.8"
-num = "0.2"
-ordered-float = "1.0"
-pretty = "0.9"
-uuid = { version = "0.8", features = ["v4", "serde"] }
-serde = { version = "1.0", optional = true }
-serde_derive = { version = "1.0", optional = true }
-peg = "0.6"
+chrono = "~0.4"
+itertools = "~0.10"
+num = "~0.4"
+ordered-float = "~2.8"
+pretty = "~0.12"
+uuid = { version = "~1", features = ["v4", "serde"] }
+serde = { version = "~1.0", optional = true }
+serde_derive = { version = "~1.0", optional = true }
+peg = "~0.8"
+bytes = "1.0.1"
+hex = "0.4.3"

 [dev-dependencies]
-serde_test = "1.0"
-serde_json = "1.0"
+serde_test = "~1.0"
+serde_json = "~1.0"

 [features]
 serde_support = ["serde", "serde_derive"]

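One note on the version bumps above: the new requirements use Cargo's tilde operator. Under standard Cargo semantics (not something stated in this diff), `~0.4` allows >= 0.4.0 and < 0.5.0, `~1.0` allows >= 1.0.0 and < 1.1.0, and a bare `~1` allows anything below 2.0.0, so the tilde pins the minor or patch range more tightly than the default caret behaviour of plain `"0.4"`.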
@@ -13,11 +13,11 @@
 use std::collections::BTreeMap;
 use std::fmt;

-use value_rc::ValueRc;
+use crate::value_rc::ValueRc;

-use symbols::{Keyword, PlainSymbol};
+use crate::symbols::{Keyword, PlainSymbol};

-use types::ValueAndSpan;
+use crate::types::ValueAndSpan;

 /// `EntityPlace` and `ValuePlace` embed values, either directly (i.e., `ValuePlace::Atom`) or
 /// indirectly (i.e., `EntityPlace::LookupRef`). In order to maintain the graph of `Into` and

@@ -49,8 +49,8 @@ impl TempId {
 impl fmt::Display for TempId {
     fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match self {
-            &TempId::External(ref s) => write!(f, "{}", s),
-            &TempId::Internal(x) => write!(f, "<tempid {}>", x),
+            TempId::External(ref s) => write!(f, "{}", s),
+            TempId::Internal(x) => write!(f, "<tempid {}>", x),
         }
     }
 }

@@ -76,8 +76,8 @@ impl From<Keyword> for EntidOrIdent {
 impl EntidOrIdent {
     pub fn unreversed(&self) -> Option<EntidOrIdent> {
         match self {
-            &EntidOrIdent::Entid(_) => None,
-            &EntidOrIdent::Ident(ref a) => a.unreversed().map(EntidOrIdent::Ident),
+            EntidOrIdent::Entid(_) => None,
+            EntidOrIdent::Ident(ref a) => a.unreversed().map(EntidOrIdent::Ident),
         }
     }
 }

@@ -14,7 +14,7 @@ use std::collections::HashSet;
 use std::hash::Hash;
 use std::ops::{Deref, DerefMut};

-use ValueRc;
+use crate::ValueRc;

 /// An `InternSet` allows to "intern" some potentially large values, maintaining a single value
 /// instance owned by the `InternSet` and leaving consumers with lightweight ref-counted handles to

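Most of the edn changes are mechanical 2015-to-2018 edition path fixes: crate-internal imports such as `use types::Value;` must be anchored as `use crate::types::Value;` (or `self::`/`super::`), and the matching `extern crate` boilerplate becomes optional. A minimal two-module library sketch, with module and type names invented purely for illustration:

// src/lib.rs of a 2018-edition crate (illustrative layout, not the edn crate itself).
pub mod types {
    #[derive(Debug)]
    pub enum Value {
        Integer(i64),
        Text(String),
    }
}

pub mod matcher {
    // The 2015 edition accepted `use types::Value;` here; 2018 requires the
    // crate-relative path to start with `crate::`.
    use crate::types::Value;

    pub fn describe(v: &Value) -> String {
        format!("{:?}", v)
    }
}

Building this with `cargo build --lib` on a 2018-edition crate works without any `extern crate` declarations, which is why the diff can keep only the ones it still wants for macro imports.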
@@ -8,7 +8,9 @@
 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
 // specific language governing permissions and limitations under the License.

+extern crate bytes;
 extern crate chrono;
+extern crate hex;
 extern crate itertools;
 extern crate num;
 extern crate ordered_float;

@@ -25,7 +27,7 @@ extern crate serde_derive;

 pub mod entities;
 pub mod intern_set;
-pub use intern_set::InternSet;
+pub use crate::intern_set::InternSet;
 // Intentionally not pub.
 pub mod matcher;
 mod namespaceable_name;

@@ -35,20 +37,22 @@ pub mod symbols;
 pub mod types;
 pub mod utils;
 pub mod value_rc;
-pub use value_rc::{Cloned, FromRc, ValueRc};
+pub use crate::value_rc::{Cloned, FromRc, ValueRc};

 // Re-export the types we use.
+use bytes::Bytes;
 pub use chrono::{DateTime, Utc};
+use hex::decode;
 pub use num::BigInt;
 pub use ordered_float::OrderedFloat;
 pub use uuid::Uuid;

 // Export from our modules.
-pub use types::{
+pub use crate::types::{
     FromMicros, FromMillis, Span, SpannedValue, ToMicros, ToMillis, Value, ValueAndSpan,
 };

-pub use symbols::{Keyword, NamespacedSymbol, PlainSymbol};
+pub use crate::symbols::{Keyword, NamespacedSymbol, PlainSymbol};

 use std::collections::{BTreeMap, BTreeSet, LinkedList};
 use std::f64::{INFINITY, NAN, NEG_INFINITY};

@@ -56,8 +60,8 @@ use std::iter::FromIterator;

 use chrono::TimeZone;

-use entities::*;
-use query::FromValue;
+use crate::entities::*;
+use crate::query::FromValue;

 // Goal: Be able to parse https://github.com/edn-format/edn
 // Also extensible to help parse http://docs.datomic.com/query.html

@@ -68,7 +72,7 @@ use query::FromValue;
 // TODO: Support tagged elements
 // TODO: Support discard

-pub type ParseErrorKind = peg::error::ParseError<peg::str::LineCol>;
+pub type ParseError = peg::error::ParseError<peg::str::LineCol>;

 peg::parser!(pub grammar parse() for str {

@@ -124,7 +128,7 @@ peg::parser!(pub grammar parse() for str {
     // result = r#""foo\\bar""#
     // For the typical case, string_normal_chars will match multiple, leading to a single-element vec.
     pub rule raw_text() -> String = "\"" t:((string_special_char() / string_normal_chars())*) "\""
-        { t.join(&"").to_string() }
+        { t.join("") }

     pub rule text() -> SpannedValue
         = v:raw_text() { SpannedValue::Text(v) }

@@ -148,17 +152,17 @@ peg::parser!(pub grammar parse() for str {
     rule inst_micros() -> DateTime<Utc> =
         "#instmicros" whitespace()+ d:$( digit()+ ) {
             let micros = d.parse::<i64>().unwrap();
-            let seconds: i64 = micros / 1000000;
-            let nanos: u32 = ((micros % 1000000).abs() as u32) * 1000;
-            Utc.timestamp(seconds, nanos)
+            let seconds: i64 = micros / 1_000_000;
+            let nanos: u32 = ((micros % 1_000_000).unsigned_abs() as u32) * 1000;
+            Utc.timestamp_opt(seconds, nanos).unwrap()
         }

     rule inst_millis() -> DateTime<Utc> =
         "#instmillis" whitespace()+ d:$( digit()+ ) {
             let millis = d.parse::<i64>().unwrap();
             let seconds: i64 = millis / 1000;
-            let nanos: u32 = ((millis % 1000).abs() as u32) * 1000000;
-            Utc.timestamp(seconds, nanos)
+            let nanos: u32 = ((millis % 1000).unsigned_abs() as u32) * 1_000_000;
+            Utc.timestamp_opt(seconds, nanos).unwrap()
         }

     rule inst() -> SpannedValue = t:(inst_millis() / inst_micros() / inst_string())

@@ -172,6 +176,14 @@ peg::parser!(pub grammar parse() for str {
     pub rule uuid() -> SpannedValue = "#uuid" whitespace()+ u:uuid_string()
         { SpannedValue::Uuid(u) }

+    rule byte_buffer() -> Bytes =
+        u:$( hex()+ ) {
+            let b = decode(u).expect("this is a valid hex byte string");
+            Bytes::copy_from_slice(&b)
+        }
+    pub rule bytes() -> SpannedValue = "#bytes" whitespace()+ u:byte_buffer()
+        { SpannedValue::Bytes(u) }
+
     rule namespace_divider() = "."
     rule namespace_separator() = "/"

@@ -219,7 +231,7 @@ peg::parser!(pub grammar parse() for str {

     // Note: It's important that float comes before integer or the parser assumes that floats are integers and fails to parse.
     pub rule value() -> ValueAndSpan =
-        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / text() / keyword() / symbol() / list() / vector() / map() / set()) end:position!() __ {
+        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / bytes() / text() / keyword() / symbol() / list() / vector() / map() / set() ) end:position!() __ {
             ValueAndSpan {
                 inner: v,
                 span: Span::new(start, end)

@@ -311,7 +323,7 @@ peg::parser!(pub grammar parse() for str {
         / __ v:atom() __ { ValuePlace::Atom(v) }

     pub rule entity() -> Entity<ValueAndSpan>
-        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e: e, a: AttributePlace::Entid(a), v: v } }
+        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e, a: AttributePlace::Entid(a), v } }
         / __ "[" __ op:(op()) __ e:(value_place()) __ a:(backward_entid()) __ v:(entity_place()) __ "]" __ { Entity::AddOrRetract { op, e: v, a: AttributePlace::Entid(a), v: e } }
         / __ map:map_notation() __ { Entity::MapNotation(map) }
         / expected!("entity")

@@ -349,11 +361,11 @@ peg::parser!(pub grammar parse() for str {
         = __ "*" __ { query::PullAttributeSpec::Wildcard }
         / __ k:raw_forward_namespaced_keyword() __ alias:(":as" __ alias:raw_forward_keyword() __ { alias })? {
             let attribute = query::PullConcreteAttribute::Ident(::std::rc::Rc::new(k));
-            let alias = alias.map(|alias| ::std::rc::Rc::new(alias));
+            let alias = alias.map(::std::rc::Rc::new);
             query::PullAttributeSpec::Attribute(
                 query::NamedPullAttribute {
                     attribute,
-                    alias: alias,
+                    alias,
                 })
         }

@@ -470,7 +482,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::Pred(
                 query::Predicate {
                     operator: func.0,
-                    args: args,
+                    args,
                 })
         }

@@ -479,7 +491,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::WhereFn(
                 query::WhereFn {
                     operator: func.0,
-                    args: args,
+                    args,
                     binding,
                 })
         }

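The parser hunks above lean on three library calls worth isolating: chrono's `TimeZone::timestamp_opt` (which returns a `LocalResult` instead of panicking inside chrono), `i64::unsigned_abs` for the sub-second remainder, and `hex::decode` plus `Bytes::copy_from_slice` for the new `#bytes` literal. A standalone sketch of the same conversions, assuming the chrono, hex, and bytes crates listed in the Cargo.toml above; the values are made up:

use bytes::Bytes;
use chrono::{DateTime, TimeZone, Utc};

fn instant_from_millis(millis: i64) -> DateTime<Utc> {
    let seconds = millis / 1000;
    // `unsigned_abs` avoids the overflow edge case of `abs()` on i64::MIN.
    let nanos = ((millis % 1000).unsigned_abs() as u32) * 1_000_000;
    // `timestamp_opt` returns a LocalResult; for Utc a valid pair resolves to a single instant.
    Utc.timestamp_opt(seconds, nanos).unwrap()
}

fn bytes_from_hex(s: &str) -> Bytes {
    let raw = hex::decode(s).expect("valid hex");
    Bytes::copy_from_slice(&raw)
}

fn main() {
    let t = instant_from_millis(1_636_000_000_123);
    println!("{}", t.to_rfc3339());
    assert_eq!(bytes_from_hex("0102032a"), Bytes::from_static(&[1u8, 2, 3, 42]));
}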
@@ -12,8 +12,8 @@ use itertools::diff_with;
 use std::cell::RefCell;
 use std::collections::HashMap;

-use symbols;
-use types::Value;
+use crate::symbols;
+use crate::types::Value;

 /// A trait defining pattern matching rules for any given pattern of type `T`.
 trait PatternMatchingRules<'a, T> {

@@ -87,7 +87,7 @@ impl<'a> Matcher<'a> {
     where
         T: PatternMatchingRules<'a, Value>,
     {
-        use Value::*;
+        use crate::Value::*;

         if T::matches_any(pattern) {
             true

@@ -140,7 +140,7 @@ impl Value {

 #[cfg(test)]
 mod test {
-    use parse;
+    use crate::parse;

     macro_rules! assert_match {
         ( $pattern:tt, $value:tt, $expected:expr ) => {

@@ -85,7 +85,7 @@ impl NamespaceableName {

         NamespaceableName {
             components: dest,
-            boundary: boundary,
+            boundary,
         }
     }

@@ -121,7 +121,7 @@ impl NamespaceableName {
         if name.starts_with('_') {
             Self::new(self.namespace(), &name[1..])
         } else {
-            Self::new(self.namespace(), &format!("_{}", name))
+            Self::new(self.namespace(), format!("_{}", name))
         }
     }

@@ -144,7 +144,7 @@ impl NamespaceableName {
     }

     #[inline]
-    pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
+    pub fn components(&self) -> (&str, &str) {
         if self.boundary > 0 {
             (
                 &self.components[0..self.boundary],

@@ -205,8 +205,8 @@ impl fmt::Display for NamespaceableName {
 // friendly and automatic (e.g. `derive`d), and just pass all work off to it in our custom
 // implementation of Serialize and Deserialize.
 #[cfg(feature = "serde_support")]
-#[cfg_attr(feature = "serde_support", serde(rename = "NamespaceableName"))]
 #[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "serde_support", serde(rename = "NamespaceableName"))]
 struct SerializedNamespaceableName<'a> {
     namespace: Option<&'a str>,
     name: &'a str,

@@ -219,11 +219,11 @@ impl<'de> Deserialize<'de> for NamespaceableName {
         D: Deserializer<'de>,
     {
         let separated = SerializedNamespaceableName::deserialize(deserializer)?;
-        if separated.name.len() == 0 {
+        if separated.name.is_empty() {
             return Err(de::Error::custom("Empty name in keyword or symbol"));
         }
         if let Some(ns) = separated.namespace {
-            if ns.len() == 0 {
+            if ns.is_empty() {
                 Err(de::Error::custom(
                     "Empty but present namespace in keyword or symbol",
                 ))

@@ -309,17 +309,6 @@ mod test {

         arr.sort();

-        assert_eq!(
-            arr,
-            [
-                n0.clone(),
-                n2.clone(),
-                n1.clone(),
-                n3.clone(),
-                n4.clone(),
-                n5.clone(),
-                n6.clone(),
-            ]
-        );
+        assert_eq!(arr, [n0, n2, n1, n3, n4, n5, n6,]);
     }
 }

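The `name.len() == 0` rewrites above are clippy's `len_zero` lint: `is_empty` exists on `str`, `String`, slices, and the std collections, and states intent directly. A tiny validation sketch in the spirit of the deserializer above, with the error type simplified to a static string rather than the serde error used in the real code:

fn validate(namespace: Option<&str>, name: &str) -> Result<(), &'static str> {
    if name.is_empty() {
        return Err("Empty name in keyword or symbol");
    }
    if let Some(ns) = namespace {
        if ns.is_empty() {
            return Err("Empty but present namespace in keyword or symbol");
        }
    }
    Ok(())
}

fn main() {
    assert!(validate(Some("db"), "ident").is_ok());
    assert!(validate(Some(""), "ident").is_err());
    assert!(validate(None, "").is_err());
}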
@@ -16,7 +16,7 @@ use pretty;
 use std::borrow::Cow;
 use std::io;

-use types::Value;
+use crate::types::Value;

 impl Value {
     /// Return a pretty string representation of this `Value`.

@@ -57,10 +57,11 @@ impl Value {
     {
         let open = open.into();
         let n = open.len() as isize;
-        let i = vs
-            .into_iter()
-            .map(|v| v.as_doc(allocator))
-            .intersperse(allocator.line());
+        let i = {
+            let this = vs.into_iter().map(|v| v.as_doc(allocator));
+            let element = allocator.line();
+            Itertools::intersperse(this, element)
+        };
         allocator
             .text(open)
             .append(allocator.concat(i).nest(n))

@@ -81,11 +82,14 @@ impl Value {
             Value::List(ref vs) => self.bracket(pp, "(", vs, ")"),
             Value::Set(ref vs) => self.bracket(pp, "#{", vs, "}"),
             Value::Map(ref vs) => {
-                let xs = vs
+                let xs = {
+                    let this = vs
                     .iter()
                     .rev()
-                    .map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group())
-                    .intersperse(pp.line());
+                    .map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group());
+                    let element = pp.line();
+                    Itertools::intersperse(this, element)
+                };
                 pp.text("{")
                     .append(pp.concat(xs).nest(1))
                     .append(pp.text("}"))

@@ -97,7 +101,7 @@ impl Value {
             Value::Text(ref v) => pp.text("\"").append(v.as_str()).append("\""),
             Value::Uuid(ref u) => pp
                 .text("#uuid \"")
-                .append(u.to_hyphenated().to_string())
+                .append(u.hyphenated().to_string())
                 .append("\""),
             Value::Instant(ref v) => pp
                 .text("#inst \"")

@@ -110,7 +114,7 @@ impl Value {

 #[cfg(test)]
 mod test {
-    use parse;
+    use crate::parse;

     #[test]
     fn test_pp_io() {

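The pretty-printer rewrites `.intersperse(...)` method calls into fully qualified `Itertools::intersperse(iter, sep)` calls. The motivation is that std gained an unstable `Iterator::intersperse`, so the method-call form can trigger an ambiguity warning while the itertools trait is in scope; calling through the trait path keeps the resolution explicit. A small sketch using itertools (assumed to be the ~0.10 line from the Cargo.toml above):

use itertools::Itertools;

fn main() {
    let words = ["find", "where", "limit"];

    // The method-call form would be `words.iter().copied().intersperse(", ")`,
    // which can collide with the unstable std method of the same name; the
    // fully qualified call goes through the itertools trait explicitly.
    let joined: String = Itertools::intersperse(words.iter().copied(), ", ").collect();

    assert_eq!(joined, "find, where, limit");
}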
219  edn/src/query.rs
@ -35,11 +35,11 @@ use std;
|
|||
use std::fmt;
|
||||
use std::rc::Rc;
|
||||
|
||||
use {BigInt, DateTime, OrderedFloat, Utc, Uuid};
|
||||
use crate::{BigInt, DateTime, OrderedFloat, Utc, Uuid};
|
||||
|
||||
use value_rc::{FromRc, ValueRc};
|
||||
use crate::value_rc::{FromRc, ValueRc};
|
||||
|
||||
pub use {Keyword, PlainSymbol};
|
||||
pub use crate::{Keyword, PlainSymbol};
|
||||
|
||||
pub type SrcVarName = String; // Do not include the required syntactic '$'.
|
||||
|
||||
|
@ -51,10 +51,6 @@ impl Variable {
|
|||
self.0.as_ref().0.as_str()
|
||||
}
|
||||
|
||||
pub fn to_string(&self) -> String {
|
||||
self.0.as_ref().0.clone()
|
||||
}
|
||||
|
||||
pub fn name(&self) -> PlainSymbol {
|
||||
self.0.as_ref().clone()
|
||||
}
|
||||
|
@ -68,15 +64,15 @@ impl Variable {
|
|||
}
|
||||
|
||||
pub trait FromValue<T> {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<T>;
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<T>;
|
||||
}
|
||||
|
||||
/// If the provided EDN value is a PlainSymbol beginning with '?', return
|
||||
/// it wrapped in a Variable. If not, return None.
|
||||
/// TODO: intern strings. #398.
|
||||
impl FromValue<Variable> for Variable {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<Variable> {
|
||||
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<Variable> {
|
||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
Variable::from_symbol(s)
|
||||
} else {
|
||||
None
|
||||
|
@ -87,7 +83,7 @@ impl FromValue<Variable> for Variable {
|
|||
impl Variable {
|
||||
pub fn from_rc(sym: Rc<PlainSymbol>) -> Option<Variable> {
|
||||
if sym.is_var_symbol() {
|
||||
Some(Variable(sym.clone()))
|
||||
Some(Variable(sym))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -119,8 +115,8 @@ impl std::fmt::Display for Variable {
|
|||
pub struct QueryFunction(pub PlainSymbol);
|
||||
|
||||
impl FromValue<QueryFunction> for QueryFunction {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<QueryFunction> {
|
||||
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<QueryFunction> {
|
||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
QueryFunction::from_symbol(s)
|
||||
} else {
|
||||
None
|
||||
|
@ -158,8 +154,8 @@ pub enum SrcVar {
|
|||
}
|
||||
|
||||
impl FromValue<SrcVar> for SrcVar {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<SrcVar> {
|
||||
if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<SrcVar> {
|
||||
if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
|
||||
SrcVar::from_symbol(s)
|
||||
} else {
|
||||
None
|
||||
|
@ -217,8 +213,8 @@ pub enum FnArg {
|
|||
}
|
||||
|
||||
impl FromValue<FnArg> for FnArg {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<FnArg> {
|
||||
use SpannedValue::*;
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<FnArg> {
|
||||
use crate::SpannedValue::*;
|
||||
match v.inner {
|
||||
Integer(x) => Some(FnArg::EntidOrInteger(x)),
|
||||
PlainSymbol(ref x) if x.is_src_symbol() => SrcVar::from_symbol(x).map(FnArg::SrcVar),
|
||||
|
@ -237,7 +233,7 @@ impl FromValue<FnArg> for FnArg {
|
|||
{
|
||||
Some(FnArg::Constant(x.clone().into()))
|
||||
}
|
||||
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) => None,
|
||||
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) | Bytes(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -246,18 +242,18 @@ impl FromValue<FnArg> for FnArg {
|
|||
impl std::fmt::Display for FnArg {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
&FnArg::Variable(ref var) => write!(f, "{}", var),
|
||||
&FnArg::SrcVar(ref var) => {
|
||||
FnArg::Variable(ref var) => write!(f, "{}", var),
|
||||
FnArg::SrcVar(ref var) => {
|
||||
if var == &SrcVar::DefaultSrc {
|
||||
write!(f, "$")
|
||||
} else {
|
||||
write!(f, "{:?}", var)
|
||||
}
|
||||
}
|
||||
&FnArg::EntidOrInteger(entid) => write!(f, "{}", entid),
|
||||
&FnArg::IdentOrKeyword(ref kw) => write!(f, "{}", kw),
|
||||
&FnArg::Constant(ref constant) => write!(f, "{:?}", constant),
|
||||
&FnArg::Vector(ref vec) => write!(f, "{:?}", vec),
|
||||
FnArg::EntidOrInteger(entid) => write!(f, "{}", entid),
|
||||
FnArg::IdentOrKeyword(ref kw) => write!(f, "{}", kw),
|
||||
FnArg::Constant(ref constant) => write!(f, "{:?}", constant),
|
||||
FnArg::Vector(ref vec) => write!(f, "{:?}", vec),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -265,7 +261,7 @@ impl std::fmt::Display for FnArg {
|
|||
impl FnArg {
|
||||
pub fn as_variable(&self) -> Option<&Variable> {
|
||||
match self {
|
||||
&FnArg::Variable(ref v) => Some(v),
|
||||
FnArg::Variable(ref v) => Some(v),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -320,27 +316,25 @@ impl PatternNonValuePlace {
|
|||
}
|
||||
|
||||
impl FromValue<PatternNonValuePlace> for PatternNonValuePlace {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<PatternNonValuePlace> {
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<PatternNonValuePlace> {
|
||||
match v.inner {
|
||||
::SpannedValue::Integer(x) => {
|
||||
crate::SpannedValue::Integer(x) => {
|
||||
if x >= 0 {
|
||||
Some(PatternNonValuePlace::Entid(x))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
::SpannedValue::PlainSymbol(ref x) => {
|
||||
crate::SpannedValue::PlainSymbol(ref x) => {
|
||||
if x.0.as_str() == "_" {
|
||||
Some(PatternNonValuePlace::Placeholder)
|
||||
} else {
|
||||
if let Some(v) = Variable::from_symbol(x) {
|
||||
} else if let Some(v) = Variable::from_symbol(x) {
|
||||
Some(PatternNonValuePlace::Variable(v))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
|
||||
crate::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -377,45 +371,46 @@ impl From<Keyword> for PatternValuePlace {
|
|||
}
|
||||
|
||||
impl FromValue<PatternValuePlace> for PatternValuePlace {
|
||||
fn from_value(v: &::ValueAndSpan) -> Option<PatternValuePlace> {
|
||||
fn from_value(v: &crate::ValueAndSpan) -> Option<PatternValuePlace> {
|
||||
match v.inner {
|
||||
::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
|
||||
::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
|
||||
crate::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
|
||||
crate::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
|
||||
Some(PatternValuePlace::Placeholder)
|
||||
}
|
||||
::SpannedValue::PlainSymbol(ref x) => {
|
||||
crate::SpannedValue::PlainSymbol(ref x) => {
|
||||
Variable::from_symbol(x).map(PatternValuePlace::Variable)
|
||||
}
|
||||
::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
|
||||
::SpannedValue::Boolean(x) => {
|
||||
crate::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
|
||||
crate::SpannedValue::Boolean(x) => {
|
||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Boolean(x)))
|
||||
}
|
||||
::SpannedValue::Float(x) => {
|
||||
crate::SpannedValue::Float(x) => {
|
||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Float(x)))
|
||||
}
|
||||
::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
|
||||
crate::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
|
||||
NonIntegerConstant::BigInteger(x.clone()),
|
||||
)),
|
||||
::SpannedValue::Instant(x) => {
|
||||
crate::SpannedValue::Instant(x) => {
|
||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Instant(x)))
|
||||
}
|
||||
::SpannedValue::Text(ref x) =>
|
||||
crate::SpannedValue::Text(ref x) =>
|
||||
// TODO: intern strings. #398.
|
||||
{
|
||||
Some(PatternValuePlace::Constant(x.clone().into()))
|
||||
}
|
||||
::SpannedValue::Uuid(ref u) => Some(PatternValuePlace::Constant(
|
||||
NonIntegerConstant::Uuid(u.clone()),
|
||||
)),
|
||||
crate::SpannedValue::Uuid(ref u) => {
|
||||
Some(PatternValuePlace::Constant(NonIntegerConstant::Uuid(*u)))
|
||||
}
|
||||
|
||||
// These don't appear in queries.
|
||||
::SpannedValue::Nil => None,
|
||||
::SpannedValue::NamespacedSymbol(_) => None,
|
||||
::SpannedValue::Keyword(_) => None, // … yet.
|
||||
::SpannedValue::Map(_) => None,
|
||||
::SpannedValue::List(_) => None,
|
||||
::SpannedValue::Set(_) => None,
|
||||
::SpannedValue::Vector(_) => None,
|
||||
crate::SpannedValue::Nil => None,
|
||||
crate::SpannedValue::NamespacedSymbol(_) => None,
|
||||
crate::SpannedValue::Keyword(_) => None, // … yet.
|
||||
crate::SpannedValue::Map(_) => None,
|
||||
crate::SpannedValue::List(_) => None,
|
||||
crate::SpannedValue::Set(_) => None,
|
||||
crate::SpannedValue::Vector(_) => None,
|
||||
crate::SpannedValue::Bytes(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -498,15 +493,15 @@ pub enum PullAttributeSpec {
|
|||
impl std::fmt::Display for PullConcreteAttribute {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
&PullConcreteAttribute::Ident(ref k) => write!(f, "{}", k),
|
||||
&PullConcreteAttribute::Entid(i) => write!(f, "{}", i),
|
||||
PullConcreteAttribute::Ident(ref k) => write!(f, "{}", k),
|
||||
PullConcreteAttribute::Entid(i) => write!(f, "{}", i),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for NamedPullAttribute {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
if let &Some(ref alias) = &self.alias {
|
||||
if let Some(ref alias) = self.alias {
|
||||
write!(f, "{} :as {}", self.attribute, alias)
|
||||
} else {
|
||||
write!(f, "{}", self.attribute)
|
||||
|
@ -517,8 +512,8 @@ impl std::fmt::Display for NamedPullAttribute {
|
|||
impl std::fmt::Display for PullAttributeSpec {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
&PullAttributeSpec::Wildcard => write!(f, "*"),
|
||||
&PullAttributeSpec::Attribute(ref attr) => write!(f, "{}", attr),
|
||||
PullAttributeSpec::Wildcard => write!(f, "*"),
|
||||
PullAttributeSpec::Attribute(ref attr) => write!(f, "{}", attr),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -553,10 +548,10 @@ impl Element {
|
|||
/// Returns true if the element must yield only one value.
|
||||
pub fn is_unit(&self) -> bool {
|
||||
match self {
|
||||
&Element::Variable(_) => false,
|
||||
&Element::Pull(_) => false,
|
||||
&Element::Aggregate(_) => true,
|
||||
&Element::Corresponding(_) => true,
|
||||
Element::Variable(_) => false,
|
||||
Element::Pull(_) => false,
|
||||
Element::Aggregate(_) => true,
|
||||
Element::Corresponding(_) => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -570,8 +565,8 @@ impl From<Variable> for Element {
|
|||
impl std::fmt::Display for Element {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
&Element::Variable(ref var) => write!(f, "{}", var),
|
||||
&Element::Pull(Pull {
|
||||
Element::Variable(ref var) => write!(f, "{}", var),
|
||||
Element::Pull(Pull {
|
||||
ref var,
|
||||
ref patterns,
|
||||
}) => {
|
||||
|
@ -581,12 +576,12 @@ impl std::fmt::Display for Element {
|
|||
}
|
||||
write!(f, "])")
|
||||
}
|
||||
&Element::Aggregate(ref agg) => match agg.args.len() {
|
||||
Element::Aggregate(ref agg) => match agg.args.len() {
|
||||
0 => write!(f, "({})", agg.func),
|
||||
1 => write!(f, "({} {})", agg.func, agg.args[0]),
|
||||
_ => write!(f, "({} {:?})", agg.func, agg.args),
|
||||
},
|
||||
&Element::Corresponding(ref var) => write!(f, "(the {})", var),
|
||||
Element::Corresponding(ref var) => write!(f, "(the {})", var),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -609,9 +604,6 @@ pub enum Limit {
|
|||
///
|
||||
/// ```rust
|
||||
/// # use edn::query::{Element, FindSpec, Variable};
|
||||
///
|
||||
/// # fn main() {
|
||||
///
|
||||
/// let elements = vec![
|
||||
/// Element::Variable(Variable::from_valid_name("?foo")),
|
||||
/// Element::Variable(Variable::from_valid_name("?bar")),
|
||||
|
@ -621,8 +613,6 @@ pub enum Limit {
|
|||
/// if let FindSpec::FindRel(elements) = rel {
|
||||
/// assert_eq!(2, elements.len());
|
||||
/// }
|
||||
///
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
|
@ -649,19 +639,19 @@ impl FindSpec {
|
|||
pub fn is_unit_limited(&self) -> bool {
|
||||
use self::FindSpec::*;
|
||||
match self {
|
||||
&FindScalar(..) => true,
|
||||
&FindTuple(..) => true,
|
||||
&FindRel(..) => false,
|
||||
&FindColl(..) => false,
|
||||
FindScalar(..) => true,
|
||||
FindTuple(..) => true,
|
||||
FindRel(..) => false,
|
||||
FindColl(..) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expected_column_count(&self) -> usize {
|
||||
use self::FindSpec::*;
|
||||
match self {
|
||||
&FindScalar(..) => 1,
|
||||
&FindColl(..) => 1,
|
||||
&FindTuple(ref elems) | &FindRel(ref elems) => elems.len(),
|
||||
FindScalar(..) => 1,
|
||||
FindColl(..) => 1,
|
||||
FindTuple(ref elems) | &FindRel(ref elems) => elems.len(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -690,10 +680,10 @@ impl FindSpec {
|
|||
pub fn columns<'s>(&'s self) -> Box<dyn Iterator<Item = &Element> + 's> {
|
||||
use self::FindSpec::*;
|
||||
match self {
|
||||
&FindScalar(ref e) => Box::new(std::iter::once(e)),
|
||||
&FindColl(ref e) => Box::new(std::iter::once(e)),
|
||||
&FindTuple(ref v) => Box::new(v.iter()),
|
||||
&FindRel(ref v) => Box::new(v.iter()),
|
||||
FindScalar(ref e) => Box::new(std::iter::once(e)),
|
||||
FindColl(ref e) => Box::new(std::iter::once(e)),
|
||||
FindTuple(ref v) => Box::new(v.iter()),
|
||||
FindRel(ref v) => Box::new(v.iter()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -716,8 +706,8 @@ impl VariableOrPlaceholder {
|
|||
|
||||
pub fn var(&self) -> Option<&Variable> {
|
||||
match self {
|
||||
&VariableOrPlaceholder::Placeholder => None,
|
||||
&VariableOrPlaceholder::Variable(ref var) => Some(var),
|
||||
VariableOrPlaceholder::Placeholder => None,
|
||||
VariableOrPlaceholder::Variable(ref var) => Some(var),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -771,11 +761,11 @@ impl Binding {
|
|||
/// ```
|
||||
pub fn is_valid(&self) -> bool {
|
||||
match self {
|
||||
&Binding::BindScalar(_) | &Binding::BindColl(_) => true,
|
||||
&Binding::BindRel(ref vars) | &Binding::BindTuple(ref vars) => {
|
||||
Binding::BindScalar(_) | &Binding::BindColl(_) => true,
|
||||
Binding::BindRel(ref vars) | &Binding::BindTuple(ref vars) => {
|
||||
let mut acc = HashSet::<Variable>::new();
|
||||
for var in vars {
|
||||
if let &VariableOrPlaceholder::Variable(ref var) = var {
|
||||
if let VariableOrPlaceholder::Variable(ref var) = *var {
|
||||
if !acc.insert(var.clone()) {
|
||||
// It's invalid if there was an equal var already present in the set --
|
||||
// i.e., we have a duplicate var.
|
||||
|
@ -832,7 +822,7 @@ impl Pattern {
|
|||
entity: v_e,
|
||||
attribute: k.to_reversed().into(),
|
||||
value: e_v,
|
||||
tx: tx,
|
||||
tx,
|
||||
});
|
||||
} else {
|
||||
return None;
|
||||
|
@ -844,7 +834,7 @@ impl Pattern {
|
|||
entity: e,
|
||||
attribute: a,
|
||||
value: v,
|
||||
tx: tx,
|
||||
tx,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -893,10 +883,7 @@ pub enum UnifyVars {
|
|||
|
||||
impl WhereClause {
|
||||
pub fn is_pattern(&self) -> bool {
|
||||
match self {
|
||||
&WhereClause::Pattern(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self, WhereClause::Pattern(_))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -909,8 +896,8 @@ pub enum OrWhereClause {
|
|||
impl OrWhereClause {
|
||||
pub fn is_pattern_or_patterns(&self) -> bool {
|
||||
match self {
|
||||
&OrWhereClause::Clause(WhereClause::Pattern(_)) => true,
|
||||
&OrWhereClause::And(ref clauses) => clauses.iter().all(|clause| clause.is_pattern()),
|
||||
OrWhereClause::Clause(WhereClause::Pattern(_)) => true,
|
||||
OrWhereClause::And(ref clauses) => clauses.iter().all(|clause| clause.is_pattern()),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -934,8 +921,8 @@ pub struct NotJoin {
|
|||
impl NotJoin {
|
||||
pub fn new(unify_vars: UnifyVars, clauses: Vec<WhereClause>) -> NotJoin {
|
||||
NotJoin {
|
||||
unify_vars: unify_vars,
|
||||
clauses: clauses,
|
||||
unify_vars,
|
||||
clauses,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1041,8 +1028,8 @@ impl ParsedQuery {
|
|||
Ok(ParsedQuery {
|
||||
find_spec: find_spec.ok_or("expected :find")?,
|
||||
default_source: SrcVar::DefaultSrc,
|
||||
with: with.unwrap_or(vec![]),
|
||||
in_vars: in_vars.unwrap_or(vec![]),
|
||||
with: with.unwrap_or_default(),
|
||||
in_vars: in_vars.unwrap_or_default(),
|
||||
in_sources: BTreeSet::default(),
|
||||
limit: limit.unwrap_or(Limit::None),
|
||||
where_clauses: where_clauses.ok_or("expected :where")?,
|
||||
|
@ -1054,8 +1041,8 @@ impl ParsedQuery {
|
|||
impl OrJoin {
|
||||
pub fn new(unify_vars: UnifyVars, clauses: Vec<OrWhereClause>) -> OrJoin {
|
||||
OrJoin {
|
||||
unify_vars: unify_vars,
|
||||
clauses: clauses,
|
||||
unify_vars,
|
||||
clauses,
|
||||
mentioned_vars: None,
|
||||
}
|
||||
}
|
||||
|
@ -1064,8 +1051,8 @@ impl OrJoin {
|
|||
/// every variable mentioned inside the join is also mentioned in the `UnifyVars` list.
|
||||
pub fn is_fully_unified(&self) -> bool {
|
||||
match &self.unify_vars {
|
||||
&UnifyVars::Implicit => true,
|
||||
&UnifyVars::Explicit(ref vars) => {
|
||||
UnifyVars::Implicit => true,
|
||||
UnifyVars::Explicit(ref vars) => {
|
||||
// We know that the join list must be a subset of the vars in the pattern, or
|
||||
// it would have failed validation. That allows us to simply compare counts here.
|
||||
// TODO: in debug mode, do a full intersection, and verify that our count check
|
||||
|
@ -1094,13 +1081,13 @@ impl ContainsVariables for WhereClause {
|
|||
fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
|
||||
use self::WhereClause::*;
|
||||
match self {
|
||||
&OrJoin(ref o) => o.accumulate_mentioned_variables(acc),
|
||||
&Pred(ref p) => p.accumulate_mentioned_variables(acc),
|
||||
&Pattern(ref p) => p.accumulate_mentioned_variables(acc),
|
||||
&NotJoin(ref n) => n.accumulate_mentioned_variables(acc),
|
||||
&WhereFn(ref f) => f.accumulate_mentioned_variables(acc),
|
||||
&TypeAnnotation(ref a) => a.accumulate_mentioned_variables(acc),
|
||||
&RuleExpr => (),
|
||||
OrJoin(ref o) => o.accumulate_mentioned_variables(acc),
|
||||
Pred(ref p) => p.accumulate_mentioned_variables(acc),
|
||||
Pattern(ref p) => p.accumulate_mentioned_variables(acc),
|
||||
NotJoin(ref n) => n.accumulate_mentioned_variables(acc),
|
||||
WhereFn(ref f) => f.accumulate_mentioned_variables(acc),
|
||||
TypeAnnotation(ref a) => a.accumulate_mentioned_variables(acc),
|
||||
RuleExpr => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1109,12 +1096,12 @@ impl ContainsVariables for OrWhereClause {
|
|||
fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
|
||||
use self::OrWhereClause::*;
|
||||
match self {
|
||||
&And(ref clauses) => {
|
||||
And(ref clauses) => {
|
||||
for clause in clauses {
|
||||
clause.accumulate_mentioned_variables(acc)
|
||||
}
|
||||
}
|
||||
&Clause(ref clause) => clause.accumulate_mentioned_variables(acc),
|
||||
Clause(ref clause) => clause.accumulate_mentioned_variables(acc),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1161,7 +1148,7 @@ impl ContainsVariables for NotJoin {
|
|||
impl ContainsVariables for Predicate {
|
||||
fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
|
||||
for arg in &self.args {
|
||||
if let &FnArg::Variable(ref v) = arg {
|
||||
if let FnArg::Variable(ref v) = *arg {
|
||||
acc_ref(acc, v)
|
||||
}
|
||||
}
|
||||
|
@ -1177,10 +1164,10 @@ impl ContainsVariables for TypeAnnotation {
|
|||
impl ContainsVariables for Binding {
|
||||
fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
|
||||
match self {
|
||||
&Binding::BindScalar(ref v) | &Binding::BindColl(ref v) => acc_ref(acc, v),
|
||||
&Binding::BindRel(ref vs) | &Binding::BindTuple(ref vs) => {
|
||||
Binding::BindScalar(ref v) | &Binding::BindColl(ref v) => acc_ref(acc, v),
|
||||
Binding::BindRel(ref vs) | &Binding::BindTuple(ref vs) => {
|
||||
for v in vs {
|
||||
if let &VariableOrPlaceholder::Variable(ref v) = v {
|
||||
if let VariableOrPlaceholder::Variable(ref v) = *v {
|
||||
acc_ref(acc, v);
|
||||
}
|
||||
}
|
||||
|
@ -1192,7 +1179,7 @@ impl ContainsVariables for Binding {
|
|||
impl ContainsVariables for WhereFn {
|
||||
fn accumulate_mentioned_variables(&self, acc: &mut BTreeSet<Variable>) {
|
||||
for arg in &self.args {
|
||||
if let &FnArg::Variable(ref v) = arg {
|
||||
if let FnArg::Variable(ref v) = *arg {
|
||||
acc_ref(acc, v)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -10,7 +10,7 @@

 use std::fmt::{Display, Formatter, Write};

-use namespaceable_name::NamespaceableName;
+use crate::namespaceable_name::NamespaceableName;

 #[macro_export]
 macro_rules! ns_keyword {

@@ -130,7 +130,7 @@ impl NamespacedSymbol {
     }

     #[inline]
-    pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
+    pub fn components(&self) -> (&str, &str) {
         self.0.components()
     }
 }

@@ -180,7 +180,7 @@ impl Keyword {
     }

     #[inline]
-    pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
+    pub fn components(&self) -> (&str, &str) {
         self.0.components()
     }

@ -8,7 +8,7 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
#![allow(redundant_semicolon)]
|
||||
#![allow(redundant_semicolons)]
|
||||
|
||||
use std::cmp::{Ord, Ordering, PartialOrd};
|
||||
use std::collections::{BTreeMap, BTreeSet, LinkedList};
|
||||
|
@ -25,8 +25,10 @@ use num::BigInt;
|
|||
use ordered_float::OrderedFloat;
|
||||
use uuid::Uuid;
|
||||
|
||||
use symbols;
|
||||
use crate::symbols;
|
||||
|
||||
use bytes::Bytes;
|
||||
use hex::encode;
|
||||
/// Value represents one of the allowed values in an EDN string.
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||
pub enum Value {
|
||||
|
@ -52,6 +54,7 @@ pub enum Value {
|
|||
// See https://internals.rust-lang.org/t/implementing-hash-for-hashset-hashmap/3817/1
|
||||
Set(BTreeSet<Value>),
|
||||
Map(BTreeMap<Value, Value>),
|
||||
Bytes(Bytes),
|
||||
}
|
||||
|
||||
/// `SpannedValue` is the parallel to `Value` but used in `ValueAndSpan`.
|
||||
|
@ -73,6 +76,7 @@ pub enum SpannedValue {
|
|||
List(LinkedList<ValueAndSpan>),
|
||||
Set(BTreeSet<ValueAndSpan>),
|
||||
Map(BTreeMap<ValueAndSpan, ValueAndSpan>),
|
||||
Bytes(Bytes),
|
||||
}
|
||||
|
||||
/// Span represents the current offset (start, end) into the input string.
|
||||
|
@ -139,7 +143,7 @@ impl Value {
|
|||
/// But right now, it's used in the bootstrapper. We'll fix that soon.
|
||||
pub fn with_spans(self) -> ValueAndSpan {
|
||||
let s = self.to_pretty(120).unwrap();
|
||||
use parse;
|
||||
use crate::parse;
|
||||
let with_spans = parse::value(&s).unwrap();
|
||||
assert_eq!(self, with_spans.clone().without_spans());
|
||||
with_spans
|
||||
|
@ -172,6 +176,7 @@ impl From<SpannedValue> for Value {
|
|||
.map(|(x, y)| (x.without_spans(), y.without_spans()))
|
||||
.collect(),
|
||||
),
|
||||
SpannedValue::Bytes(b) => Value::Bytes(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -209,9 +214,9 @@ macro_rules! def_from_option {
macro_rules! def_is {
($name: ident, $pat: pat) => {
pub fn $name(&self) -> bool {
match *self { $pat => true, _ => false }
}
matches!(*self, $pat)
}
};
}
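The rewrite above leans on the standard `matches!` macro, which expands to exactly the match-returning-bool shape it replaces. A minimal, self-contained sketch of the equivalence (the function name here is illustrative, not from the crate):

fn is_small(v: i64) -> bool {
    // Equivalent to: match v { 0..=9 => true, _ => false }
    matches!(v, 0..=9)
}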
/// Creates `as_$TYPE` helper functions for Value or SpannedValue, like
|
||||
|
@ -231,9 +236,12 @@ macro_rules! def_as {
|
|||
macro_rules! def_as_ref {
|
||||
($name: ident, $kind: path, $t: ty) => {
|
||||
pub fn $name(&self) -> Option<&$t> {
|
||||
match *self { $kind(ref v) => Some(v), _ => None }
|
||||
match *self {
|
||||
$kind(ref v) => Some(v),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Creates `into_$TYPE` helper functions for Value or SpannedValue, like
|
||||
|
@ -325,17 +333,18 @@ macro_rules! def_common_value_methods {
|
|||
def_is!(is_list, $t::List(_));
|
||||
def_is!(is_set, $t::Set(_));
|
||||
def_is!(is_map, $t::Map(_));
|
||||
def_is!(is_bytes, $t::Bytes(_));
|
||||
|
||||
pub fn is_keyword(&self) -> bool {
|
||||
match self {
|
||||
&$t::Keyword(ref k) => !k.is_namespaced(),
|
||||
$t::Keyword(ref k) => !k.is_namespaced(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_namespaced_keyword(&self) -> bool {
|
||||
match self {
|
||||
&$t::Keyword(ref k) => k.is_namespaced(),
|
||||
$t::Keyword(ref k) => k.is_namespaced(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -357,24 +366,25 @@ macro_rules! def_common_value_methods {
|
|||
def_as_ref!(as_uuid, $t::Uuid, Uuid);
|
||||
def_as_ref!(as_symbol, $t::PlainSymbol, symbols::PlainSymbol);
|
||||
def_as_ref!(as_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol);
|
||||
def_as_ref!(as_bytes, $t::Bytes, Bytes);
|
||||
|
||||
pub fn as_keyword(&self) -> Option<&symbols::Keyword> {
|
||||
match self {
|
||||
&$t::Keyword(ref k) => Some(k),
|
||||
$t::Keyword(ref k) => Some(k),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_plain_keyword(&self) -> Option<&symbols::Keyword> {
|
||||
match self {
|
||||
&$t::Keyword(ref k) if !k.is_namespaced() => Some(k),
|
||||
$t::Keyword(ref k) if !k.is_namespaced() => Some(k),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_namespaced_keyword(&self) -> Option<&symbols::Keyword> {
|
||||
match self {
|
||||
&$t::Keyword(ref k) if k.is_namespaced() => Some(k),
|
||||
$t::Keyword(ref k) if k.is_namespaced() => Some(k),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -394,6 +404,7 @@ macro_rules! def_common_value_methods {
|
|||
def_into!(into_uuid, $t::Uuid, Uuid,);
|
||||
def_into!(into_symbol, $t::PlainSymbol, symbols::PlainSymbol,);
|
||||
def_into!(into_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol,);
|
||||
def_into!(into_bytes, $t::Bytes, Bytes,);
|
||||
|
||||
pub fn into_keyword(self) -> Option<symbols::Keyword> {
|
||||
match self {
|
||||
|
@ -464,6 +475,7 @@ macro_rules! def_common_value_methods {
|
|||
$t::List(_) => 13,
|
||||
$t::Set(_) => 14,
|
||||
$t::Map(_) => 15,
|
||||
$t::Bytes(_) => 16,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -484,6 +496,7 @@ macro_rules! def_common_value_methods {
|
|||
$t::List(_) => true,
|
||||
$t::Set(_) => true,
|
||||
$t::Map(_) => true,
|
||||
$t::Bytes(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -521,6 +534,7 @@ macro_rules! def_common_value_ord {
|
|||
(&$t::List(ref a), &$t::List(ref b)) => b.cmp(a),
|
||||
(&$t::Set(ref a), &$t::Set(ref b)) => b.cmp(a),
|
||||
(&$t::Map(ref a), &$t::Map(ref b)) => b.cmp(a),
|
||||
(&$t::Bytes(ref a), &$t::Bytes(ref b)) => b.cmp(a),
|
||||
_ => $value.precedence().cmp(&$other.precedence()),
|
||||
}
|
||||
};
|
||||
|
@ -555,7 +569,7 @@ macro_rules! def_common_value_display {
}
// TODO: EDN escaping.
$t::Text(ref v) => write!($f, "\"{}\"", v),
$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.to_hyphenated().to_string()),
$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.hyphenated().to_string()),
$t::PlainSymbol(ref v) => v.fmt($f),
$t::NamespacedSymbol(ref v) => v.fmt($f),
$t::Keyword(ref v) => v.fmt($f),
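For context on the `to_hyphenated` → `hyphenated` change: the uuid crate renamed these adapter accessors in its 1.0 release, and `to_hyphenated()` was the 0.8-era name. A tiny check, assuming uuid 1.x:

use uuid::Uuid;

fn main() {
    let u = Uuid::nil();
    assert_eq!(u.hyphenated().to_string(), "00000000-0000-0000-0000-000000000000");
}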
@ -587,6 +601,10 @@ macro_rules! def_common_value_display {
|
|||
}
|
||||
write!($f, " }}")
|
||||
}
|
||||
$t::Bytes(ref v) => {
|
||||
let s = encode(v);
|
||||
write!($f, "#bytes {}", s)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -650,7 +668,7 @@ pub trait FromMicros {
impl FromMicros for DateTime<Utc> {
fn from_micros(ts: i64) -> Self {
Utc.timestamp(ts / 1_000_000, ((ts % 1_000_000).abs() as u32) * 1_000)
Utc.timestamp_opt(ts / 1_000_000, ((ts % 1_000_000).unsigned_abs() as u32) * 1_000).unwrap()
}
}

@ -672,7 +690,7 @@ pub trait FromMillis {
impl FromMillis for DateTime<Utc> {
fn from_millis(ts: i64) -> Self {
Utc.timestamp(ts / 1_000, ((ts % 1_000).abs() as u32) * 1_000)
Utc.timestamp_opt(ts / 1_000, ((ts % 1_000).unsigned_abs() as u32) * 1_000).unwrap()
}
}
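A small sketch of the chrono API difference this hunk tracks: `TimeZone::timestamp` panics on out-of-range input and is deprecated in newer chrono 0.4 releases, while `timestamp_opt` returns a `LocalResult` the caller can inspect instead of unwrapping. The helper name below is illustrative only:

use chrono::{DateTime, TimeZone, Utc};

fn micros_to_datetime(ts: i64) -> Option<DateTime<Utc>> {
    let secs = ts / 1_000_000;
    let nanos = ((ts % 1_000_000).unsigned_abs() as u32) * 1_000;
    // `single()` yields None instead of panicking when the instant is out of range.
    Utc.timestamp_opt(secs, nanos).single()
}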
@ -701,7 +719,7 @@ mod test {
|
|||
use std::f64;
|
||||
use std::iter::FromIterator;
|
||||
|
||||
use parse;
|
||||
use crate::parse;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use num::BigInt;
|
||||
|
@ -734,12 +752,12 @@ mod test {
|
|||
fn test_print_edn() {
|
||||
assert_eq!("1234N", Value::from_bigint("1234").unwrap().to_string());
|
||||
|
||||
let string = "[ 1 2 ( 3.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";
|
||||
let string = "[ 1 2 ( 7.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";
|
||||
|
||||
let data = Value::Vector(vec![
|
||||
Value::Integer(1),
|
||||
Value::Integer(2),
|
||||
Value::List(LinkedList::from_iter(vec![Value::from_float(3.14)])),
|
||||
Value::List(LinkedList::from_iter(vec![Value::from_float(7.14)])),
|
||||
Value::Set(BTreeSet::from_iter(vec![Value::from_bigint("4").unwrap()])),
|
||||
Value::Map(BTreeMap::from_iter(vec![
|
||||
(Value::from_symbol("foo", "bar"), Value::Integer(42)),
|
||||
|
@ -841,10 +859,10 @@ mod test {
|
|||
|
||||
assert!(n_v.clone().into_keyword().is_some());
|
||||
assert!(n_v.clone().into_plain_keyword().is_none());
|
||||
assert!(n_v.clone().into_namespaced_keyword().is_some());
|
||||
assert!(n_v.into_namespaced_keyword().is_some());
|
||||
|
||||
assert!(p_v.clone().into_keyword().is_some());
|
||||
assert!(p_v.clone().into_plain_keyword().is_some());
|
||||
assert!(p_v.clone().into_namespaced_keyword().is_none());
|
||||
assert!(p_v.into_namespaced_keyword().is_none());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@
#![allow(dead_code)]
use types::Value;
use crate::types::Value;

/// Merge the EDN `Value::Map` instance `right` into `left`. Returns `None` if either `left` or
/// `right` is not a `Value::Map`.

@ -21,9 +21,9 @@ use types::Value;
/// TODO: implement `merge` for [Value], following the `concat`/`SliceConcatExt` pattern.
pub fn merge(left: &Value, right: &Value) -> Option<Value> {
match (left, right) {
(&Value::Map(ref l), &Value::Map(ref r)) => {
(Value::Map(l), Value::Map(r)) => {
let mut result = l.clone();
result.extend(r.clone().into_iter());
result.extend(r.clone());
Some(Value::Map(result))
}
_ => None,
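The merge above relies on `BTreeMap::extend`, so entries from `right` overwrite entries from `left` on key collisions. A standalone illustration of that behaviour with plain maps rather than the EDN `Value` type:

use std::collections::BTreeMap;

fn main() {
    let left: BTreeMap<&str, i32> = [("a", 1), ("b", 2)].into_iter().collect();
    let right: BTreeMap<&str, i32> = [("b", 20), ("c", 3)].into_iter().collect();
    let mut merged = left.clone();
    merged.extend(right);
    assert_eq!(merged[&"b"], 20); // the right-hand map wins on conflict
}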
@ -22,7 +22,7 @@ where
T: Sized + Clone,
{
fn from_rc(val: Rc<T>) -> Self {
val.clone()
val
}

fn from_arc(val: Arc<T>) -> Self {

@ -45,7 +45,7 @@ where
}

fn from_arc(val: Arc<T>) -> Self {
val.clone()
val
}
}
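The `val.clone()` → `val` change removes a redundant clone: the function already owns the `Rc`/`Arc` by value, and cloning a reference-counted pointer only bumps the count before the original is dropped. A small sketch of the same shape (clippy's `redundant_clone` lint flags the first form):

use std::rc::Rc;

fn take_rc(val: Rc<String>) -> Rc<String> {
    val // no `val.clone()` needed; ownership is simply passed through
}

fn main() {
    let shared = Rc::new("hello".to_string());
    let same = take_rc(Rc::clone(&shared));
    assert_eq!(Rc::strong_count(&same), 2);
}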
@ -26,7 +26,7 @@ use chrono::{TimeZone, Utc};
|
|||
use edn::{
|
||||
parse, symbols,
|
||||
types::{Span, SpannedValue, Value, ValueAndSpan},
|
||||
utils, ParseErrorKind,
|
||||
utils, ParseError,
|
||||
};
|
||||
|
||||
// Helper for making wrapped keywords with a namespace.
|
||||
|
@ -82,6 +82,7 @@ fn_parse_into_value!(vector);
|
|||
fn_parse_into_value!(set);
|
||||
fn_parse_into_value!(map);
|
||||
fn_parse_into_value!(value);
|
||||
fn_parse_into_value!(bytes);
|
||||
|
||||
#[test]
|
||||
fn test_nil() {
|
||||
|
@ -316,6 +317,38 @@ fn test_uuid() {
|
|||
assert_eq!(value.to_pretty(100).unwrap(), s);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bytes() {
|
||||
assert!(parse::bytes("#bytes01 ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#bytes _ZZ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#bytes 01 ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#01 ").is_err()); // No whitespace.
|
||||
|
||||
let expected = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
let s = format!("{} {}", "#bytes", hex::encode(expected.clone()));
|
||||
let actual: Value = parse::bytes(&s).expect("parse success").into();
|
||||
assert!(actual.is_bytes());
|
||||
assert_eq!(expected, actual.as_bytes().unwrap().to_vec());
|
||||
|
||||
assert_eq!(
|
||||
self::bytes("#bytes 010203050403022a").unwrap(),
|
||||
Value::Bytes(bytes::Bytes::copy_from_slice(&vec!(
|
||||
1, 2, 3, 5, 4, 3, 2, 42
|
||||
)))
|
||||
);
|
||||
let data =
|
||||
r#"[ { :test/instant #inst "2018-01-01T11:00:00Z" :test/bytes #bytes 010203050403022a } ]"#;
|
||||
let result = parse::value(data).unwrap().without_spans().to_string();
|
||||
assert_eq!(data, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_entities() {
|
||||
let d2 = r#"[ { :test/boolean true :test/long 33 :test/double 1.4 :test/string "foo" :test/keyword :foo/bar :test/uuid #uuid "12341234-1234-1234-1234-123412341234" :test/instant #inst "2018-01-01T11:00:00Z" :test/ref 1 :test/bytes #bytes 010203050403022a } ]"#;
|
||||
let r2 = parse::entities(d2);
|
||||
assert!(r2.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inst() {
|
||||
assert!(parse::value("#inst\"2016-01-01T11:00:00.000Z\"").is_err()); // No whitespace.
|
||||
|
@ -584,6 +617,12 @@ fn test_value() {
|
|||
value("#inst \"2017-04-28T20:23:05.187Z\"").unwrap(),
|
||||
Instant(Utc.timestamp(1493410985, 187000000))
|
||||
);
|
||||
assert_eq!(
|
||||
value("#bytes 010203050403022a").unwrap(),
|
||||
Bytes(bytes::Bytes::copy_from_slice(&vec!(
|
||||
1, 2, 3, 5, 4, 3, 2, 42
|
||||
)))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1497,7 +1536,7 @@ macro_rules! def_test_into_type {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(float_cmp))]
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(clippy::float_cmp, clippy::unit_cmp))]
|
||||
fn test_is_and_as_type_helper_functions() {
|
||||
let max_i64 = i64::max_value().to_bigint().unwrap();
|
||||
let bigger = &max_i64 * &max_i64;
|
||||
|
|
31
examples/movies.edn
Normal file

@ -0,0 +1,31 @@
;; movie schema
[{:db/ident :movie/title
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "The title of the movie"}

{:db/ident :movie/genre
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "The genre of the movie"}

{:db/ident :movie/release-year
:db/valueType :db.type/long
:db/cardinality :db.cardinality/one
:db/doc "The year the movie was released in theaters"}]

;; a few movies
[{:movie/title "The Goonies"
:movie/genre "action/adventure"
:movie/release-year 1985}
{:movie/title "Commando"
:movie/genre "thriller/action"
:movie/release-year 1985}
{:movie/title "Repo Man"
:movie/genre "punk dystopia"
:movie/release-year 1984}]

;; query
[:find ?movie-title
:where [_ :movie/title ?movie-title]]
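A hedged sketch of how the new `examples/movies.edn` schema, data, and query could be exercised from Rust. The `Store::open`, `transact`, and `q_once` calls follow the names used in Mentat's README; treat the exact signatures and return types as assumptions rather than a verified example.

use mentat::Store;

fn main() {
    // An empty path opens an in-memory store.
    let mut store = Store::open("").expect("open store");
    store
        .transact(r#"[{:db/ident :movie/title
                       :db/valueType :db.type/string
                       :db/cardinality :db.cardinality/one}]"#)
        .expect("transact schema");
    store
        .transact(r#"[{:movie/title "Repo Man"}]"#)
        .expect("transact data");
    let titles = store
        .q_once("[:find ?movie-title :where [_ :movie/title ?movie-title]]", None)
        .expect("query");
    println!("{:?}", titles);
}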
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "mentat_ffi"
|
||||
version = "0.0.1"
|
||||
version = "0.0.2"
|
||||
authors = ["Emily Toop <etoop@mozilla.com>"]
|
||||
|
||||
[lib]
|
||||
|
@ -13,7 +13,7 @@ sqlcipher = ["mentat/sqlcipher"]
|
|||
bundled_sqlite3 = ["mentat/bundled_sqlite3"]
|
||||
|
||||
[dependencies]
|
||||
libc = "0.2"
|
||||
libc = "~0.2"
|
||||
|
||||
[dependencies.mentat]
|
||||
path = "../"
|
||||
|
|
305
ffi/src/lib.rs
|
@ -70,6 +70,7 @@
|
|||
//! (for `Result<(), T>`). Callers are responsible for freeing the `message` field of `ExternError`.
|
||||
|
||||
#![allow(unused_doc_comments)]
|
||||
#![allow(clippy::missing_safety_doc)]
|
||||
|
||||
extern crate core;
|
||||
extern crate libc;
|
||||
|
@ -176,6 +177,12 @@ pub unsafe extern "C" fn store_open(uri: *const c_char, error: *mut ExternError)
|
|||
}
|
||||
|
||||
/// Variant of store_open that opens an encrypted database.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// Callers are responsible for managing the memory for the return value.
|
||||
/// A destructor `store_destroy` is provided for releasing the memory for this
|
||||
/// pointer type.
|
||||
#[cfg(feature = "sqlcipher")]
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn store_open_encrypted(
|
||||
|
@ -246,6 +253,11 @@ pub unsafe extern "C" fn in_progress_transact<'m>(
|
|||
/// Commit all the transacts that have been performed using this
|
||||
/// in progress transaction.
|
||||
///
|
||||
/// # Safety
|
||||
/// Callers are responsible for managing the memory for the return value.
|
||||
/// A destructor `tx_report_destroy` is provided for releasing the memory for this
|
||||
/// pointer type.
|
||||
///
|
||||
/// TODO: Document the errors that can result from transact
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_commit<'m>(
|
||||
|
@ -260,6 +272,12 @@ pub unsafe extern "C" fn in_progress_commit<'m>(
|
|||
/// Rolls back all the transacts that have been performed using this
|
||||
/// in progress transaction.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// Callers are responsible for managing the memory for the return value.
|
||||
/// A destructor `tx_report_destroy` is provided for releasing the memory for this
|
||||
/// pointer type.
|
||||
///
|
||||
/// TODO: Document the errors that can result from rollback
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_rollback<'m>(
|
||||
|
@ -342,7 +360,7 @@ pub unsafe extern "C" fn store_in_progress_builder<'a, 'c>(
|
|||
let store = &mut *store;
|
||||
let result = store
|
||||
.begin_transaction()
|
||||
.and_then(|in_progress| Ok(in_progress.builder()));
|
||||
.map(|in_progress| in_progress.builder());
|
||||
translate_result(result, error)
|
||||
}
|
||||
|
||||
|
@ -365,7 +383,7 @@ pub unsafe extern "C" fn store_entity_builder_from_temp_id<'a, 'c>(
|
|||
let temp_id = c_char_to_string(temp_id);
|
||||
let result = store
|
||||
.begin_transaction()
|
||||
.and_then(|in_progress| Ok(in_progress.builder().describe_tempid(&temp_id)));
|
||||
.map(|in_progress| in_progress.builder().describe_tempid(&temp_id));
|
||||
translate_result(result, error)
|
||||
}
|
||||
|
||||
|
@ -387,7 +405,7 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
|
|||
let store = &mut *store;
|
||||
let result = store
|
||||
.begin_transaction()
|
||||
.and_then(|in_progress| Ok(in_progress.builder().describe(KnownEntid(entid))));
|
||||
.map(|in_progress| in_progress.builder().describe(KnownEntid(entid)));
|
||||
translate_result(result, error)
|
||||
}
|
||||
|
||||
|
@ -399,10 +417,12 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_string<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_string(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
|
@ -422,10 +442,13 @@ pub unsafe extern "C" fn in_progress_builder_add_string<'a, 'c>(
|
|||
/// If `entid` is not present in the store.
|
||||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_long<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_long(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -446,10 +469,13 @@ pub unsafe extern "C" fn in_progress_builder_add_long<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If `value` is not present as an Entid in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_ref<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_ref(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -471,10 +497,12 @@ pub unsafe extern "C" fn in_progress_builder_add_ref<'a, 'c>(
|
|||
/// If `value` is not present as an attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_keyword<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_keyword(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
|
@ -495,10 +523,12 @@ pub unsafe extern "C" fn in_progress_builder_add_keyword<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_boolean<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_boolean(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
|
@ -519,10 +549,12 @@ pub unsafe extern "C" fn in_progress_builder_add_boolean<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_double<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_double(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
|
@ -543,10 +575,12 @@ pub unsafe extern "C" fn in_progress_builder_add_double<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_timestamp<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_timestamp(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -567,10 +601,12 @@ pub unsafe extern "C" fn in_progress_builder_add_timestamp<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_add_uuid<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_add_uuid(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
|
@ -593,10 +629,12 @@ pub unsafe extern "C" fn in_progress_builder_add_uuid<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_string<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_string(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
|
@ -617,10 +655,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_string<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_long<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_long(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -641,10 +681,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_long<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_ref<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_ref(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -665,10 +707,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_ref<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_keyword<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_keyword(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
|
@ -689,10 +733,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_keyword<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_boolean<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_boolean(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
|
@ -713,10 +759,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_boolean<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_double<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_double(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
|
@ -737,10 +785,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_double<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_timestamp<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_timestamp(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -761,12 +811,13 @@ pub unsafe extern "C" fn in_progress_builder_retract_timestamp<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO don't panic if the UUID is not valid - return result instead.
|
||||
//
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_uuid<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_uuid(
|
||||
builder: *mut InProgressBuilder,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
|
@ -786,10 +837,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_uuid<'a, 'c>(
|
|||
///
|
||||
/// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Document the errors that can result from transact
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_commit<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
pub unsafe extern "C" fn in_progress_builder_commit(
|
||||
builder: *mut InProgressBuilder,
|
||||
error: *mut ExternError,
|
||||
) -> *mut TxReport {
|
||||
assert_not_null!(builder);
|
||||
|
@ -828,10 +881,12 @@ pub unsafe extern "C" fn in_progress_builder_transact<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_string<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_string(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -851,10 +906,12 @@ pub unsafe extern "C" fn entity_builder_add_string<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_long<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_long(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -874,10 +931,12 @@ pub unsafe extern "C" fn entity_builder_add_long<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_ref<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_ref(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -897,10 +956,12 @@ pub unsafe extern "C" fn entity_builder_add_ref<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_keyword<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_keyword(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -920,10 +981,12 @@ pub unsafe extern "C" fn entity_builder_add_keyword<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_boolean<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_boolean(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
error: *mut ExternError,
|
||||
|
@ -943,10 +1006,12 @@ pub unsafe extern "C" fn entity_builder_add_boolean<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_double<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_double(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
error: *mut ExternError,
|
||||
|
@ -966,10 +1031,12 @@ pub unsafe extern "C" fn entity_builder_add_double<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_timestamp<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_timestamp(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -989,10 +1056,12 @@ pub unsafe extern "C" fn entity_builder_add_timestamp<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_uuid<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_add_uuid(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
error: *mut ExternError,
|
||||
|
@ -1014,10 +1083,12 @@ pub unsafe extern "C" fn entity_builder_add_uuid<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_string<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_string(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -1037,10 +1108,12 @@ pub unsafe extern "C" fn entity_builder_retract_string<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_long<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_long(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1060,10 +1133,12 @@ pub unsafe extern "C" fn entity_builder_retract_long<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_ref<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_ref(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1083,10 +1158,12 @@ pub unsafe extern "C" fn entity_builder_retract_ref<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_keyword<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_keyword(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -1106,10 +1183,12 @@ pub unsafe extern "C" fn entity_builder_retract_keyword<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_boolean<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_boolean(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
error: *mut ExternError,
|
||||
|
@ -1129,10 +1208,12 @@ pub unsafe extern "C" fn entity_builder_retract_boolean<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_double<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_double(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
error: *mut ExternError,
|
||||
|
@ -1152,10 +1233,12 @@ pub unsafe extern "C" fn entity_builder_retract_double<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_timestamp<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_timestamp(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1175,11 +1258,13 @@ pub unsafe extern "C" fn entity_builder_retract_timestamp<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO don't panic if the UUID is not valid - return result instead.
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO: don't panic if the UUID is not valid - return result instead.
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_uuid<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_uuid(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
error: *mut ExternError,
|
||||
|
@ -1221,10 +1306,12 @@ pub unsafe extern "C" fn entity_builder_transact<'a, 'c>(
|
|||
///
|
||||
/// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
/// TODO: Document the errors that can result from transact
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_commit<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
pub unsafe extern "C" fn entity_builder_commit(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
error: *mut ExternError,
|
||||
) -> *mut TxReport {
|
||||
assert_not_null!(builder);
|
||||
|
@ -1234,6 +1321,8 @@ pub unsafe extern "C" fn entity_builder_commit<'a, 'c>(
|
|||
|
||||
/// Performs a single transaction against the store.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
/// TODO: Document the errors that can result from transact
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn store_transact(
|
||||
|
@ -1253,6 +1342,7 @@ pub unsafe extern "C" fn store_transact(
|
|||
}
|
||||
|
||||
/// Fetches the `tx_id` for the given [TxReport](mentat::TxReport)`.
|
||||
/// # Safety
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_longlong {
|
||||
assert_not_null!(tx_report);
|
||||
|
@ -1261,6 +1351,7 @@ pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_long
|
|||
}
|
||||
|
||||
/// Fetches the `tx_instant` for the given [TxReport](mentat::TxReport).
|
||||
/// # Safety
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn tx_report_get_tx_instant(tx_report: *mut TxReport) -> c_longlong {
|
||||
assert_not_null!(tx_report);
|
||||
|
@ -1283,7 +1374,7 @@ pub unsafe extern "C" fn tx_report_entity_for_temp_id(
|
|||
let tx_report = &*tx_report;
|
||||
let key = c_char_to_string(tempid);
|
||||
if let Some(entid) = tx_report.tempids.get(key) {
|
||||
Box::into_raw(Box::new(entid.clone() as c_longlong))
|
||||
Box::into_raw(Box::new(*entid as c_longlong))
|
||||
} else {
|
||||
std::ptr::null_mut()
|
||||
}
|
||||
|
@ -1408,7 +1499,7 @@ pub unsafe extern "C" fn query_builder_bind_ref_kw(
let kw = kw_from_string(c_char_to_string(value));
let query_builder = &mut *query_builder;
if let Some(err) = query_builder.bind_ref_from_kw(&var, kw).err() {
panic!(err);
std::panic::panic_any(err);
}
}
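Context for the `panic!(err)` → `std::panic::panic_any(err)` change: the 2021 edition requires `panic!` to take a format string, so panicking with an arbitrary error payload now goes through `panic_any`. A minimal illustration of carrying a non-string payload:

use std::panic;

fn main() {
    let result = panic::catch_unwind(|| {
        // `panic!(42)` would not compile on the 2021 edition; panic_any carries any payload.
        panic::panic_any(42_i32);
    });
    let payload = result.unwrap_err();
    assert_eq!(payload.downcast_ref::<i32>(), Some(&42));
}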
@ -2068,7 +2159,7 @@ pub unsafe extern "C" fn store_register_observer(
|
|||
.map(|(tx_id, changes)| {
|
||||
(
|
||||
*tx_id,
|
||||
changes.into_iter().map(|eid| *eid as c_longlong).collect(),
|
||||
changes.iter().map(|eid| *eid as c_longlong).collect(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
|
|
@ -14,9 +14,12 @@ pub mod strings {
use mentat::Keyword;

pub fn c_char_to_string(cchar: *const c_char) -> &'static str {
/// # Safety
///
/// This function TODO
pub unsafe fn c_char_to_string(cchar: *const c_char) -> &'static str {
assert!(!cchar.is_null());
let c_str = unsafe { CStr::from_ptr(cchar) };
let c_str = CStr::from_ptr(cchar);
c_str.to_str().unwrap_or("")
}

@ -29,8 +32,8 @@ pub mod strings {
pub fn kw_from_string(keyword_string: &'static str) -> Keyword {
// TODO: validate. The input might not be a keyword!
let attr_name = keyword_string.trim_start_matches(":");
let parts: Vec<&str> = attr_name.split("/").collect();
let attr_name = keyword_string.trim_start_matches(':');
let parts: Vec<&str> = attr_name.split('/').collect();
Keyword::namespaced(parts[0], parts[1])
}
}
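The `kw_from_string` hunk swaps string patterns for the cheaper char patterns clippy suggests; as its TODO notes, the input is still unvalidated. A sketch of the same split with a guard, using only std (the helper name is hypothetical, not part of the FFI module):

fn split_keyword(keyword_string: &str) -> Option<(&str, &str)> {
    let attr_name = keyword_string.trim_start_matches(':');
    // ":movie/title" -> ("movie", "title"); None if there is no namespace separator.
    attr_name.split_once('/')
}

fn main() {
    assert_eq!(split_keyword(":movie/title"), Some(("movie", "title")));
    assert_eq!(split_keyword("not-a-keyword"), None);
}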
|
@ -107,6 +110,8 @@ pub mod error {
|
|||
/// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
|
||||
/// message (which was allocated on the heap and should eventually be freed) into
|
||||
/// `error.message`
|
||||
/// # Safety
|
||||
/// Be afraid... TODO
|
||||
pub unsafe fn translate_result<T, E>(result: Result<T, E>, error: *mut ExternError) -> *mut T
|
||||
where
|
||||
E: Display,
|
||||
|
@ -133,6 +138,8 @@ pub mod error {
|
|||
/// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
|
||||
/// message (which was allocated on the heap and should eventually be freed) into
|
||||
/// `error.message`
|
||||
/// # Safety
|
||||
/// Be afraid... TODO
|
||||
pub unsafe fn translate_opt_result<T, E>(
|
||||
result: Result<Option<T>, E>,
|
||||
error: *mut ExternError,
|
||||
|
@ -155,6 +162,8 @@ pub mod error {
|
|||
|
||||
/// Identical to `translate_result`, but with additional type checking for the case that we have
|
||||
/// a `Result<(), E>` (which we're about to drop on the floor).
|
||||
/// # Safety
|
||||
/// Be afraid... TODO
|
||||
pub unsafe fn translate_void_result<E>(result: Result<(), E>, error: *mut ExternError)
|
||||
where
|
||||
E: Display,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "public_traits"
|
||||
version = "0.0.1"
|
||||
version = "0.0.2"
|
||||
workspace = ".."
|
||||
|
||||
[lib]
|
||||
|
@ -13,15 +13,23 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
syncable = ["tolstoy_traits", "hyper", "serde_json"]
|
||||
|
||||
[dependencies]
|
||||
failure = "0.1"
|
||||
failure_derive = "0.1"
|
||||
http = "0.2"
|
||||
tokio-core = "0.1"
|
||||
uuid = "0.8"
|
||||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
http = "~0.2"
|
||||
tokio = { version = "1.8.0", features = ["full"] }
|
||||
uuid = "~1.0"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.21"
|
||||
features = ["limits"]
|
||||
version = "~0.29"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.hyper]
|
||||
version = "~0.14"
|
||||
optional = true
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "~1.0"
|
||||
optional = true
|
||||
|
||||
[dependencies.edn]
|
||||
path = "../edn"
|
||||
|
@ -47,11 +55,3 @@ path = "../sql-traits"
|
|||
[dependencies.tolstoy_traits]
|
||||
path = "../tolstoy-traits"
|
||||
optional = true
|
||||
|
||||
[dependencies.hyper]
|
||||
version = "0.13"
|
||||
optional = true
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "1.0"
|
||||
optional = true
|
||||
|
|
|
@ -16,12 +16,6 @@ use std::collections::BTreeSet;
use std::error::Error;

use rusqlite;
use failure::{
Backtrace,
Context,
Fail,
};
use std::fmt;
use uuid;

use edn;
@ -45,51 +39,8 @@ use serde_json;
|
|||
|
||||
pub type Result<T> = std::result::Result<T, MentatError>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MentatError(Box<Context<MentatErrorKind>>);
|
||||
|
||||
impl Fail for MentatError {
|
||||
#[inline]
|
||||
fn cause(&self) -> Option<&Fail> {
|
||||
self.0.cause()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn backtrace(&self) -> Option<&Backtrace> {
|
||||
self.0.backtrace()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for MentatError {
|
||||
#[inline]
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&*self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl MentatError {
|
||||
#[inline]
|
||||
pub fn kind(&self) -> &MentatErrorKind {
|
||||
&*self.0.get_context()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<MentatErrorKind> for MentatError {
|
||||
#[inline]
|
||||
fn from(kind: MentatErrorKind) -> MentatError {
|
||||
MentatError(Box::new(Context::new(kind)))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Context<MentatErrorKind>> for MentatError {
|
||||
#[inline]
|
||||
fn from(inner: Context<MentatErrorKind>) -> MentatError {
|
||||
MentatError(Box::new(inner))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum MentatErrorKind {
|
||||
pub enum MentatError {
|
||||
#[fail(display = "bad uuid {}", _0)]
|
||||
BadUuid(String),
|
||||
|
||||
|
@ -189,67 +140,9 @@ pub enum MentatErrorKind {
|
|||
SerializationError(#[cause] serde_json::Error),
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for MentatErrorKind {
|
||||
fn from(error: std::io::Error) -> MentatErrorKind {
|
||||
MentatErrorKind::IoError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<rusqlite::Error> for MentatErrorKind {
|
||||
fn from(error: rusqlite::Error) -> MentatErrorKind {
|
||||
MentatErrorKind::RusqliteError(error.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<edn::ParseError> for MentatErrorKind {
|
||||
fn from(error: edn::ParseError) -> MentatErrorKind {
|
||||
MentatErrorKind::EdnParseError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<mentat_db::DbError> for MentatErrorKind {
|
||||
fn from(error: mentat_db::DbError) -> MentatErrorKind {
|
||||
MentatErrorKind::DbError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<mentat_query_algebrizer::AlgebrizerError> for MentatErrorKind {
|
||||
fn from(error: mentat_query_algebrizer::AlgebrizerError) -> MentatErrorKind {
|
||||
MentatErrorKind::AlgebrizerError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<mentat_query_projector::ProjectorError> for MentatErrorKind {
|
||||
fn from(error: mentat_query_projector::ProjectorError) -> MentatErrorKind {
|
||||
MentatErrorKind::ProjectorError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<mentat_query_pull::PullError> for MentatErrorKind {
|
||||
fn from(error: mentat_query_pull::PullError) -> MentatErrorKind {
|
||||
MentatErrorKind::PullError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<mentat_sql::SQLError> for MentatErrorKind {
|
||||
fn from(error: mentat_sql::SQLError) -> MentatErrorKind {
|
||||
MentatErrorKind::SQLError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "syncable")]
|
||||
impl From<mentat_tolstoy::TolstoyError> for MentatErrorKind {
|
||||
fn from(error: mentat_tolstoy::TolstoyError) -> MentatErrorKind {
|
||||
MentatErrorKind::TolstoyError(error)
|
||||
}
|
||||
}
|
||||
|
||||
// XXX reduce dupe if this isn't completely throwaway
|
||||
|
||||
|
||||
impl From<std::io::Error> for MentatError {
|
||||
fn from(error: std::io::Error) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::IoError(error)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -259,76 +152,76 @@ impl From<rusqlite::Error> for MentatError {
|
|||
Some(e) => e.to_string(),
|
||||
None => "".to_string(),
|
||||
};
|
||||
MentatError::from(error).into()
|
||||
MentatError::RusqliteError(error.to_string(), cause)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<uuid::Error> for MentatError {
|
||||
fn from(error: uuid::Error) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::UuidError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<edn::ParseError> for MentatError {
fn from(error: edn::ParseError) -> Self {
MentatError::from(error).into()
MentatError::EdnParseError(error)
}
}
|
||||
impl From<DbError> for MentatError {
|
||||
fn from(error: DbError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::DbError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AlgebrizerError> for MentatError {
|
||||
fn from(error: AlgebrizerError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::AlgebrizerError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ProjectorError> for MentatError {
|
||||
fn from(error: ProjectorError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::ProjectorError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PullError> for MentatError {
|
||||
fn from(error: PullError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::PullError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SQLError> for MentatError {
|
||||
fn from(error: SQLError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::SQLError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "syncable")]
|
||||
impl From<TolstoyError> for MentatError {
|
||||
fn from(error: TolstoyError) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::TolstoyError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "syncable")]
|
||||
impl From<serde_json::Error> for MentatError {
|
||||
fn from(error: serde_json::Error) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::SerializationError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "syncable")]
|
||||
impl From<hyper::Error> for MentatError {
|
||||
fn from(error: hyper::Error) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::NetworkError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "syncable")]
|
||||
impl From<http::uri::InvalidUri> for MentatError {
|
||||
fn from(error: http::uri::InvalidUri) -> Self {
|
||||
MentatError::from(error).into()
|
||||
MentatError::UriError(error)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "query_algebrizer_traits"
|
||||
version = "0.0.1"
|
||||
version = "0.0.2"
|
||||
workspace = ".."
|
||||
|
||||
[lib]
|
||||
|
@ -8,8 +8,8 @@ name = "query_algebrizer_traits"
|
|||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
failure = "0.1"
|
||||
failure_derive = "0.1"
|
||||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
|
||||
[dependencies.edn]
|
||||
path = "../edn"
|
||||
|
|
|
@@ -12,60 +12,10 @@ use std; // To refer to std::result::Result.

 use core_traits::{ValueType, ValueTypeSet};

-use std::fmt;
-use failure::{
-    Backtrace,
-    Context,
-    Fail,
-};
-
-use edn::{query::PlainSymbol, ParseErrorKind};
+use edn::{query::PlainSymbol, ParseError};

 pub type Result<T> = std::result::Result<T, AlgebrizerError>;

-#[derive(Debug)]
-pub struct AlgebrizerError(Box<Context<AlgebrizerErrorKind>>);
-
-impl Fail for AlgebrizerError {
-    #[inline]
-    fn cause(&self) -> Option<&dyn Fail> {
-        self.0.cause()
-    }
-
-    #[inline]
-    fn backtrace(&self) -> Option<&Backtrace> {
-        self.0.backtrace()
-    }
-}
-
-impl fmt::Display for AlgebrizerError {
-    #[inline]
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&*self.0, f)
-    }
-}
-
-impl AlgebrizerError {
-    #[inline]
-    pub fn kind(&self) -> &AlgebrizerErrorKind {
-        &*self.0.get_context()
-    }
-}
-
-impl From<AlgebrizerErrorKind> for AlgebrizerError {
-    #[inline]
-    fn from(kind: AlgebrizerErrorKind) -> AlgebrizerError {
-        AlgebrizerError(Box::new(Context::new(kind)))
-    }
-}
-
-impl From<Context<AlgebrizerErrorKind>> for AlgebrizerError {
-    #[inline]
-    fn from(inner: Context<AlgebrizerErrorKind>) -> AlgebrizerError {
-        AlgebrizerError(Box::new(inner))
-    }
-}
-
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum BindingError {
     NoBoundVariable,

@@ -90,7 +40,7 @@ pub enum BindingError {
 }

 #[derive(Clone, Debug, Eq, Fail, PartialEq)]
-pub enum AlgebrizerErrorKind {
+pub enum AlgebrizerError {
     #[fail(display = "{} var {} is duplicated", _0, _1)]
     DuplicateVariableError(PlainSymbol, &'static str),

@@ -157,11 +107,11 @@ pub enum AlgebrizerErrorKind {
     InvalidBinding(PlainSymbol, BindingError),

     #[fail(display = "{}", _0)]
-    EdnParseError(#[cause] ParseErrorKind),
+    EdnParseError(#[cause] ParseError),
 }

-impl From<ParseErrorKind> for AlgebrizerError {
-    fn from(error: ParseErrorKind) -> AlgebrizerError {
-        AlgebrizerError::from(error).into()
+impl From<ParseError> for AlgebrizerError {
+    fn from(error: ParseError) -> AlgebrizerError {
+        AlgebrizerError::EdnParseError(error)
     }
 }
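The change above collapses the Error/ErrorKind split (a newtype wrapping `failure::Context<AlgebrizerErrorKind>`) into a single `#[derive(Fail)]` enum; the trade-off is losing per-error backtraces and context chaining in exchange for a much smaller surface. A sketch of the resulting shape, with illustrative variant names and assuming the `failure` and `failure_derive` crates are available:

    // One Fail-derived enum instead of a boxed Context wrapper plus a Kind enum.
    #[macro_use]
    extern crate failure_derive;

    #[derive(Clone, Debug, Fail, PartialEq)]
    pub enum AlgebrizerishError {
        #[fail(display = "unbound variable: {}", _0)]
        UnboundVariable(String),

        #[fail(display = "invalid ground constant")]
        InvalidGroundConstant,
    }

    // Callers match on variants directly; there is no `.kind()` indirection
    // and no Box<Context<_>> allocated per error.
    pub type Result<T> = std::result::Result<T, AlgebrizerishError>;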
@@ -9,6 +9,9 @@
 // specific language governing permissions and limitations under the License.

+extern crate failure;
+#[macro_use]
+extern crate failure_derive;
+
 extern crate core_traits;
 extern crate edn;
@@ -1,10 +1,10 @@
 [package]
 name = "mentat_query_algebrizer"
-version = "0.0.1"
+version = "0.0.2"
 workspace = ".."

 [dependencies]
-failure = "0.1.1"
+failure = "~0.1"

 [dependencies.edn]
 path = "../edn"

@@ -19,4 +19,4 @@ path = "../core-traits"
 path = "../query-algebrizer-traits"

 [dev-dependencies]
-itertools = "0.8"
+itertools = "~0.10"
@@ -14,11 +14,11 @@ use mentat_core::{HasSchema, SQLValueType, Schema};

 use edn::query::{FnArg, NonIntegerConstant, Variable};

-use clauses::ConjoiningClauses;
+use crate::clauses::ConjoiningClauses;

-use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
+use query_algebrizer_traits::errors::{AlgebrizerError, Result};

-use types::EmptyBecause;
+use crate::types::EmptyBecause;

 macro_rules! coerce_to_typed_value {
     ($var: ident, $val: ident, $types: expr, $type: path, $constructor: path) => {{
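The import rewrites above (and the many like them later in this diff) are the Rust 2015 to 2018 edition path change: local modules are now named through `crate::` instead of bare, crate-relative paths. A tiny sketch, with an illustrative module name:

    // Rust 2015:  use clauses::ConjoiningClauses;
    // Rust 2018:  use crate::clauses::ConjoiningClauses;
    mod clauses {
        pub struct ConjoiningClauses;
    }

    use crate::clauses::ConjoiningClauses; // `crate::` always means this crate

    fn _demo(_cc: ConjoiningClauses) {}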
@@ -62,11 +62,11 @@ impl ValueTypes for FnArg {

             &FnArg::Constant(NonIntegerConstant::BigInteger(_)) => {
                 // Not yet implemented.
-                bail!(AlgebrizerErrorKind::UnsupportedArgument)
+                bail!(AlgebrizerError::UnsupportedArgument)
             }

             // These don't make sense here. TODO: split FnArg into scalar and non-scalar…
-            &FnArg::Vector(_) | &FnArg::SrcVar(_) => bail!(AlgebrizerErrorKind::UnsupportedArgument),
+            &FnArg::Vector(_) | &FnArg::SrcVar(_) => bail!(AlgebrizerError::UnsupportedArgument),

             // These are all straightforward.
             &FnArg::Constant(NonIntegerConstant::Boolean(_)) => {
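The `bail!` calls here (and throughout this crate) now construct the renamed `AlgebrizerError` variants directly. Mentat's own `bail!` helper is not shown in this diff; a plausible minimal reconstruction, assuming it simply early-returns the error converted via `Into`, would be:

    // Hypothetical bail!-style helper: early-return an error, lifting it into
    // the function's error type with Into. This is an assumption about the
    // macro's behavior, not Mentat's actual source.
    macro_rules! bail {
        ($e:expr) => {
            return Err($e.into());
        };
    }

    #[derive(Debug)]
    struct MyError(&'static str);

    fn check(flag: bool) -> Result<(), MyError> {
        if !flag {
            bail!(MyError("flag was false")); // expands to `return Err(MyError(..).into());`
        }
        Ok(())
    }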
@@ -116,7 +116,7 @@ impl ConjoiningClauses {

         let constrained_types;
         if let Some(required) = self.required_types.get(var) {
-            constrained_types = known_types.intersection(required);
+            constrained_types = known_types.intersection(*required);
         } else {
             constrained_types = known_types;
         }
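The `intersection(*required)` change (and the similar `union`/`is_disjoint` call-site changes later in this diff) suggests the set operations now take their argument by value, which is natural once the set is a small Copy bitset. A sketch under that assumption, with an illustrative type:

    // A small Copy bitset can take its argument by value, so callers deref a
    // borrowed copy instead of passing a reference.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct TypeSet(u16); // one bit per value type

    impl TypeSet {
        fn intersection(self, other: TypeSet) -> TypeSet {
            TypeSet(self.0 & other.0)
        }
        fn is_empty(self) -> bool {
            self.0 == 0
        }
    }

    fn main() {
        let known = TypeSet(0b0110);
        let required = &TypeSet(0b0100); // e.g. borrowed out of a map
        let constrained = known.intersection(*required); // deref the Copy value
        assert!(!constrained.is_empty());
        println!("{:?}", constrained);
    }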
@@ -191,7 +191,7 @@ impl ConjoiningClauses {
             FnArg::Variable(in_var) => {
                 // TODO: technically you could ground an existing variable inside the query….
                 if !self.input_variables.contains(&in_var) {
-                    bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
+                    bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
                 }
                 match self.bound_value(&in_var) {
                     // The type is already known if it's a bound variable….

@@ -200,7 +200,7 @@ impl ConjoiningClauses {
                     // The variable is present in `:in`, but it hasn't yet been provided.
                     // This is a restriction we will eventually relax: we don't yet have a way
                     // to collect variables as part of a computed table or substitution.
-                    bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
+                    bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
                 }
             }
         }

@@ -209,7 +209,7 @@ impl ConjoiningClauses {
             FnArg::Constant(NonIntegerConstant::BigInteger(_)) => unimplemented!(),

             // These don't make sense here.
-            FnArg::Vector(_) | FnArg::SrcVar(_) => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
+            FnArg::Vector(_) | FnArg::SrcVar(_) => bail!(AlgebrizerError::InvalidGroundConstant),

             // These are all straightforward.
             FnArg::Constant(NonIntegerConstant::Boolean(x)) => {
@ -16,22 +16,22 @@ use mentat_core::util::Either;
|
|||
|
||||
use edn::query::{Binding, FnArg, NonIntegerConstant, SrcVar, VariableOrPlaceholder, WhereFn};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
Column, ColumnConstraint, DatomsColumn, DatomsTable, EmptyBecause, FulltextColumn,
|
||||
QualifiedAlias, QueryValue, SourceAlias,
|
||||
};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
#[allow(unused_variables)]
|
||||
pub(crate) fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 3 {
|
||||
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
||||
where_fn.operator.clone(),
|
||||
where_fn.args.len(),
|
||||
3
|
||||
|
@ -40,7 +40,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if where_fn.binding.is_empty() {
|
||||
// The binding must introduce at least one bound variable.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::NoBoundVariable
|
||||
));
|
||||
|
@ -48,7 +48,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if !where_fn.binding.is_valid() {
|
||||
// The binding must not duplicate bound variables.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::RepeatedBoundVariable
|
||||
));
|
||||
|
@ -59,7 +59,7 @@ impl ConjoiningClauses {
|
|||
Binding::BindRel(bindings) => {
|
||||
let bindings_count = bindings.len();
|
||||
if bindings_count < 1 || bindings_count > 4 {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::InvalidNumberOfBindings {
|
||||
number: bindings.len(),
|
||||
|
@ -70,7 +70,7 @@ impl ConjoiningClauses {
|
|||
bindings
|
||||
}
|
||||
Binding::BindScalar(_) | Binding::BindTuple(_) | Binding::BindColl(_) => {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::ExpectedBindRel
|
||||
))
|
||||
|
@ -90,10 +90,10 @@ impl ConjoiningClauses {
|
|||
|
||||
let mut args = where_fn.args.into_iter();
|
||||
|
||||
// TODO: process source variables.
|
||||
// TODO(gburd): process source variables.
|
||||
match args.next().unwrap() {
|
||||
FnArg::SrcVar(SrcVar::DefaultSrc) => {}
|
||||
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"source variable",
|
||||
0
|
||||
|
@ -116,12 +116,12 @@ impl ConjoiningClauses {
|
|||
// TODO: allow non-constant attributes.
|
||||
match self.bound_value(&v) {
|
||||
Some(TypedValue::Ref(entid)) => Some(entid),
|
||||
Some(tv) => bail!(AlgebrizerErrorKind::InputTypeDisagreement(
|
||||
v.name().clone(),
|
||||
Some(tv) => bail!(AlgebrizerError::InputTypeDisagreement(
|
||||
v.name(),
|
||||
ValueType::Ref,
|
||||
tv.value_type()
|
||||
)),
|
||||
None => bail!(AlgebrizerErrorKind::UnboundVariable((*v.0).clone())),
|
||||
None => bail!(AlgebrizerError::UnboundVariable((*v.0).clone())),
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
|
@@ -130,20 +130,13 @@ impl ConjoiningClauses {
         // An unknown ident, or an entity that isn't present in the store, or isn't a fulltext
         // attribute, is likely enough to be a coding error that we choose to bail instead of
         // marking the pattern as known-empty.
-        let a = a.ok_or(AlgebrizerErrorKind::InvalidArgument(
-            where_fn.operator.clone(),
-            "attribute",
-            1,
-        ))?;
-        let attribute =
-            schema
+        let op = where_fn.operator.clone(); //TODO(gburd): remove me...
+        let a = a.ok_or_else(move || AlgebrizerError::InvalidArgument(op, "attribute", 1))?;
+        let op = where_fn.operator.clone(); //TODO(gburd): remove me...
+        let attribute = schema
             .attribute_for_entid(a)
             .cloned()
-            .ok_or(AlgebrizerErrorKind::InvalidArgument(
-                where_fn.operator.clone(),
-                "attribute",
-                1,
-            ))?;
+            .ok_or_else(move || AlgebrizerError::InvalidArgument(op, "attribute", 1))?;

         if !attribute.fulltext {
             // We can never get results from a non-fulltext attribute!
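The hunk above swaps eager `ok_or(...)` for `ok_or_else(move || ...)`: with `ok_or` the error value (and the clone feeding it) is constructed even when the lookup succeeds, while `ok_or_else` only runs the closure on the failure path. A small sketch with illustrative names:

    #[derive(Debug)]
    enum LookupError {
        InvalidArgument(String, &'static str, usize),
    }

    fn find_attribute(attrs: &[(i64, &str)], id: i64, op: &str) -> Result<String, LookupError> {
        let op_owned = op.to_string(); // cloned once, moved into the closure
        attrs
            .iter()
            .find(|(entid, _)| *entid == id)
            .map(|(_, name)| name.to_string())
            // The closure (and the error it builds) only runs when the lookup fails.
            .ok_or_else(move || LookupError::InvalidArgument(op_owned, "attribute", 1))
    }

    fn main() {
        let attrs = [(10, ":foo/fulltext"), (11, ":foo/name")];
        println!("{:?}", find_attribute(&attrs, 10, "fulltext"));
        println!("{:?}", find_attribute(&attrs, 99, "fulltext"));
    }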
@ -190,7 +183,7 @@ impl ConjoiningClauses {
|
|||
FnArg::Variable(in_var) => {
|
||||
match self.bound_value(&in_var) {
|
||||
Some(t @ TypedValue::String(_)) => Either::Left(t),
|
||||
Some(_) => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
Some(_) => bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"string",
|
||||
2
|
||||
|
@ -199,7 +192,7 @@ impl ConjoiningClauses {
|
|||
// Regardless of whether we'll be providing a string later, or the value
|
||||
// comes from a column, it must be a string.
|
||||
if self.known_type(&in_var) != Some(ValueType::String) {
|
||||
bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"string",
|
||||
2
|
||||
|
@ -209,7 +202,7 @@ impl ConjoiningClauses {
|
|||
if self.input_variables.contains(&in_var) {
|
||||
// Sorry, we haven't implemented late binding.
|
||||
// TODO: implement this.
|
||||
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
||||
} else {
|
||||
// It must be bound earlier in the query. We already established that
|
||||
// it must be a string column.
|
||||
|
@ -220,13 +213,13 @@ impl ConjoiningClauses {
|
|||
{
|
||||
Either::Right(binding)
|
||||
} else {
|
||||
bail!(AlgebrizerErrorKind::UnboundVariable((*in_var.0).clone()))
|
||||
bail!(AlgebrizerError::UnboundVariable((*in_var.0).clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"string",
|
||||
2
|
||||
|
@ -271,7 +264,7 @@ impl ConjoiningClauses {
|
|||
|
||||
self.bind_column_to_var(
|
||||
schema,
|
||||
fulltext_values_alias.clone(),
|
||||
fulltext_values_alias,
|
||||
Column::Fulltext(FulltextColumn::Text),
|
||||
var.clone(),
|
||||
);
|
||||
|
@ -284,12 +277,7 @@ impl ConjoiningClauses {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
self.bind_column_to_var(
|
||||
schema,
|
||||
datoms_table_alias.clone(),
|
||||
DatomsColumn::Tx,
|
||||
var.clone(),
|
||||
);
|
||||
self.bind_column_to_var(schema, datoms_table_alias, DatomsColumn::Tx, var.clone());
|
||||
}
|
||||
|
||||
if let VariableOrPlaceholder::Variable(ref var) = b_score {
|
||||
|
@ -298,7 +286,7 @@ impl ConjoiningClauses {
|
|||
|
||||
// We do not allow the score to be bound.
|
||||
if self.value_bindings.contains_key(var) || self.input_variables.contains(var) {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
var.name(),
|
||||
BindingError::UnexpectedBinding
|
||||
));
|
||||
|
@ -323,7 +311,7 @@ mod testing {
|
|||
|
||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
|
||||
#[test]
|
||||
fn test_apply_fulltext() {
|
||||
|
|
|
@ -14,15 +14,15 @@ use mentat_core::Schema;
|
|||
|
||||
use edn::query::{Binding, FnArg, Variable, VariableOrPlaceholder, WhereFn};
|
||||
|
||||
use clauses::{ConjoiningClauses, PushComputed};
|
||||
use crate::clauses::{ConjoiningClauses, PushComputed};
|
||||
|
||||
use clauses::convert::ValueConversion;
|
||||
use crate::clauses::convert::ValueConversion;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
||||
|
||||
use types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
||||
use crate::types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
/// Take a relation: a matrix of values which will successively bind to named variables of
|
||||
|
@ -47,7 +47,7 @@ impl ConjoiningClauses {
|
|||
|
||||
let named_values = ComputedTable::NamedValues {
|
||||
names: names.clone(),
|
||||
values: values,
|
||||
values,
|
||||
};
|
||||
|
||||
let table = self.computed_tables.push_computed(named_values);
|
||||
|
@ -103,13 +103,13 @@ impl ConjoiningClauses {
|
|||
if existing != value {
|
||||
self.mark_known_empty(EmptyBecause::ConflictingBindings {
|
||||
var: var.clone(),
|
||||
existing: existing.clone(),
|
||||
existing,
|
||||
desired: value,
|
||||
});
|
||||
return Ok(());
|
||||
}
|
||||
} else {
|
||||
self.bind_value(&var, value.clone());
|
||||
self.bind_value(&var, value);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -117,7 +117,7 @@ impl ConjoiningClauses {
|
|||
|
||||
pub(crate) fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 1 {
|
||||
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
||||
where_fn.operator.clone(),
|
||||
where_fn.args.len(),
|
||||
1
|
||||
|
@ -128,16 +128,16 @@ impl ConjoiningClauses {
|
|||
|
||||
if where_fn.binding.is_empty() {
|
||||
// The binding must introduce at least one bound variable.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator,
|
||||
BindingError::NoBoundVariable
|
||||
));
|
||||
}
|
||||
|
||||
if !where_fn.binding.is_valid() {
|
||||
// The binding must not duplicate bound variables.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator,
|
||||
BindingError::RepeatedBoundVariable
|
||||
));
|
||||
}
|
||||
|
@ -154,7 +154,7 @@ impl ConjoiningClauses {
|
|||
// Just the same, but we bind more than one column at a time.
|
||||
if children.len() != places.len() {
|
||||
// Number of arguments don't match the number of values. TODO: better error message.
|
||||
bail!(AlgebrizerErrorKind::GroundBindingsMismatch)
|
||||
bail!(AlgebrizerError::GroundBindingsMismatch)
|
||||
}
|
||||
for (place, arg) in places.into_iter().zip(children.into_iter()) {
|
||||
self.apply_ground_place(schema, place, arg)? // TODO: short-circuit on impossible.
|
||||
|
@ -168,7 +168,7 @@ impl ConjoiningClauses {
|
|||
// are all in a single structure. That makes it substantially simpler!
|
||||
(Binding::BindColl(var), FnArg::Vector(children)) => {
|
||||
if children.is_empty() {
|
||||
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
||||
}
|
||||
|
||||
// Turn a collection of arguments into a Vec of `TypedValue`s of the same type.
|
||||
|
@ -180,7 +180,7 @@ impl ConjoiningClauses {
|
|||
.into_iter()
|
||||
.filter_map(|arg| -> Option<Result<TypedValue>> {
|
||||
// We need to get conversion errors out.
|
||||
// We also want to mark known-empty on impossibilty, but
|
||||
// We also want to mark known-empty on impossibility, but
|
||||
// still detect serious errors.
|
||||
match self.typed_value_from_arg(schema, &var, arg, known_types) {
|
||||
Ok(ValueConversion::Val(tv)) => {
|
||||
|
@ -188,7 +188,7 @@ impl ConjoiningClauses {
|
|||
&& !accumulated_types.is_unit()
|
||||
{
|
||||
// Values not all of the same type.
|
||||
Some(Err(AlgebrizerErrorKind::InvalidGroundConstant.into()))
|
||||
Some(Err(AlgebrizerError::InvalidGroundConstant))
|
||||
} else {
|
||||
Some(Ok(tv))
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ impl ConjoiningClauses {
|
|||
skip = Some(because);
|
||||
None
|
||||
}
|
||||
Err(e) => Some(Err(e.into())),
|
||||
Err(e) => Some(Err(e)),
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<TypedValue>>>()?;
|
||||
|
@ -211,7 +211,7 @@ impl ConjoiningClauses {
|
|||
|
||||
// Otherwise, we now have the values and the type.
|
||||
let types = vec![accumulated_types.exemplar().unwrap()];
|
||||
let names = vec![var.clone()];
|
||||
let names = vec![var];
|
||||
|
||||
self.collect_named_bindings(schema, names, types, values);
|
||||
Ok(())
|
||||
|
@ -219,7 +219,7 @@ impl ConjoiningClauses {
|
|||
|
||||
(Binding::BindRel(places), FnArg::Vector(rows)) => {
|
||||
if rows.is_empty() {
|
||||
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
||||
}
|
||||
|
||||
// Grab the known types to which these args must conform, and track
|
||||
|
@ -227,8 +227,8 @@ impl ConjoiningClauses {
|
|||
let template: Vec<Option<(Variable, ValueTypeSet)>> = places
|
||||
.iter()
|
||||
.map(|x| match x {
|
||||
&VariableOrPlaceholder::Placeholder => None,
|
||||
&VariableOrPlaceholder::Variable(ref v) => {
|
||||
VariableOrPlaceholder::Placeholder => None,
|
||||
VariableOrPlaceholder::Variable(ref v) => {
|
||||
Some((v.clone(), self.known_type_set(v)))
|
||||
}
|
||||
})
|
||||
|
@ -243,7 +243,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if expected_width == 0 {
|
||||
// They can't all be placeholders.
|
||||
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
||||
}
|
||||
|
||||
// Accumulate values into `matrix` and types into `a_t_f_c`.
|
||||
|
@ -259,7 +259,7 @@ impl ConjoiningClauses {
|
|||
FnArg::Vector(cols) => {
|
||||
// Make sure that every row is the same length.
|
||||
if cols.len() != full_width {
|
||||
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
||||
}
|
||||
|
||||
// TODO: don't accumulate twice.
|
||||
|
@ -271,7 +271,7 @@ impl ConjoiningClauses {
|
|||
// Convert each item in the row.
|
||||
// If any value in the row is impossible, then skip the row.
|
||||
// If all rows are impossible, fail the entire CC.
|
||||
if let &Some(ref pair) = pair {
|
||||
if let Some(ref pair) = pair {
|
||||
match self.typed_value_from_arg(schema, &pair.0, col, pair.1)? {
|
||||
ValueConversion::Val(tv) => vals.push(tv),
|
||||
ValueConversion::Impossible(because) => {
|
||||
|
@ -297,12 +297,12 @@ impl ConjoiningClauses {
|
|||
let inserted = acc.insert(val.value_type());
|
||||
if inserted && !acc.is_unit() {
|
||||
// Heterogeneous types.
|
||||
bail!(AlgebrizerErrorKind::InvalidGroundConstant)
|
||||
bail!(AlgebrizerError::InvalidGroundConstant)
|
||||
}
|
||||
matrix.push(val);
|
||||
}
|
||||
}
|
||||
_ => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
|
||||
_ => bail!(AlgebrizerError::InvalidGroundConstant),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -329,7 +329,7 @@ impl ConjoiningClauses {
|
|||
self.collect_named_bindings(schema, names, types, matrix);
|
||||
Ok(())
|
||||
}
|
||||
(_, _) => bail!(AlgebrizerErrorKind::InvalidGroundConstant),
|
||||
(_, _) => bail!(AlgebrizerError::InvalidGroundConstant),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -342,7 +342,7 @@ mod testing {
|
|||
|
||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
|
||||
#[test]
|
||||
fn test_apply_ground() {
|
||||
|
|
|
@@ -14,7 +14,7 @@ use core_traits::{TypedValue, ValueType};

 use edn::query::Variable;

-use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
+use query_algebrizer_traits::errors::{AlgebrizerError, Result};

 /// Define the inputs to a query. This is in two parts: a set of values known now, and a set of
 /// types known now.

@@ -55,7 +55,7 @@ impl QueryInputs {
                 .iter()
                 .map(|(var, val)| (var.clone(), val.value_type()))
                 .collect(),
-            values: values,
+            values,
         }
     }

@@ -69,13 +69,10 @@ impl QueryInputs {
             let old = types.insert(var.clone(), t);
             if let Some(old) = old {
                 if old != t {
-                    bail!(AlgebrizerErrorKind::InputTypeDisagreement(var.name(), old, t));
+                    bail!(AlgebrizerError::InputTypeDisagreement(var.name(), old, t));
                 }
             }
         }
-        Ok(QueryInputs {
-            types: types,
-            values: values,
-        })
+        Ok(QueryInputs { types, values })
     }
 }
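The `values: values,` to `values,` edits above use struct field init shorthand: when a local variable shares the field's name, the repetition can be dropped. A minimal sketch with illustrative types:

    use std::collections::BTreeMap;

    struct QueryInputsLike {
        types: BTreeMap<String, &'static str>,
        values: BTreeMap<String, i64>,
    }

    fn build(types: BTreeMap<String, &'static str>, values: BTreeMap<String, i64>) -> QueryInputsLike {
        // Instead of `QueryInputsLike { types: types, values: values }`:
        QueryInputsLike { types, values }
    }

    fn main() {
        let q = build(BTreeMap::new(), BTreeMap::new());
        assert!(q.types.is_empty() && q.values.is_empty());
    }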
@ -24,9 +24,9 @@ use mentat_core::counter::RcCounter;
|
|||
|
||||
use edn::query::{Element, FindSpec, Keyword, PatternNonValuePlace, Pull, Variable, WhereClause};
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
Column, ColumnConstraint, ColumnIntersection, ComputedTable, DatomsColumn, DatomsTable,
|
||||
EmptyBecause, EvolvedNonValuePlace, EvolvedPattern, EvolvedValuePlace, FulltextColumn,
|
||||
PlaceOrEmpty, QualifiedAlias, QueryValue, SourceAlias, TableAlias,
|
||||
|
@ -45,11 +45,11 @@ mod ground;
|
|||
mod tx_log_api;
|
||||
mod where_fn;
|
||||
|
||||
use validate::{validate_not_join, validate_or_join};
|
||||
use crate::validate::{validate_not_join, validate_or_join};
|
||||
|
||||
pub use self::inputs::QueryInputs;
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
trait Contains<K, T> {
|
||||
fn when_contains<F: FnOnce() -> T>(&self, k: &K, f: F) -> Option<T>;
|
||||
|
@ -147,8 +147,8 @@ pub struct ConjoiningClauses {
|
|||
/// A map from var to qualified columns. Used to project.
|
||||
pub column_bindings: BTreeMap<Variable, Vec<QualifiedAlias>>,
|
||||
|
||||
/// A list of variables mentioned in the enclosing query's :in clause. These must all be bound
|
||||
/// before the query can be executed. TODO: clarify what this means for nested CCs.
|
||||
/// A list of variables mentioned in the enclosing query's `:in` clause all of which must be
|
||||
/// bound before the query can be executed. TODO: clarify what this means for nested CCs.
|
||||
pub input_variables: BTreeSet<Variable>,
|
||||
|
||||
/// In some situations -- e.g., when a query is being run only once -- we know in advance the
|
||||
|
@ -279,7 +279,7 @@ impl ConjoiningClauses {
|
|||
values.keep_intersected_keys(&in_variables);
|
||||
|
||||
let mut cc = ConjoiningClauses {
|
||||
alias_counter: alias_counter,
|
||||
alias_counter,
|
||||
input_variables: in_variables,
|
||||
value_bindings: values,
|
||||
..Default::default()
|
||||
|
@@ -301,15 +301,9 @@
 impl ConjoiningClauses {
     pub(crate) fn derive_types_from_find_spec(&mut self, find_spec: &FindSpec) {
         for spec in find_spec.columns() {
-            match spec {
-                &Element::Pull(Pull {
-                    ref var,
-                    patterns: _,
-                }) => {
+            if let Element::Pull(Pull { ref var, .. }) = spec {
                 self.constrain_var_to_type(var.clone(), ValueType::Ref);
             }
-                _ => {}
-            }
         }
     }
 }
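The hunk above turns a single-arm `match` with a `_ => {}` catch-all into `if let`, and uses `..` to skip the fields it does not care about. A sketch of the same shape with illustrative types:

    enum Element {
        Pull { var: String, patterns: usize },
        Scalar(String),
    }

    fn pull_vars(elements: &[Element]) -> Vec<&str> {
        let mut refs = Vec::new();
        for spec in elements {
            // Was: match spec { Element::Pull { var, .. } => ..., _ => {} }
            if let Element::Pull { var, .. } = spec {
                refs.push(var.as_str());
            }
        }
        refs
    }

    fn main() {
        let els = vec![
            Element::Pull { var: "?x".into(), patterns: 2 },
            Element::Scalar("?y".into()),
        ];
        assert_eq!(pull_vars(&els), vec!["?x"]);
    }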
@@ -410,7 +404,7 @@ impl ConjoiningClauses {
         self.known_types
             .get(var)
             .cloned()
-            .unwrap_or(ValueTypeSet::any())
+            .unwrap_or_else(ValueTypeSet::any)
     }

     pub(crate) fn bind_column_to_var<C: Into<Column>>(

@@ -514,7 +508,7 @@ impl ConjoiningClauses {

         self.column_bindings
             .entry(var)
-            .or_insert(vec![])
+            .or_insert_with(Vec::new)
             .push(alias);
     }
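As with `ok_or_else` earlier, the `or_insert_with(Vec::new)` and `unwrap_or_else(ValueTypeSet::any)` forms take a closure (or function) and only build the default when it is actually needed. A small sketch of the map-entry case with illustrative names:

    use std::collections::BTreeMap;

    fn bind(bindings: &mut BTreeMap<String, Vec<String>>, var: &str, alias: &str) {
        bindings
            .entry(var.to_string())
            .or_insert_with(Vec::new) // Vec::new only runs for a vacant entry
            .push(alias.to_string());
    }

    fn main() {
        let mut bindings = BTreeMap::new();
        bind(&mut bindings, "?x", "datoms00.e");
        bind(&mut bindings, "?x", "datoms01.e");
        assert_eq!(bindings["?x"].len(), 2);
    }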
@ -585,10 +579,10 @@ impl ConjoiningClauses {
|
|||
these_types: ValueTypeSet,
|
||||
) -> Option<EmptyBecause> {
|
||||
if let Some(existing) = self.known_types.get(var) {
|
||||
if existing.intersection(&these_types).is_empty() {
|
||||
if existing.intersection(these_types).is_empty() {
|
||||
return Some(EmptyBecause::TypeMismatch {
|
||||
var: var.clone(),
|
||||
existing: existing.clone(),
|
||||
existing: *existing,
|
||||
desired: these_types,
|
||||
});
|
||||
}
|
||||
|
@ -640,7 +634,7 @@ impl ConjoiningClauses {
|
|||
// We have an existing requirement. The new requirement will be
|
||||
// the intersection, but we'll `mark_known_empty` if that's empty.
|
||||
let existing = *entry.get();
|
||||
let intersection = types.intersection(&existing);
|
||||
let intersection = types.intersection(existing);
|
||||
entry.insert(intersection);
|
||||
|
||||
if !intersection.is_empty() {
|
||||
|
@ -648,8 +642,8 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
EmptyBecause::TypeMismatch {
|
||||
var: var,
|
||||
existing: existing,
|
||||
var,
|
||||
existing,
|
||||
desired: types,
|
||||
}
|
||||
}
|
||||
|
@ -684,7 +678,7 @@ impl ConjoiningClauses {
|
|||
panic!("Uh oh: we failed this pattern, probably because {:?} couldn't match, but now we're broadening its type.",
|
||||
e.key());
|
||||
}
|
||||
new = existing_types.union(&new_types);
|
||||
new = existing_types.union(new_types);
|
||||
}
|
||||
e.insert(new);
|
||||
}
|
||||
|
@ -710,11 +704,11 @@ impl ConjoiningClauses {
|
|||
e.insert(types);
|
||||
}
|
||||
Entry::Occupied(mut e) => {
|
||||
let intersected: ValueTypeSet = types.intersection(e.get());
|
||||
let intersected: ValueTypeSet = types.intersection(*e.get());
|
||||
if intersected.is_empty() {
|
||||
let reason = EmptyBecause::TypeMismatch {
|
||||
var: e.key().clone(),
|
||||
existing: e.get().clone(),
|
||||
existing: *e.get(),
|
||||
desired: types,
|
||||
};
|
||||
empty_because = Some(reason);
|
||||
|
@ -751,7 +745,7 @@ impl ConjoiningClauses {
|
|||
// If it's a variable, record that it has the right type.
|
||||
// Ident or attribute resolution errors (the only other check we need to do) will be done
|
||||
// by the caller.
|
||||
if let &EvolvedNonValuePlace::Variable(ref v) = value {
|
||||
if let EvolvedNonValuePlace::Variable(ref v) = value {
|
||||
self.constrain_var_to_type(v.clone(), ValueType::Ref)
|
||||
}
|
||||
}
|
||||
|
@@ -784,12 +778,12 @@ impl ConjoiningClauses {
     ) -> ::std::result::Result<DatomsTable, EmptyBecause> {
         if attribute.fulltext {
             match value {
-                &EvolvedValuePlace::Placeholder => Ok(DatomsTable::Datoms), // We don't need the value.
+                EvolvedValuePlace::Placeholder => Ok(DatomsTable::Datoms), // We don't need the value.

                 // TODO: an existing non-string binding can cause this pattern to fail.
-                &EvolvedValuePlace::Variable(_) => Ok(DatomsTable::FulltextDatoms),
+                EvolvedValuePlace::Variable(_) => Ok(DatomsTable::FulltextDatoms),

-                &EvolvedValuePlace::Value(TypedValue::String(_)) => Ok(DatomsTable::FulltextDatoms),
+                EvolvedValuePlace::Value(TypedValue::String(_)) => Ok(DatomsTable::FulltextDatoms),

                 _ => {
                     // We can't succeed if there's a non-string constant value for a fulltext
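Dropping the `&` from these arms (and the many similar arms in this diff) relies on match ergonomics: when the scrutinee is a reference, non-reference patterns match through it and the bindings become references automatically. A minimal sketch with an illustrative enum standing in for EvolvedValuePlace:

    #[derive(Debug)]
    enum Place {
        Placeholder,
        Variable(String),
        Value(String),
    }

    fn table_for(value: &Place) -> &'static str {
        match value {
            // Before: `&Place::Placeholder => ...`, `&Place::Variable(ref v) => ...`
            Place::Placeholder => "datoms",
            Place::Variable(_) => "fulltext_datoms",
            Place::Value(_) => "fulltext_datoms",
        }
    }

    fn main() {
        assert_eq!(table_for(&Place::Placeholder), "datoms");
        assert_eq!(table_for(&Place::Variable("?v".into())), "fulltext_datoms");
    }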
@@ -802,9 +796,9 @@ impl ConjoiningClauses {
         }
     }

-    fn table_for_unknown_attribute<'s, 'a>(
+    fn table_for_unknown_attribute(
         &self,
-        value: &'a EvolvedValuePlace,
+        value: &EvolvedValuePlace,
     ) -> ::std::result::Result<DatomsTable, EmptyBecause> {
         // If the value is known to be non-textual, we can simply use the regular datoms
         // table (TODO: and exclude on `index_fulltext`!).
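Here the explicit lifetime parameters bought nothing: neither is tied to the return type, so lifetime elision covers them and the signature reads cleaner. A sketch of the same cleanup with illustrative types:

    struct Analyzer;

    impl Analyzer {
        // Before: fn table_for_unknown<'s, 'a>(&'s self, value: &'a str) -> Result<&'static str, ()>
        // After: the named lifetimes add nothing, so elision handles them.
        fn table_for_unknown(&self, value: &str) -> Result<&'static str, ()> {
            if value.is_empty() {
                Err(())
            } else {
                Ok("datoms")
            }
        }
    }

    fn main() {
        assert_eq!(Analyzer.table_for_unknown("x"), Ok("datoms"));
    }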
@ -817,7 +811,7 @@ impl ConjoiningClauses {
|
|||
Ok(match value {
|
||||
// TODO: see if the variable is projected, aggregated, or compared elsewhere in
|
||||
// the query. If it's not, we don't need to use all_datoms here.
|
||||
&EvolvedValuePlace::Variable(ref v) => {
|
||||
EvolvedValuePlace::Variable(ref v) => {
|
||||
// If `required_types` and `known_types` don't exclude strings,
|
||||
// we need to query `all_datoms`.
|
||||
if self
|
||||
|
@ -834,7 +828,7 @@ impl ConjoiningClauses {
|
|||
DatomsTable::Datoms
|
||||
}
|
||||
}
|
||||
&EvolvedValuePlace::Value(TypedValue::String(_)) => DatomsTable::AllDatoms,
|
||||
EvolvedValuePlace::Value(TypedValue::String(_)) => DatomsTable::AllDatoms,
|
||||
_ => DatomsTable::Datoms,
|
||||
})
|
||||
}
|
||||
|
@ -850,14 +844,14 @@ impl ConjoiningClauses {
|
|||
value: &'a EvolvedValuePlace,
|
||||
) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
match attribute {
|
||||
&EvolvedNonValuePlace::Entid(id) => schema
|
||||
.attribute_for_entid(id)
|
||||
.ok_or_else(|| EmptyBecause::InvalidAttributeEntid(id))
|
||||
EvolvedNonValuePlace::Entid(id) => schema
|
||||
.attribute_for_entid(*id)
|
||||
.ok_or_else(|| EmptyBecause::InvalidAttributeEntid(*id))
|
||||
.and_then(|attribute| self.table_for_attribute_and_value(attribute, value)),
|
||||
// TODO: In a prepared context, defer this decision until a second algebrizing phase.
|
||||
// #278.
|
||||
&EvolvedNonValuePlace::Placeholder => self.table_for_unknown_attribute(value),
|
||||
&EvolvedNonValuePlace::Variable(ref v) => {
|
||||
EvolvedNonValuePlace::Placeholder => self.table_for_unknown_attribute(value),
|
||||
EvolvedNonValuePlace::Variable(ref v) => {
|
||||
// See if we have a binding for the variable.
|
||||
match self.bound_value(v) {
|
||||
// TODO: In a prepared context, defer this decision until a second algebrizing phase.
|
||||
|
@ -883,7 +877,7 @@ impl ConjoiningClauses {
|
|||
// attribute place.
|
||||
Err(EmptyBecause::InvalidBinding(
|
||||
Column::Fixed(DatomsColumn::Attribute),
|
||||
v.clone(),
|
||||
v,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
@ -922,8 +916,8 @@ impl ConjoiningClauses {
|
|||
) -> Option<&'s Attribute> {
|
||||
match value {
|
||||
// We know this one is known if the attribute lookup succeeds…
|
||||
&TypedValue::Ref(id) => schema.attribute_for_entid(id),
|
||||
&TypedValue::Keyword(ref kw) => schema.attribute_for_ident(kw).map(|(a, _id)| a),
|
||||
TypedValue::Ref(id) => schema.attribute_for_entid(*id),
|
||||
TypedValue::Keyword(ref kw) => schema.attribute_for_ident(kw).map(|(a, _id)| a),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -981,7 +975,7 @@ impl ConjoiningClauses {
|
|||
pub(crate) fn expand_column_bindings(&mut self) {
|
||||
for cols in self.column_bindings.values() {
|
||||
if cols.len() > 1 {
|
||||
let ref primary = cols[0];
|
||||
let primary = &cols[0];
|
||||
let secondaries = cols.iter().skip(1);
|
||||
for secondary in secondaries {
|
||||
// TODO: if both primary and secondary are .v, should we make sure
|
||||
|
@ -1029,18 +1023,18 @@ impl ConjoiningClauses {
|
|||
let mut empty_because: Option<EmptyBecause> = None;
|
||||
for (var, types) in self.required_types.clone().into_iter() {
|
||||
if let Some(already_known) = self.known_types.get(&var) {
|
||||
if already_known.is_disjoint(&types) {
|
||||
if already_known.is_disjoint(types) {
|
||||
// If we know the constraint can't be one of the types
|
||||
// the variable could take, then we know we're empty.
|
||||
empty_because = Some(EmptyBecause::TypeMismatch {
|
||||
var: var,
|
||||
var,
|
||||
existing: *already_known,
|
||||
desired: types,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
if already_known.is_subset(&types) {
|
||||
if already_known.is_subset(types) {
|
||||
// TODO: I'm not convinced that we can do nothing here.
|
||||
//
|
||||
// Consider `[:find ?x ?v :where [_ _ ?v] [(> ?v 10)] [?x :foo/long ?v]]`.
|
||||
|
@ -1071,7 +1065,7 @@ impl ConjoiningClauses {
|
|||
let qa = self
|
||||
.extracted_types
|
||||
.get(&var)
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()))?;
|
||||
.ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))?;
|
||||
self.wheres.add_intersection(ColumnConstraint::HasTypes {
|
||||
value: qa.0.clone(),
|
||||
value_types: types,
|
||||
|
@ -1129,7 +1123,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
fn mark_as_ref(&mut self, pos: &PatternNonValuePlace) {
|
||||
if let &PatternNonValuePlace::Variable(ref var) = pos {
|
||||
if let PatternNonValuePlace::Variable(ref var) = pos {
|
||||
self.constrain_var_to_type(var.clone(), ValueType::Ref)
|
||||
}
|
||||
}
|
||||
|
@ -1142,13 +1136,13 @@ impl ConjoiningClauses {
|
|||
// We apply (top level) type predicates first as an optimization.
|
||||
for clause in where_clauses.iter() {
|
||||
match clause {
|
||||
&WhereClause::TypeAnnotation(ref anno) => {
|
||||
WhereClause::TypeAnnotation(ref anno) => {
|
||||
self.apply_type_anno(anno)?;
|
||||
}
|
||||
|
||||
// Patterns are common, so let's grab as much type information from
|
||||
// them as we can.
|
||||
&WhereClause::Pattern(ref p) => {
|
||||
WhereClause::Pattern(ref p) => {
|
||||
self.mark_as_ref(&p.entity);
|
||||
self.mark_as_ref(&p.attribute);
|
||||
self.mark_as_ref(&p.tx);
|
||||
|
@ -1167,7 +1161,7 @@ impl ConjoiningClauses {
|
|||
let mut patterns: VecDeque<EvolvedPattern> = VecDeque::with_capacity(remaining);
|
||||
for clause in where_clauses {
|
||||
remaining -= 1;
|
||||
if let &WhereClause::TypeAnnotation(_) = &clause {
|
||||
if let WhereClause::TypeAnnotation(_) = &clause {
|
||||
continue;
|
||||
}
|
||||
match clause {
|
||||
|
@ -1233,7 +1227,7 @@ impl PushComputed for Vec<ComputedTable> {
|
|||
#[cfg(test)]
|
||||
fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
||||
schema.entid_map.insert(e, i.clone());
|
||||
schema.ident_map.insert(i.clone(), e);
|
||||
schema.ident_map.insert(i, e);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -10,13 +10,13 @@
|
|||
|
||||
use edn::query::{ContainsVariables, NotJoin, UnifyVars};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use types::{ColumnConstraint, ComputedTable};
|
||||
use crate::types::{ColumnConstraint, ComputedTable};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
pub(crate) fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
|
||||
|
@ -35,7 +35,7 @@ impl ConjoiningClauses {
|
|||
let col = self.column_bindings.get(&v).unwrap()[0].clone();
|
||||
template.column_bindings.insert(v.clone(), vec![col]);
|
||||
} else {
|
||||
bail!(AlgebrizerErrorKind::UnboundVariable(v.name()));
|
||||
bail!(AlgebrizerError::UnboundVariable(v.name()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -66,7 +66,7 @@ impl ConjoiningClauses {
             return Ok(());
         }

-        let subquery = ComputedTable::Subquery(template);
+        let subquery = ComputedTable::Subquery(Box::new(template));

         self.wheres
             .add_intersection(ColumnConstraint::NotExists(subquery));
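Boxing the subquery here (and in the test expectations below) keeps the `ComputedTable` enum small: a variant that carries an entire ConjoiningClauses-sized payload would otherwise inflate every value of the enum, which is the sort of thing clippy's large_enum_variant lint flags. A sketch of the idea with illustrative types:

    #[derive(Debug)]
    struct Template {
        // stand-in for a large subquery payload
        wheres: Vec<String>,
        bindings: Vec<(String, String)>,
    }

    #[derive(Debug)]
    enum Computed {
        NamedValues(Vec<String>),
        Subquery(Box<Template>), // boxed so Computed stays pointer-sized here
    }

    fn main() {
        let template = Template { wheres: vec!["a = b".into()], bindings: vec![] };
        let subquery = Computed::Subquery(Box::new(template));
        println!("enum size: {} bytes", std::mem::size_of::<Computed>());
        if let Computed::Subquery(t) = &subquery {
            assert_eq!(t.wheres.len(), 1);
        }
    }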
@ -87,16 +87,16 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, PlainSymbol, Variable};
|
||||
|
||||
use clauses::{add_attribute, associate_ident, QueryInputs};
|
||||
use crate::clauses::{add_attribute, associate_ident, QueryInputs};
|
||||
|
||||
use query_algebrizer_traits::errors::AlgebrizerErrorKind;
|
||||
use query_algebrizer_traits::errors::AlgebrizerError;
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
|
||||
DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
|
||||
};
|
||||
|
||||
use {algebrize, algebrize_with_inputs, parse_find_string};
|
||||
use crate::{algebrize, algebrize_with_inputs, parse_find_string};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let known = Known::for_schema(schema);
|
||||
|
@ -216,26 +216,17 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), ambar)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2v.clone(),
|
||||
daphne,
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
QueryValue::Column(d2e),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -247,16 +238,10 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
john
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(subquery)
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
@ -317,17 +302,14 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vy.clone(), vec![d0v.clone(), d3v.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d3a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d3a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d3e.clone()),
|
||||
QueryValue::Column(d3e),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
QueryValue::Column(d3v.clone()),
|
||||
d0v,
|
||||
QueryValue::Column(d3v),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -336,26 +318,17 @@ mod testing {
|
|||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));
|
||||
.insert(vy, ValueTypeSet::of_one(ValueType::String));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
let expected_wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a.clone(), knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
age.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
eleven,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
name.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v.clone(), john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, eleven)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, name)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(subquery),
|
||||
ComputedTable::Subquery(Box::new(subquery)),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
|
@ -423,29 +396,17 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
john.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2v.clone(),
|
||||
daphne.clone(),
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
QueryValue::Column(d2e),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -457,17 +418,14 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
age.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
|
||||
operator: Inequality::LessThan,
|
||||
left: QueryValue::Column(d0v.clone()),
|
||||
left: QueryValue::Column(d0v),
|
||||
right: QueryValue::TypedValue(TypedValue::Long(30)),
|
||||
}),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(subquery)
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
@ -490,7 +448,7 @@ mod testing {
|
|||
let d0 = "datoms00".to_string();
|
||||
let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
|
||||
let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
|
||||
let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);
|
||||
let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
|
||||
|
||||
let d1 = "datoms01".to_string();
|
||||
let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
|
||||
|
@ -534,51 +492,36 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
d1a,
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
ambar,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
|
||||
]),
|
||||
])),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2v.clone(),
|
||||
daphne,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
QueryValue::Column(d1e),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
d0e,
|
||||
QueryValue::Column(d2e),
|
||||
)),
|
||||
]);
|
||||
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
.insert(vx, ValueTypeSet::of_one(ValueType::Ref));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
bill
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(subquery)
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
@ -611,7 +554,7 @@ mod testing {
|
|||
let d0 = "datoms00".to_string();
|
||||
let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
|
||||
let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
|
||||
let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);
|
||||
let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
|
||||
|
||||
let d1 = "datoms01".to_string();
|
||||
let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
|
||||
|
@ -624,20 +567,17 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
d0e,
|
||||
QueryValue::Column(d1e),
|
||||
)),
|
||||
]);
|
||||
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
.insert(vx, ValueTypeSet::of_one(ValueType::Ref));
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));
|
||||
|
@ -647,22 +587,16 @@ mod testing {
|
|||
subquery.input_variables = input_vars;
|
||||
subquery
|
||||
.value_bindings
|
||||
.insert(vy.clone(), TypedValue::typed_string("John"));
|
||||
.insert(vy, TypedValue::typed_string("John"));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
bill
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(subquery)
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
@@ -714,7 +648,7 @@ mod testing {
         let parsed = parse_find_string(query).expect("parse failed");
         let err = algebrize(known, parsed).expect_err("algebrization should have failed");
         match err {
-            AlgebrizerErrorKind::UnboundVariable(var) => {
+            AlgebrizerError::UnboundVariable(var) => {
                 assert_eq!(var, PlainSymbol("?x".to_string()));
             }
             x => panic!("expected Unbound Variable error, got {:?}", x),
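The test above follows a common shape for asserting on error variants: force the failure with `expect_err`, then match on the variant and panic with a descriptive message otherwise. A self-contained sketch of that pattern with illustrative names:

    #[derive(Debug, PartialEq)]
    enum AlgError {
        UnboundVariable(String),
        Other,
    }

    fn algebrize(query: &str) -> Result<(), AlgError> {
        if query.contains("?x") {
            Err(AlgError::UnboundVariable("?x".to_string()))
        } else {
            Err(AlgError::Other)
        }
    }

    #[test]
    fn unbound_variable_is_reported() {
        let err = algebrize("[:find ?x :where ...]").expect_err("algebrization should have failed");
        match err {
            AlgError::UnboundVariable(var) => assert_eq!(var, "?x"),
            x => panic!("expected UnboundVariable, got {:?}", x),
        }
    }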
@ -18,17 +18,17 @@ use edn::query::{
|
|||
WhereClause,
|
||||
};
|
||||
|
||||
use clauses::{ConjoiningClauses, PushComputed};
|
||||
use crate::clauses::{ConjoiningClauses, PushComputed};
|
||||
|
||||
use query_algebrizer_traits::errors::Result;
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
ColumnAlternation, ColumnConstraintOrAlternation, ColumnIntersection, ComputedTable,
|
||||
DatomsTable, EmptyBecause, EvolvedPattern, PlaceOrEmpty, QualifiedAlias, SourceAlias,
|
||||
VariableColumn,
|
||||
};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
/// Return true if both left and right are the same variable or both are non-variable.
|
||||
fn _simply_matches_place(left: &PatternNonValuePlace, right: &PatternNonValuePlace) -> bool {
|
||||
|
@ -642,7 +642,7 @@ impl ConjoiningClauses {
|
|||
// For any variable which has an imprecise type anywhere in the UNION, add it to the
|
||||
// set that needs type extraction. All UNION arms must project the same columns.
|
||||
for var in projection.iter() {
|
||||
if acc.iter().any(|cc| !cc.known_type(var).is_some()) {
|
||||
if acc.iter().any(|cc| cc.known_type(var).is_none()) {
|
||||
type_needed.insert(var.clone());
|
||||
}
|
||||
}
|
||||
|
@ -672,7 +672,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
let union = ComputedTable::Union {
|
||||
projection: projection,
|
||||
projection,
|
||||
type_extraction: type_needed,
|
||||
arms: acc,
|
||||
};
|
||||
|
@ -727,10 +727,10 @@ fn union_types(
|
|||
for (var, new_types) in additional_types {
|
||||
match into.entry(var.clone()) {
|
||||
Entry::Vacant(e) => {
|
||||
e.insert(new_types.clone());
|
||||
e.insert(*new_types);
|
||||
}
|
||||
Entry::Occupied(mut e) => {
|
||||
let new = e.get().union(&new_types);
|
||||
let new = e.get().union(*new_types);
|
||||
e.insert(new);
|
||||
}
|
||||
}
|
||||
|
@ -750,14 +750,14 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, Variable};
|
||||
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
ColumnConstraint, DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue,
|
||||
SourceAlias,
|
||||
};
|
||||
|
||||
use {algebrize, algebrize_with_counter, parse_find_string};
|
||||
use crate::{algebrize, algebrize_with_counter, parse_find_string};
|
||||
|
||||
fn alg(known: Known, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
|
@ -920,12 +920,10 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows
|
||||
d0a, knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
daphne
|
||||
d0v, daphne
|
||||
))
|
||||
]),
|
||||
])
|
||||
|
@ -967,10 +965,7 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
name.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, name)),
|
||||
ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
|
@ -994,12 +989,10 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
knows
|
||||
d1a, knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
daphne
|
||||
d1v, daphne
|
||||
))
|
||||
]),
|
||||
])),
|
||||
|
@ -1051,13 +1044,10 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
age.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
|
||||
operator: Inequality::LessThan,
|
||||
left: QueryValue::Column(d0v.clone()),
|
||||
left: QueryValue::Column(d0v),
|
||||
right: QueryValue::TypedValue(TypedValue::Long(30)),
|
||||
}),
|
||||
ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
|
||||
|
@ -1073,12 +1063,10 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
knows
|
||||
d1a, knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
daphne
|
||||
d1v, daphne
|
||||
))
|
||||
]),
|
||||
])),
|
||||
|
@ -1124,10 +1112,7 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
// The outer pattern joins against the `or` on the entity, but not value -- ?y means
|
||||
// different things in each place.
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
#![allow(clippy::single_match)]
|
||||
|
||||
use core_traits::{Entid, TypedValue, ValueType, ValueTypeSet};
|
||||
|
||||
use mentat_core::{Cloned, HasSchema};
|
||||
|
@ -16,18 +18,18 @@ use edn::query::{
|
|||
NonIntegerConstant, Pattern, PatternNonValuePlace, PatternValuePlace, SrcVar, Variable,
|
||||
};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
ColumnConstraint, DatomsColumn, EmptyBecause, EvolvedNonValuePlace, EvolvedPattern,
|
||||
EvolvedValuePlace, PlaceOrEmpty, SourceAlias,
|
||||
};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
pub fn into_typed_value(nic: NonIntegerConstant) -> TypedValue {
|
||||
match nic {
|
||||
NonIntegerConstant::BigInteger(_) => unimplemented!(), // TODO: #280.
|
||||
NonIntegerConstant::BigInteger(_) => unimplemented!(), // TODO(gburd): #280.
|
||||
NonIntegerConstant::Boolean(v) => TypedValue::Boolean(v),
|
||||
NonIntegerConstant::Float(v) => TypedValue::Double(v),
|
||||
NonIntegerConstant::Text(v) => v.into(),
|
||||
|
@ -93,7 +95,7 @@ impl ConjoiningClauses {
|
|||
self.constrain_to_ref(&pattern.entity);
|
||||
self.constrain_to_ref(&pattern.attribute);
|
||||
|
||||
let ref col = alias.1;
|
||||
let col = &alias.1;
|
||||
|
||||
let schema = known.schema;
|
||||
match pattern.entity {
|
||||
|
@ -101,9 +103,7 @@ impl ConjoiningClauses {
|
|||
// Placeholders don't contribute any column bindings, nor do
|
||||
// they constrain the query -- there's no need to produce
|
||||
// IS NOT NULL, because we don't store nulls in our schema.
|
||||
{
|
||||
()
|
||||
}
|
||||
{}
|
||||
EvolvedNonValuePlace::Variable(ref v) => {
|
||||
self.bind_column_to_var(schema, col.clone(), DatomsColumn::Entity, v.clone())
|
||||
}
|
||||
|
@ -287,7 +287,7 @@ impl ConjoiningClauses {
|
|||
None => {
|
||||
self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
|
||||
value: val.clone(),
|
||||
attr: attr,
|
||||
attr,
|
||||
});
|
||||
true
|
||||
}
|
||||
|
@ -301,7 +301,7 @@ impl ConjoiningClauses {
|
|||
None => {
|
||||
self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
|
||||
value: val.clone(),
|
||||
attr: attr,
|
||||
attr,
|
||||
});
|
||||
true
|
||||
}
|
||||
|
@ -403,8 +403,8 @@ impl ConjoiningClauses {
|
|||
None => {
|
||||
self.mark_known_empty(
|
||||
EmptyBecause::CachedAttributeHasNoValues {
|
||||
entity: entity,
|
||||
attr: attr,
|
||||
entity,
|
||||
attr,
|
||||
},
|
||||
);
|
||||
return true;
|
||||
|
@ -416,7 +416,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
}
|
||||
}
|
||||
_ => {} // TODO: check constant values against cache.
|
||||
_ => {} // TODO: check constant values against the cache.
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
|
@ -591,7 +591,7 @@ impl ConjoiningClauses {
|
|||
entity: e,
|
||||
attribute: a,
|
||||
value: v,
|
||||
tx: tx,
|
||||
tx,
|
||||
}),
|
||||
},
|
||||
},
|
||||
|
@ -612,7 +612,7 @@ impl ConjoiningClauses {
|
|||
let mut new_value: Option<EvolvedValuePlace> = None;
|
||||
|
||||
match &pattern.entity {
|
||||
&EvolvedNonValuePlace::Variable(ref var) => {
|
||||
EvolvedNonValuePlace::Variable(ref var) => {
|
||||
// See if we have it yet!
|
||||
match self.bound_value(&var) {
|
||||
None => (),
|
||||
|
@ -631,12 +631,12 @@ impl ConjoiningClauses {
|
|||
_ => (),
|
||||
}
|
||||
match &pattern.value {
|
||||
&EvolvedValuePlace::Variable(ref var) => {
|
||||
EvolvedValuePlace::Variable(ref var) => {
|
||||
// See if we have it yet!
|
||||
match self.bound_value(&var) {
|
||||
None => (),
|
||||
Some(tv) => {
|
||||
new_value = Some(EvolvedValuePlace::Value(tv.clone()));
|
||||
new_value = Some(EvolvedValuePlace::Value(tv));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -679,7 +679,6 @@ impl ConjoiningClauses {
|
|||
// between an attribute and a value.
|
||||
// We know we cannot return a result, so we short-circuit here.
|
||||
self.mark_known_empty(EmptyBecause::AttributeLookupFailed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -697,11 +696,13 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, Variable};
|
||||
|
||||
use clauses::{add_attribute, associate_ident, ident, QueryInputs};
|
||||
use crate::clauses::{add_attribute, associate_ident, ident, QueryInputs};
|
||||
|
||||
use types::{Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias};
|
||||
use crate::types::{
|
||||
Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias,
|
||||
};
|
||||
|
||||
use {algebrize, parse_find_string};
|
||||
use crate::{algebrize, parse_find_string};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
|
@ -796,7 +797,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
|
||||
// Our 'where' clauses are two:
|
||||
// - datoms0.a = 99
|
||||
|
@ -845,7 +846,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
|
||||
// Our 'where' clauses are two:
|
||||
// - datoms0.v = true
|
||||
|
@ -889,7 +890,7 @@ mod testing {
|
|||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a),
|
||||
value: PatternValuePlace::Variable(v.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
|
@ -914,7 +915,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&v), Some(ValueType::Boolean));
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
assert_eq!(
|
||||
cc.wheres,
|
||||
vec![ColumnConstraint::Equals(d0_a, QueryValue::Entid(99)),].into()
|
||||
|
@ -939,9 +940,9 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
value: PatternValuePlace::Variable(v.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: PatternNonValuePlace::Variable(a),
|
||||
value: PatternValuePlace::Variable(v),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
);
|
||||
|
@ -968,8 +969,8 @@ mod testing {
|
|||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
value: PatternValuePlace::Variable(v.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a),
|
||||
value: PatternValuePlace::Variable(v),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
);
|
||||
|
@ -991,7 +992,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
assert_eq!(cc.wheres, vec![].into());
|
||||
}
|
||||
|
||||
|
@ -1032,7 +1033,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
|
||||
// Our 'where' clauses are two:
|
||||
// - datoms0.v = 'hello'
|
||||
|
@ -1094,7 +1095,7 @@ mod testing {
|
|||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
value: PatternValuePlace::Variable(y),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
);
|
||||
|
@ -1203,7 +1204,7 @@ mod testing {
|
|||
assert!(!cc.column_bindings.contains_key(&y));
|
||||
|
||||
// ?x is bound to the entity.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1238,9 +1239,9 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: ident("foo", "bar"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
value: PatternValuePlace::Variable(y),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
);
|
||||
|
@ -1283,9 +1284,9 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: ident("foo", "bar"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
value: PatternValuePlace::Variable(y),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
);
|
||||
|
@ -1339,7 +1340,7 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: ident("foo", "bar"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
|
@ -1353,7 +1354,7 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.empty_because.unwrap(),
|
||||
EmptyBecause::TypeMismatch {
|
||||
var: y.clone(),
|
||||
var: y,
|
||||
existing: ValueTypeSet::of_one(ValueType::String),
|
||||
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
||||
}
|
||||
|
@ -1390,8 +1391,8 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(z.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(y.clone()),
|
||||
entity: PatternNonValuePlace::Variable(z),
|
||||
attribute: PatternNonValuePlace::Variable(y),
|
||||
value: PatternValuePlace::Variable(x.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
|
@ -1404,7 +1405,7 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.empty_because.unwrap(),
|
||||
EmptyBecause::TypeMismatch {
|
||||
var: x.clone(),
|
||||
var: x,
|
||||
existing: ValueTypeSet::of_one(ValueType::Ref),
|
||||
desired: ValueTypeSet::of_one(ValueType::Boolean),
|
||||
}
|
||||
|
|
|
@ -14,15 +14,15 @@ use mentat_core::Schema;
|
|||
|
||||
use edn::query::{FnArg, PlainSymbol, Predicate, TypeAnnotation};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use clauses::convert::ValueTypes;
|
||||
use crate::clauses::convert::ValueTypes;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};
|
||||
use crate::types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
/// Application of predicates.
|
||||
impl ConjoiningClauses {
|
||||
|
@ -38,13 +38,13 @@ impl ConjoiningClauses {
|
|||
if let Some(op) = Inequality::from_datalog_operator(predicate.operator.0.as_str()) {
|
||||
self.apply_inequality(known, op, predicate)
|
||||
} else {
|
||||
bail!(AlgebrizerErrorKind::UnknownFunction(predicate.operator.clone()))
|
||||
bail!(AlgebrizerError::UnknownFunction(predicate.operator.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
fn potential_types(&self, schema: &Schema, fn_arg: &FnArg) -> Result<ValueTypeSet> {
|
||||
match fn_arg {
|
||||
&FnArg::Variable(ref v) => Ok(self.known_type_set(v)),
|
||||
FnArg::Variable(ref v) => Ok(self.known_type_set(v)),
|
||||
_ => fn_arg.potential_types(schema),
|
||||
}
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ impl ConjoiningClauses {
|
|||
Some(value_type) => {
|
||||
self.add_type_requirement(anno.variable.clone(), ValueTypeSet::of_one(value_type))
|
||||
}
|
||||
None => bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
None => bail!(AlgebrizerError::InvalidArgumentType(
|
||||
PlainSymbol::plain("type"),
|
||||
ValueTypeSet::any(),
|
||||
2
|
||||
|
@ -76,7 +76,7 @@ impl ConjoiningClauses {
|
|||
predicate: Predicate,
|
||||
) -> Result<()> {
|
||||
if predicate.args.len() != 2 {
|
||||
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
||||
predicate.operator.clone(),
|
||||
predicate.args.len(),
|
||||
2
|
||||
|
@ -95,10 +95,10 @@ impl ConjoiningClauses {
|
|||
let supported_types = comparison.supported_types();
|
||||
let mut left_types = self
|
||||
.potential_types(known.schema, &left)?
|
||||
.intersection(&supported_types);
|
||||
.intersection(supported_types);
|
||||
if left_types.is_empty() {
|
||||
bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
predicate.operator.clone(),
|
||||
bail!(AlgebrizerError::InvalidArgumentType(
|
||||
predicate.operator,
|
||||
supported_types,
|
||||
0
|
||||
));
|
||||
|
@ -106,10 +106,10 @@ impl ConjoiningClauses {
|
|||
|
||||
let mut right_types = self
|
||||
.potential_types(known.schema, &right)?
|
||||
.intersection(&supported_types);
|
||||
.intersection(supported_types);
|
||||
if right_types.is_empty() {
|
||||
bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
predicate.operator.clone(),
|
||||
bail!(AlgebrizerError::InvalidArgumentType(
|
||||
predicate.operator,
|
||||
supported_types,
|
||||
1
|
||||
));
|
||||
|
@ -125,7 +125,7 @@ impl ConjoiningClauses {
|
|||
left_types.insert(ValueType::Double);
|
||||
}
|
||||
|
||||
let shared_types = left_types.intersection(&right_types);
|
||||
let shared_types = left_types.intersection(right_types);
|
||||
if shared_types.is_empty() {
|
||||
// In isolation these are both valid inputs to the operator, but the query cannot
|
||||
// succeed because the types don't match.
|
||||
|
@ -160,8 +160,8 @@ impl ConjoiningClauses {
|
|||
left_v = self.resolve_ref_argument(known.schema, &predicate.operator, 0, left)?;
|
||||
right_v = self.resolve_ref_argument(known.schema, &predicate.operator, 1, right)?;
|
||||
} else {
|
||||
bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
predicate.operator.clone(),
|
||||
bail!(AlgebrizerError::InvalidArgumentType(
|
||||
predicate.operator,
|
||||
supported_types,
|
||||
0
|
||||
));
|
||||
|
@ -176,8 +176,8 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
impl Inequality {
|
||||
fn to_constraint(&self, left: QueryValue, right: QueryValue) -> ColumnConstraint {
|
||||
match *self {
|
||||
fn to_constraint(self, left: QueryValue, right: QueryValue) -> ColumnConstraint {
|
||||
match self {
|
||||
Inequality::TxAfter | Inequality::TxBefore => {
|
||||
// TODO: both ends of the range must be inside the tx partition!
|
||||
// If we know the partition map -- and at this point we do, it's just
|
||||
|
@ -188,9 +188,9 @@ impl Inequality {
|
|||
}
|
||||
|
||||
ColumnConstraint::Inequality {
|
||||
operator: *self,
|
||||
left: left,
|
||||
right: right,
|
||||
operator: self,
|
||||
left,
|
||||
right,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -206,9 +206,9 @@ mod testing {
|
|||
FnArg, Keyword, Pattern, PatternNonValuePlace, PatternValuePlace, PlainSymbol, Variable,
|
||||
};
|
||||
|
||||
use clauses::{add_attribute, associate_ident, ident};
|
||||
use crate::clauses::{add_attribute, associate_ident, ident};
|
||||
|
||||
use types::{ColumnConstraint, EmptyBecause, QueryValue};
|
||||
use crate::types::{ColumnConstraint, EmptyBecause, QueryValue};
|
||||
|
||||
#[test]
|
||||
/// Apply two patterns: a pattern and a numeric predicate.
|
||||
|
@ -235,7 +235,7 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: PatternNonValuePlace::Placeholder,
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
|
@ -348,7 +348,7 @@ mod testing {
|
|||
known,
|
||||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
entity: PatternNonValuePlace::Variable(x),
|
||||
attribute: ident("foo", "roz"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
|
@ -362,7 +362,7 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.empty_because.unwrap(),
|
||||
EmptyBecause::TypeMismatch {
|
||||
var: y.clone(),
|
||||
var: y,
|
||||
existing: ValueTypeSet::of_numeric_types(),
|
||||
desired: ValueTypeSet::of_one(ValueType::String),
|
||||
}
|
||||
|
|
|
@ -14,11 +14,11 @@ use mentat_core::{HasSchema, Schema};
|
|||
|
||||
use edn::query::{FnArg, NonIntegerConstant, PlainSymbol};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use types::{EmptyBecause, QueryValue};
|
||||
use crate::types::{EmptyBecause, QueryValue};
|
||||
|
||||
/// Argument resolution.
|
||||
impl ConjoiningClauses {
|
||||
|
@ -41,14 +41,14 @@ impl ConjoiningClauses {
|
|||
if v.value_type().is_numeric() {
|
||||
Ok(QueryValue::TypedValue(v))
|
||||
} else {
|
||||
bail!(AlgebrizerErrorKind::InputTypeDisagreement(var.name().clone(), ValueType::Long, v.value_type()))
|
||||
bail!(AlgebrizerError::InputTypeDisagreement(var.name(), ValueType::Long, v.value_type()))
|
||||
}
|
||||
} else {
|
||||
self.constrain_var_to_numeric(var.clone());
|
||||
self.column_bindings
|
||||
.get(&var)
|
||||
.and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
|
||||
.ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
|
||||
}
|
||||
},
|
||||
// Can't be an entid.
|
||||
|
@ -62,7 +62,7 @@ impl ConjoiningClauses {
|
|||
Constant(NonIntegerConstant::BigInteger(_)) |
|
||||
Vector(_) => {
|
||||
self.mark_known_empty(EmptyBecause::NonNumericArgument);
|
||||
bail!(AlgebrizerErrorKind::InvalidArgument(function.clone(), "numeric", position))
|
||||
bail!(AlgebrizerError::InvalidArgument(function.clone(), "numeric", position))
|
||||
},
|
||||
Constant(NonIntegerConstant::Float(f)) => Ok(QueryValue::TypedValue(TypedValue::Double(f))),
|
||||
}
|
||||
|
@ -79,8 +79,8 @@ impl ConjoiningClauses {
|
|||
match arg {
|
||||
FnArg::Variable(var) => match self.bound_value(&var) {
|
||||
Some(TypedValue::Instant(v)) => Ok(QueryValue::TypedValue(TypedValue::Instant(v))),
|
||||
Some(v) => bail!(AlgebrizerErrorKind::InputTypeDisagreement(
|
||||
var.name().clone(),
|
||||
Some(v) => bail!(AlgebrizerError::InputTypeDisagreement(
|
||||
var.name(),
|
||||
ValueType::Instant,
|
||||
v.value_type()
|
||||
)),
|
||||
|
@ -89,7 +89,7 @@ impl ConjoiningClauses {
|
|||
self.column_bindings
|
||||
.get(&var)
|
||||
.and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
|
||||
.ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
|
||||
}
|
||||
},
|
||||
Constant(NonIntegerConstant::Instant(v)) => {
|
||||
|
@ -107,7 +107,7 @@ impl ConjoiningClauses {
|
|||
| Constant(NonIntegerConstant::BigInteger(_))
|
||||
| Vector(_) => {
|
||||
self.mark_known_empty(EmptyBecause::NonInstantArgument);
|
||||
bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
bail!(AlgebrizerError::InvalidArgumentType(
|
||||
function.clone(),
|
||||
ValueType::Instant.into(),
|
||||
position
|
||||
|
@ -136,14 +136,14 @@ impl ConjoiningClauses {
|
|||
self.column_bindings
|
||||
.get(&var)
|
||||
.and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into())
|
||||
.ok_or_else(|| AlgebrizerError::UnboundVariable(var.name()))
|
||||
}
|
||||
}
|
||||
EntidOrInteger(i) => Ok(QueryValue::TypedValue(TypedValue::Ref(i))),
|
||||
IdentOrKeyword(i) => schema
|
||||
.get_entid(&i)
|
||||
.map(|known_entid| QueryValue::Entid(known_entid.into()))
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnrecognizedIdent(i.to_string()).into()),
|
||||
.ok_or_else(|| AlgebrizerError::UnrecognizedIdent(i.to_string())),
|
||||
Constant(NonIntegerConstant::Boolean(_))
|
||||
| Constant(NonIntegerConstant::Float(_))
|
||||
| Constant(NonIntegerConstant::Text(_))
|
||||
|
@ -153,7 +153,7 @@ impl ConjoiningClauses {
|
|||
| SrcVar(_)
|
||||
| Vector(_) => {
|
||||
self.mark_known_empty(EmptyBecause::NonEntityArgument);
|
||||
bail!(AlgebrizerErrorKind::InvalidArgumentType(
|
||||
bail!(AlgebrizerError::InvalidArgumentType(
|
||||
function.clone(),
|
||||
ValueType::Ref.into(),
|
||||
position
|
||||
|
@ -188,7 +188,7 @@ impl ConjoiningClauses {
|
|||
.column_bindings
|
||||
.get(&var)
|
||||
.and_then(|cols| cols.first().map(|col| QueryValue::Column(col.clone())))
|
||||
.ok_or_else(|| AlgebrizerErrorKind::UnboundVariable(var.name()).into()),
|
||||
.ok_or_else(|| AlgebrizerError::UnboundVariable(var.name())),
|
||||
},
|
||||
EntidOrInteger(i) => Ok(QueryValue::PrimitiveLong(i)),
|
||||
IdentOrKeyword(_) => unimplemented!(), // TODO
|
||||
|
|
|
@ -12,16 +12,16 @@ use core_traits::ValueType;
|
|||
|
||||
use edn::query::{Binding, FnArg, SrcVar, VariableOrPlaceholder, WhereFn};
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
||||
|
||||
use types::{
|
||||
use crate::types::{
|
||||
Column, ColumnConstraint, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
|
||||
TransactionsColumn,
|
||||
};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
// Log in Query: tx-ids and tx-data
|
||||
|
@ -40,7 +40,7 @@ impl ConjoiningClauses {
|
|||
// transactions that impact one of the given attributes.
|
||||
pub(crate) fn apply_tx_ids(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 3 {
|
||||
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
||||
where_fn.operator.clone(),
|
||||
where_fn.args.len(),
|
||||
3
|
||||
|
@ -49,7 +49,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if where_fn.binding.is_empty() {
|
||||
// The binding must introduce at least one bound variable.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::NoBoundVariable
|
||||
));
|
||||
|
@ -57,7 +57,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if !where_fn.binding.is_valid() {
|
||||
// The binding must not duplicate bound variables.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::RepeatedBoundVariable
|
||||
));
|
||||
|
@ -68,7 +68,7 @@ impl ConjoiningClauses {
|
|||
Binding::BindRel(bindings) => {
|
||||
let bindings_count = bindings.len();
|
||||
if bindings_count != 1 {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::InvalidNumberOfBindings {
|
||||
number: bindings_count,
|
||||
|
@ -83,7 +83,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
Binding::BindColl(v) => v,
|
||||
Binding::BindScalar(_) | Binding::BindTuple(_) => {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::ExpectedBindRelOrBindColl
|
||||
))
|
||||
|
@ -95,7 +95,7 @@ impl ConjoiningClauses {
|
|||
// TODO: process source variables.
|
||||
match args.next().unwrap() {
|
||||
FnArg::SrcVar(SrcVar::DefaultSrc) => {}
|
||||
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"source variable",
|
||||
0
|
||||
|
@ -122,7 +122,7 @@ impl ConjoiningClauses {
|
|||
known.schema,
|
||||
transactions.clone(),
|
||||
TransactionsColumn::Tx,
|
||||
tx_var.clone(),
|
||||
tx_var,
|
||||
);
|
||||
|
||||
let after_constraint = ColumnConstraint::Inequality {
|
||||
|
@ -138,7 +138,7 @@ impl ConjoiningClauses {
|
|||
let before_constraint = ColumnConstraint::Inequality {
|
||||
operator: Inequality::LessThan,
|
||||
left: QueryValue::Column(QualifiedAlias(
|
||||
transactions.clone(),
|
||||
transactions,
|
||||
Column::Transactions(TransactionsColumn::Tx),
|
||||
)),
|
||||
right: tx2,
|
||||
|
@ -150,7 +150,7 @@ impl ConjoiningClauses {
|
|||
|
||||
pub(crate) fn apply_tx_data(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 2 {
|
||||
bail!(AlgebrizerErrorKind::InvalidNumberOfArguments(
|
||||
bail!(AlgebrizerError::InvalidNumberOfArguments(
|
||||
where_fn.operator.clone(),
|
||||
where_fn.args.len(),
|
||||
2
|
||||
|
@ -159,7 +159,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if where_fn.binding.is_empty() {
|
||||
// The binding must introduce at least one bound variable.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::NoBoundVariable
|
||||
));
|
||||
|
@ -167,7 +167,7 @@ impl ConjoiningClauses {
|
|||
|
||||
if !where_fn.binding.is_valid() {
|
||||
// The binding must not duplicate bound variables.
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::RepeatedBoundVariable
|
||||
));
|
||||
|
@ -178,7 +178,7 @@ impl ConjoiningClauses {
|
|||
Binding::BindRel(bindings) => {
|
||||
let bindings_count = bindings.len();
|
||||
if bindings_count < 1 || bindings_count > 5 {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::InvalidNumberOfBindings {
|
||||
number: bindings.len(),
|
||||
|
@ -189,7 +189,7 @@ impl ConjoiningClauses {
|
|||
bindings
|
||||
}
|
||||
Binding::BindScalar(_) | Binding::BindTuple(_) | Binding::BindColl(_) => {
|
||||
bail!(AlgebrizerErrorKind::InvalidBinding(
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator.clone(),
|
||||
BindingError::ExpectedBindRel
|
||||
))
|
||||
|
@ -217,7 +217,7 @@ impl ConjoiningClauses {
|
|||
// TODO: process source variables.
|
||||
match args.next().unwrap() {
|
||||
FnArg::SrcVar(SrcVar::DefaultSrc) => {}
|
||||
_ => bail!(AlgebrizerErrorKind::InvalidArgument(
|
||||
_ => bail!(AlgebrizerError::InvalidArgument(
|
||||
where_fn.operator.clone(),
|
||||
"source variable",
|
||||
0
|
||||
|
@ -306,7 +306,7 @@ impl ConjoiningClauses {
|
|||
|
||||
self.bind_column_to_var(
|
||||
known.schema,
|
||||
transactions.clone(),
|
||||
transactions,
|
||||
TransactionsColumn::Added,
|
||||
var.clone(),
|
||||
);
|
||||
|
|
|
@ -10,11 +10,11 @@
|
|||
|
||||
use edn::query::WhereFn;
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use Known;
|
||||
use crate::Known;
|
||||
|
||||
/// Application of `where` functions.
|
||||
impl ConjoiningClauses {
|
||||
|
@ -32,7 +32,7 @@ impl ConjoiningClauses {
|
|||
"ground" => self.apply_ground(known, where_fn),
|
||||
"tx-data" => self.apply_tx_data(known, where_fn),
|
||||
"tx-ids" => self.apply_tx_ids(known, where_fn),
|
||||
_ => bail!(AlgebrizerErrorKind::UnknownFunction(where_fn.operator.clone())),
|
||||
_ => bail!(AlgebrizerError::UnknownFunction(where_fn.operator.clone())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,11 +32,11 @@ use mentat_core::counter::RcCounter;
|
|||
|
||||
use edn::query::{Element, FindSpec, Limit, Order, ParsedQuery, SrcVar, Variable, WhereClause};
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
pub use clauses::{QueryInputs, VariableBindings};
|
||||
pub use crate::clauses::{QueryInputs, VariableBindings};
|
||||
|
||||
pub use types::{EmptyBecause, FindQuery};
|
||||
pub use crate::types::{EmptyBecause, FindQuery};
|
||||
|
||||
/// A convenience wrapper around things known in memory: the schema and caches.
|
||||
/// We use a trait object here to avoid making dozens of functions generic over the type
|
||||
|
@ -229,7 +229,7 @@ fn validate_and_simplify_order(
|
|||
|
||||
// Fail if the var isn't bound by the query.
|
||||
if !cc.column_bindings.contains_key(&var) {
|
||||
bail!(AlgebrizerErrorKind::UnboundVariable(var.name()))
|
||||
bail!(AlgebrizerError::UnboundVariable(var.name()))
|
||||
}
|
||||
|
||||
// Otherwise, determine if we also need to order by type…
|
||||
|
@ -263,7 +263,7 @@ fn simplify_limit(mut query: AlgebraicQuery) -> Result<AlgebraicQuery> {
|
|||
Some(TypedValue::Long(n)) => {
|
||||
if n <= 0 {
|
||||
// User-specified limits should always be natural numbers (> 0).
|
||||
bail!(AlgebrizerErrorKind::InvalidLimit(
|
||||
bail!(AlgebrizerError::InvalidLimit(
|
||||
n.to_string(),
|
||||
ValueType::Long
|
||||
))
|
||||
|
@ -273,7 +273,7 @@ fn simplify_limit(mut query: AlgebraicQuery) -> Result<AlgebraicQuery> {
|
|||
}
|
||||
Some(val) => {
|
||||
// Same.
|
||||
bail!(AlgebrizerErrorKind::InvalidLimit(
|
||||
bail!(AlgebrizerError::InvalidLimit(
|
||||
format!("{:?}", val),
|
||||
val.value_type()
|
||||
))
|
||||
|
@ -312,7 +312,7 @@ pub fn algebrize_with_inputs(
|
|||
cc.derive_types_from_find_spec(&parsed.find_spec);
|
||||
|
||||
// Do we have a variable limit? If so, tell the CC that the var must be numeric.
|
||||
if let &Limit::Variable(ref var) = &parsed.limit {
|
||||
if let Limit::Variable(ref var) = parsed.limit {
|
||||
cc.constrain_var_to_long(var.clone());
|
||||
}
|
||||
|
||||
|
@ -338,18 +338,18 @@ pub fn algebrize_with_inputs(
|
|||
has_aggregates: false, // TODO: we don't parse them yet.
|
||||
with: parsed.with,
|
||||
named_projection: extra_vars,
|
||||
order: order,
|
||||
limit: limit,
|
||||
cc: cc,
|
||||
order,
|
||||
limit,
|
||||
cc,
|
||||
};
|
||||
|
||||
// Substitute in any fixed values and fail if they're out of range.
|
||||
simplify_limit(q)
|
||||
}
|
||||
|
||||
pub use clauses::ConjoiningClauses;
|
||||
pub use crate::clauses::ConjoiningClauses;
|
||||
|
||||
pub use types::{
|
||||
pub use crate::types::{
|
||||
Column, ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
|
||||
ColumnName, ComputedTable, DatomsColumn, DatomsTable, FulltextColumn, OrderBy, QualifiedAlias,
|
||||
QueryValue, SourceAlias, TableAlias, VariableColumn,
|
||||
|
@ -364,7 +364,7 @@ impl FindQuery {
|
|||
in_vars: BTreeSet::default(),
|
||||
in_sources: BTreeSet::default(),
|
||||
limit: Limit::None,
|
||||
where_clauses: where_clauses,
|
||||
where_clauses,
|
||||
order: None,
|
||||
}
|
||||
}
|
||||
|
@ -375,7 +375,7 @@ impl FindQuery {
|
|||
|
||||
for var in parsed.in_vars.into_iter() {
|
||||
if !set.insert(var.clone()) {
|
||||
bail!(AlgebrizerErrorKind::DuplicateVariableError(var.name(), ":in"));
|
||||
bail!(AlgebrizerError::DuplicateVariableError(var.name(), ":in"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -387,7 +387,7 @@ impl FindQuery {
|
|||
|
||||
for var in parsed.with.into_iter() {
|
||||
if !set.insert(var.clone()) {
|
||||
bail!(AlgebrizerErrorKind::DuplicateVariableError(var.name(), ":with"));
|
||||
bail!(AlgebrizerError::DuplicateVariableError(var.name(), ":with"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -397,7 +397,7 @@ impl FindQuery {
|
|||
// Make sure that if we have `:limit ?x`, `?x` appears in `:in`.
|
||||
if let Limit::Variable(ref v) = parsed.limit {
|
||||
if !in_vars.contains(v) {
|
||||
bail!(AlgebrizerErrorKind::UnknownLimitVar(v.name()));
|
||||
bail!(AlgebrizerError::UnknownLimitVar(v.name()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -417,5 +417,5 @@ impl FindQuery {
|
|||
pub fn parse_find_string(string: &str) -> Result<FindQuery> {
|
||||
parse_query(string)
|
||||
.map_err(|e| e.into())
|
||||
.and_then(|parsed| FindQuery::from_parsed_query(parsed))
|
||||
.and_then(FindQuery::from_parsed_query)
|
||||
}
|
||||
|
|
|
@ -32,11 +32,11 @@ pub enum DatomsTable {
|
|||
/// A source of rows that isn't a named table -- typically a subquery or union.
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
pub enum ComputedTable {
|
||||
Subquery(::clauses::ConjoiningClauses),
|
||||
Subquery(Box<crate::clauses::ConjoiningClauses>),
|
||||
Union {
|
||||
projection: BTreeSet<Variable>,
|
||||
type_extraction: BTreeSet<Variable>,
|
||||
arms: Vec<::clauses::ConjoiningClauses>,
|
||||
arms: Vec<crate::clauses::ConjoiningClauses>,
|
||||
},
|
||||
NamedValues {
|
||||
names: Vec<Variable>,
|
||||
|
@ -153,8 +153,8 @@ impl ColumnName for DatomsColumn {
|
|||
impl ColumnName for VariableColumn {
|
||||
fn column_name(&self) -> String {
|
||||
match self {
|
||||
&VariableColumn::Variable(ref v) => v.to_string(),
|
||||
&VariableColumn::VariableTypeTag(ref v) => format!("{}_value_type_tag", v.as_str()),
|
||||
VariableColumn::Variable(ref v) => v.to_string(),
|
||||
VariableColumn::VariableTypeTag(ref v) => format!("{}_value_type_tag", v.as_str()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -163,8 +163,8 @@ impl Debug for VariableColumn {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
match self {
|
||||
// These should agree with VariableColumn::column_name.
|
||||
&VariableColumn::Variable(ref v) => write!(f, "{}", v.as_str()),
|
||||
&VariableColumn::VariableTypeTag(ref v) => write!(f, "{}_value_type_tag", v.as_str()),
|
||||
VariableColumn::Variable(ref v) => write!(f, "{}", v.as_str()),
|
||||
VariableColumn::VariableTypeTag(ref v) => write!(f, "{}_value_type_tag", v.as_str()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -178,10 +178,10 @@ impl Debug for DatomsColumn {
|
|||
impl Debug for Column {
|
||||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
match self {
|
||||
&Column::Fixed(ref c) => c.fmt(f),
|
||||
&Column::Fulltext(ref c) => c.fmt(f),
|
||||
&Column::Variable(ref v) => v.fmt(f),
|
||||
&Column::Transactions(ref t) => t.fmt(f),
|
||||
Column::Fixed(ref c) => c.fmt(f),
|
||||
Column::Fulltext(ref c) => c.fmt(f),
|
||||
Column::Variable(ref v) => v.fmt(f),
|
||||
Column::Transactions(ref t) => t.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -298,10 +298,10 @@ impl Debug for QueryValue {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
use self::QueryValue::*;
|
||||
match self {
|
||||
&Column(ref qa) => write!(f, "{:?}", qa),
|
||||
&Entid(ref entid) => write!(f, "entity({:?})", entid),
|
||||
&TypedValue(ref typed_value) => write!(f, "value({:?})", typed_value),
|
||||
&PrimitiveLong(value) => write!(f, "primitive({:?})", value),
|
||||
Column(ref qa) => write!(f, "{:?}", qa),
|
||||
Entid(ref entid) => write!(f, "entity({:?})", entid),
|
||||
TypedValue(ref typed_value) => write!(f, "value({:?})", typed_value),
|
||||
PrimitiveLong(value) => write!(f, "primitive({:?})", value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -375,15 +375,15 @@ impl Inequality {
|
|||
}
|
||||
|
||||
// The built-in inequality operators apply to Long, Double, and Instant.
|
||||
pub fn supported_types(&self) -> ValueTypeSet {
|
||||
pub fn supported_types(self) -> ValueTypeSet {
|
||||
use self::Inequality::*;
|
||||
match self {
|
||||
&LessThan | &LessThanOrEquals | &GreaterThan | &GreaterThanOrEquals | &NotEquals => {
|
||||
LessThan | LessThanOrEquals | GreaterThan | GreaterThanOrEquals | NotEquals => {
|
||||
let mut ts = ValueTypeSet::of_numeric_types();
|
||||
ts.insert(ValueType::Instant);
|
||||
ts
|
||||
}
|
||||
&Unpermute | &Differ | &TxAfter | &TxBefore => ValueTypeSet::of_one(ValueType::Ref),
|
||||
Unpermute | Differ | TxAfter | TxBefore => ValueTypeSet::of_one(ValueType::Ref),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -392,17 +392,17 @@ impl Debug for Inequality {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
use self::Inequality::*;
|
||||
f.write_str(match self {
|
||||
&LessThan => "<",
|
||||
&LessThanOrEquals => "<=",
|
||||
&GreaterThan => ">",
|
||||
&GreaterThanOrEquals => ">=",
|
||||
&NotEquals => "!=", // Datalog uses !=. SQL uses <>.
|
||||
LessThan => "<",
|
||||
LessThanOrEquals => "<=",
|
||||
GreaterThan => ">",
|
||||
GreaterThanOrEquals => ">=",
|
||||
NotEquals => "!=", // Datalog uses !=. SQL uses <>.
|
||||
|
||||
&Unpermute => "<",
|
||||
&Differ => "<>",
|
||||
Unpermute => "<",
|
||||
Differ => "<>",
|
||||
|
||||
&TxAfter => ">",
|
||||
&TxBefore => "<",
|
||||
TxAfter => ">",
|
||||
TxBefore => "<",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -534,17 +534,17 @@ impl Debug for ColumnConstraint {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
use self::ColumnConstraint::*;
|
||||
match self {
|
||||
&Equals(ref qa1, ref thing) => write!(f, "{:?} = {:?}", qa1, thing),
|
||||
Equals(ref qa1, ref thing) => write!(f, "{:?} = {:?}", qa1, thing),
|
||||
|
||||
&Inequality {
|
||||
Inequality {
|
||||
operator,
|
||||
ref left,
|
||||
ref right,
|
||||
} => write!(f, "{:?} {:?} {:?}", left, operator, right),
|
||||
|
||||
&Matches(ref qa, ref thing) => write!(f, "{:?} MATCHES {:?}", qa, thing),
|
||||
Matches(ref qa, ref thing) => write!(f, "{:?} MATCHES {:?}", qa, thing),
|
||||
|
||||
&HasTypes {
|
||||
HasTypes {
|
||||
ref value,
|
||||
ref value_types,
|
||||
check_value,
|
||||
|
@ -553,7 +553,7 @@ impl Debug for ColumnConstraint {
|
|||
write!(f, "(")?;
|
||||
for value_type in value_types.iter() {
|
||||
write!(f, "({:?}.value_type_tag = {:?}", value, value_type)?;
|
||||
if check_value && value_type == ValueType::Double
|
||||
if *check_value && value_type == ValueType::Double
|
||||
|| value_type == ValueType::Long
|
||||
{
|
||||
write!(
|
||||
|
@ -573,7 +573,7 @@ impl Debug for ColumnConstraint {
|
|||
}
|
||||
write!(f, "1)")
|
||||
}
|
||||
&NotExists(ref ct) => write!(f, "NOT EXISTS {:?}", ct),
|
||||
NotExists(ref ct) => write!(f, "NOT EXISTS {:?}", ct),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -625,15 +625,15 @@ impl Debug for EmptyBecause {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
use self::EmptyBecause::*;
|
||||
match self {
|
||||
&CachedAttributeHasNoEntity {
|
||||
CachedAttributeHasNoEntity {
|
||||
ref value,
|
||||
ref attr,
|
||||
} => write!(f, "(?e, {}, {:?}, _) not present in store", attr, value),
|
||||
&CachedAttributeHasNoValues {
|
||||
CachedAttributeHasNoValues {
|
||||
ref entity,
|
||||
ref attr,
|
||||
} => write!(f, "({}, {}, ?v, _) not present in store", entity, attr),
|
||||
&ConflictingBindings {
|
||||
ConflictingBindings {
|
||||
ref var,
|
||||
ref existing,
|
||||
ref desired,
|
||||
|
@ -642,7 +642,7 @@ impl Debug for EmptyBecause {
|
|||
"Var {:?} can't be {:?} because it's already bound to {:?}",
|
||||
var, desired, existing
|
||||
),
|
||||
&TypeMismatch {
|
||||
TypeMismatch {
|
||||
ref var,
|
||||
ref existing,
|
||||
ref desired,
|
||||
|
@ -651,7 +651,7 @@ impl Debug for EmptyBecause {
|
|||
"Type mismatch: {:?} can't be {:?}, because it's already {:?}",
|
||||
var, desired, existing
|
||||
),
|
||||
&KnownTypeMismatch {
|
||||
KnownTypeMismatch {
|
||||
ref left,
|
||||
ref right,
|
||||
} => write!(
|
||||
|
@ -659,25 +659,25 @@ impl Debug for EmptyBecause {
|
|||
"Type mismatch: {:?} can't be compared to {:?}",
|
||||
left, right
|
||||
),
|
||||
&NoValidTypes(ref var) => write!(f, "Type mismatch: {:?} has no valid types", var),
|
||||
&NonAttributeArgument => write!(f, "Non-attribute argument in attribute place"),
|
||||
&NonInstantArgument => write!(f, "Non-instant argument in instant place"),
|
||||
&NonEntityArgument => write!(f, "Non-entity argument in entity place"),
|
||||
&NonNumericArgument => write!(f, "Non-numeric argument in numeric place"),
|
||||
&NonStringFulltextValue => write!(f, "Non-string argument for fulltext attribute"),
|
||||
&UnresolvedIdent(ref kw) => write!(f, "Couldn't resolve keyword {}", kw),
|
||||
&InvalidAttributeIdent(ref kw) => write!(f, "{} does not name an attribute", kw),
|
||||
&InvalidAttributeEntid(entid) => write!(f, "{} is not an attribute", entid),
|
||||
&NonFulltextAttribute(entid) => write!(f, "{} is not a fulltext attribute", entid),
|
||||
&InvalidBinding(ref column, ref tv) => {
|
||||
NoValidTypes(ref var) => write!(f, "Type mismatch: {:?} has no valid types", var),
|
||||
NonAttributeArgument => write!(f, "Non-attribute argument in attribute place"),
|
||||
NonInstantArgument => write!(f, "Non-instant argument in instant place"),
|
||||
NonEntityArgument => write!(f, "Non-entity argument in entity place"),
|
||||
NonNumericArgument => write!(f, "Non-numeric argument in numeric place"),
|
||||
NonStringFulltextValue => write!(f, "Non-string argument for fulltext attribute"),
|
||||
UnresolvedIdent(ref kw) => write!(f, "Couldn't resolve keyword {}", kw),
|
||||
InvalidAttributeIdent(ref kw) => write!(f, "{} does not name an attribute", kw),
|
||||
InvalidAttributeEntid(entid) => write!(f, "{} is not an attribute", entid),
|
||||
NonFulltextAttribute(entid) => write!(f, "{} is not a fulltext attribute", entid),
|
||||
InvalidBinding(ref column, ref tv) => {
|
||||
write!(f, "{:?} cannot name column {:?}", tv, column)
|
||||
}
|
||||
&ValueTypeMismatch(value_type, ref typed_value) => write!(
|
||||
ValueTypeMismatch(value_type, ref typed_value) => write!(
|
||||
f,
|
||||
"Type mismatch: {:?} doesn't match attribute type {:?}",
|
||||
typed_value, value_type
|
||||
),
|
||||
&AttributeLookupFailed => write!(f, "Attribute lookup failed"),
|
||||
AttributeLookupFailed => write!(f, "Attribute lookup failed"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ use std::collections::BTreeSet;
|
|||
|
||||
use edn::query::{ContainsVariables, NotJoin, OrJoin, UnifyVars, Variable};
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, Result};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
/// In an `or` expression, every mentioned var is considered 'free'.
|
||||
/// In an `or-join` expression, every var in the var list is 'required'.
|
||||
|
@ -47,7 +47,7 @@ pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> {
|
|||
let template = clauses.next().unwrap().collect_mentioned_variables();
|
||||
for clause in clauses {
|
||||
if template != clause.collect_mentioned_variables() {
|
||||
bail!(AlgebrizerErrorKind::NonMatchingVariablesInOrClause)
|
||||
bail!(AlgebrizerError::NonMatchingVariablesInOrClause)
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -58,7 +58,7 @@ pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> {
|
|||
let var_set: BTreeSet<Variable> = vars.iter().cloned().collect();
|
||||
for clause in &or_join.clauses {
|
||||
if !var_set.is_subset(&clause.collect_mentioned_variables()) {
|
||||
bail!(AlgebrizerErrorKind::NonMatchingVariablesInOrClause)
|
||||
bail!(AlgebrizerError::NonMatchingVariablesInOrClause)
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -74,7 +74,7 @@ pub(crate) fn validate_not_join(not_join: &NotJoin) -> Result<()> {
|
|||
// The joined vars must each appear somewhere in the clause's mentioned variables.
|
||||
let var_set: BTreeSet<Variable> = vars.iter().cloned().collect();
|
||||
if !var_set.is_subset(¬_join.collect_mentioned_variables()) {
|
||||
bail!(AlgebrizerErrorKind::NonMatchingVariablesInNotClause)
|
||||
bail!(AlgebrizerError::NonMatchingVariablesInNotClause)
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -91,11 +91,11 @@ mod tests {
|
|||
Variable, WhereClause,
|
||||
};
|
||||
|
||||
use clauses::ident;
|
||||
use crate::clauses::ident;
|
||||
|
||||
use super::*;
|
||||
use parse_find_string;
|
||||
use types::FindQuery;
|
||||
use crate::parse_find_string;
|
||||
use crate::types::FindQuery;
|
||||
|
||||
fn value_ident(ns: &str, name: &str) -> PatternValuePlace {
|
||||
Keyword::namespaced(ns, name).into()
|
||||
|
@ -112,7 +112,7 @@ mod tests {
|
|||
match clause {
|
||||
WhereClause::OrJoin(or_join) => {
|
||||
// It's valid: the variables are the same in each branch.
|
||||
assert_eq!((), validate_or_join(&or_join).unwrap());
|
||||
validate_or_join(&or_join).unwrap();
|
||||
assert_eq!(expected_unify, or_join.unify_vars);
|
||||
or_join.clauses
|
||||
}
|
||||
|
@ -254,10 +254,10 @@ mod tests {
|
|||
/// Tests that the top-level form is a valid `not`, returning the clauses.
|
||||
fn valid_not_join(parsed: FindQuery, expected_unify: UnifyVars) -> Vec<WhereClause> {
|
||||
// Filter out all the clauses that are not `not`s.
|
||||
let mut nots = parsed.where_clauses.into_iter().filter(|x| match x {
|
||||
&WhereClause::NotJoin(_) => true,
|
||||
_ => false,
|
||||
});
|
||||
let mut nots = parsed
|
||||
.where_clauses
|
||||
.into_iter()
|
||||
.filter(|x| matches!(x, WhereClause::NotJoin(_)));
|
||||
|
||||
// There should be only one not clause.
|
||||
let clause = nots.next().unwrap();
|
||||
|
@ -266,7 +266,7 @@ mod tests {
|
|||
match clause {
|
||||
WhereClause::NotJoin(not_join) => {
|
||||
// It's valid: the variables are the same in each branch.
|
||||
assert_eq!((), validate_not_join(¬_join).unwrap());
|
||||
validate_not_join(¬_join).unwrap();
|
||||
assert_eq!(expected_unify, not_join.unify_vars);
|
||||
not_join.clauses
|
||||
}
|
||||
|
@ -368,11 +368,10 @@ mod tests {
|
|||
[?release :release/artists "Pink Floyd"]
|
||||
[?release :release/year 1970])]"#;
|
||||
let parsed = parse_find_string(query).expect("expected successful parse");
|
||||
let mut nots = parsed.where_clauses.iter().filter(|&x| match *x {
|
||||
WhereClause::NotJoin(_) => true,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
let mut nots = parsed
|
||||
.where_clauses
|
||||
.iter()
|
||||
.filter(|&x| matches!(*x, WhereClause::NotJoin(_)));
|
||||
let clause = nots.next().unwrap().clone();
|
||||
assert_eq!(None, nots.next());
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ use mentat_core::Schema;
|
|||
|
||||
use edn::query::Keyword;
|
||||
|
||||
use utils::{add_attribute, alg, associate_ident};
|
||||
use crate::utils::{add_attribute, alg, associate_ident};
|
||||
|
||||
use mentat_query_algebrizer::Known;
|
||||
|
||||
|
|
|
@ -24,11 +24,11 @@ use mentat_core::Schema;
|
|||
|
||||
use edn::query::{Keyword, PlainSymbol, Variable};
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerErrorKind, BindingError};
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError};
|
||||
|
||||
use mentat_query_algebrizer::{ComputedTable, Known, QueryInputs};
|
||||
|
||||
use utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};
|
||||
use crate::utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};
|
||||
|
||||
fn prepopulated_schema() -> Schema {
|
||||
let mut schema = Schema::default();
|
||||
|
@ -297,7 +297,7 @@ fn test_ground_coll_heterogeneous_types() {
|
|||
let q = r#"[:find ?x :where [?x _ ?v] [(ground [false 8.5]) [?v ...]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
assert_eq!(bails(known, &q), AlgebrizerErrorKind::InvalidGroundConstant);
|
||||
assert_eq!(bails(known, &q), AlgebrizerError::InvalidGroundConstant);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -305,7 +305,7 @@ fn test_ground_rel_heterogeneous_types() {
|
|||
let q = r#"[:find ?x :where [?x _ ?v] [(ground [[false] [5]]) [[?v]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
assert_eq!(bails(known, &q), AlgebrizerErrorKind::InvalidGroundConstant);
|
||||
assert_eq!(bails(known, &q), AlgebrizerError::InvalidGroundConstant);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -315,7 +315,7 @@ fn test_ground_tuple_duplicate_vars() {
|
|||
let known = Known::for_schema(&schema);
|
||||
assert_eq!(
|
||||
bails(known, &q),
|
||||
AlgebrizerErrorKind::InvalidBinding(
|
||||
AlgebrizerError::InvalidBinding(
|
||||
PlainSymbol::plain("ground"),
|
||||
BindingError::RepeatedBoundVariable
|
||||
)
|
||||
|
@ -329,7 +329,7 @@ fn test_ground_rel_duplicate_vars() {
|
|||
let known = Known::for_schema(&schema);
|
||||
assert_eq!(
|
||||
bails(known, &q),
|
||||
AlgebrizerErrorKind::InvalidBinding(
|
||||
AlgebrizerError::InvalidBinding(
|
||||
PlainSymbol::plain("ground"),
|
||||
BindingError::RepeatedBoundVariable
|
||||
)
|
||||
|
@ -343,7 +343,7 @@ fn test_ground_nonexistent_variable_invalid() {
|
|||
let known = Known::for_schema(&schema);
|
||||
assert_eq!(
|
||||
bails(known, &q),
|
||||
AlgebrizerErrorKind::UnboundVariable(PlainSymbol::plain("?v"))
|
||||
AlgebrizerError::UnboundVariable(PlainSymbol::plain("?v"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -362,6 +362,6 @@ fn test_unbound_input_variable_invalid() {
|
|||
|
||||
assert_eq!(
|
||||
bails_with_inputs(known, &q, i),
|
||||
AlgebrizerErrorKind::UnboundVariable(PlainSymbol::plain("?x"))
|
||||
AlgebrizerError::UnboundVariable(PlainSymbol::plain("?x"))
|
||||
);
|
||||
}
|
||||
|
|
|
@ -22,11 +22,11 @@ use mentat_core::{DateTime, Schema, Utc};
|
|||
|
||||
use edn::query::{Keyword, PlainSymbol, Variable};
|
||||
|
||||
use query_algebrizer_traits::errors::AlgebrizerErrorKind;
|
||||
use query_algebrizer_traits::errors::AlgebrizerError;
|
||||
|
||||
use mentat_query_algebrizer::{EmptyBecause, Known, QueryInputs};
|
||||
|
||||
use utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};
|
||||
use crate::utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};
|
||||
|
||||
fn prepopulated_schema() -> Schema {
|
||||
let mut schema = Schema::default();
|
||||
|
@ -75,7 +75,7 @@ fn test_instant_predicates_require_instants() {
|
|||
[(> ?t "2017-06-16T00:56:41.257Z")]]"#;
|
||||
assert_eq!(
|
||||
bails(known, query),
|
||||
AlgebrizerErrorKind::InvalidArgumentType(
|
||||
AlgebrizerError::InvalidArgumentType(
|
||||
PlainSymbol::plain(">"),
|
||||
ValueTypeSet::of_numeric_and_instant_types(),
|
||||
1
|
||||
|
@ -88,7 +88,7 @@ fn test_instant_predicates_require_instants() {
|
|||
[(> "2017-06-16T00:56:41.257Z", ?t)]]"#;
|
||||
assert_eq!(
|
||||
bails(known, query),
|
||||
AlgebrizerErrorKind::InvalidArgumentType(
|
||||
AlgebrizerError::InvalidArgumentType(
|
||||
PlainSymbol::plain(">"),
|
||||
ValueTypeSet::of_numeric_and_instant_types(),
|
||||
0
|
||||
|
@ -162,7 +162,7 @@ fn test_instant_predicates_accepts_var() {
|
|||
let cc = alg_with_inputs(
|
||||
known,
|
||||
query,
|
||||
QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value.clone())]),
|
||||
QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value)]),
|
||||
);
|
||||
assert_eq!(
|
||||
cc.known_type(&instant_var).expect("?time is known"),
|
||||
|
@ -202,7 +202,7 @@ fn test_numeric_predicates_accepts_var() {
|
|||
let cc = alg_with_inputs(
|
||||
known,
|
||||
query,
|
||||
QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value.clone())]),
|
||||
QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value)]),
|
||||
);
|
||||
assert_eq!(
|
||||
cc.known_type(&numeric_var).expect("?long is known"),
|
||||
|
|
|
@ -16,7 +16,7 @@ extern crate query_algebrizer_traits;
|
|||
|
||||
mod utils;
|
||||
|
||||
use utils::{alg, bails, SchemaBuilder};
|
||||
use crate::utils::{alg, bails, SchemaBuilder};
|
||||
|
||||
use core_traits::ValueType;
|
||||
|
||||
|
@ -34,6 +34,7 @@ fn prepopulated_schema() -> Schema {
|
|||
.define_simple_attr("test", "uuid", ValueType::Uuid, false)
|
||||
.define_simple_attr("test", "instant", ValueType::Instant, false)
|
||||
.define_simple_attr("test", "ref", ValueType::Ref, false)
|
||||
.define_simple_attr("test", "bytes", ValueType::Bytes, false)
|
||||
.schema
|
||||
}
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ use mentat_query_algebrizer::{
|
|||
// These are helpers that tests use to build Schema instances.
|
||||
pub fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
|
||||
schema.entid_map.insert(e, i.clone());
|
||||
schema.ident_map.insert(i.clone(), e);
|
||||
schema.ident_map.insert(i, e);
|
||||
}
|
||||
|
||||
pub fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "query_projector_traits"
|
||||
version = "0.0.1"
|
||||
version = "0.0.2"
|
||||
workspace = ".."
|
||||
|
||||
[lib]
|
||||
|
@ -11,12 +11,12 @@ path = "lib.rs"
|
|||
sqlcipher = ["rusqlite/sqlcipher"]
|
||||
|
||||
[dependencies]
|
||||
failure = "0.1"
|
||||
failure_derive = "0.1"
|
||||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.21"
|
||||
features = ["limits"]
|
||||
version = "~0.29"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
path = "../edn"
|
||||
|
|
|
@ -16,7 +16,7 @@ use mentat_query_algebrizer::{ColumnName, ConjoiningClauses, VariableColumn};
|
|||
|
||||
use mentat_query_sql::{ColumnOrExpression, Expression, Name, ProjectedColumn};
|
||||
|
||||
use errors::{ProjectorErrorKind, Result};
|
||||
use crate::errors::{ProjectorError, Result};
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
pub enum SimpleAggregationOp {
|
||||
|
@ -28,14 +28,14 @@ pub enum SimpleAggregationOp {
|
|||
}
|
||||
|
||||
impl SimpleAggregationOp {
|
||||
pub fn to_sql(&self) -> &'static str {
|
||||
pub fn to_sql(self) -> &'static str {
|
||||
use self::SimpleAggregationOp::*;
|
||||
match self {
|
||||
&Avg => "avg",
|
||||
&Count => "count",
|
||||
&Max => "max",
|
||||
&Min => "min",
|
||||
&Sum => "sum",
|
||||
Avg => "avg",
|
||||
Count => "count",
|
||||
Max => "max",
|
||||
Min => "min",
|
||||
Sum => "sum",
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -57,29 +57,29 @@ impl SimpleAggregationOp {
|
|||
/// but invalid to take `Max` of `{Uuid, String}`.
|
||||
///
|
||||
/// The returned type is the type of the result of the aggregation.
|
||||
pub fn is_applicable_to_types(&self, possibilities: ValueTypeSet) -> Result<ValueType> {
|
||||
pub fn is_applicable_to_types(self, possibilities: ValueTypeSet) -> Result<ValueType> {
|
||||
use self::SimpleAggregationOp::*;
|
||||
if possibilities.is_empty() {
|
||||
bail!(ProjectorErrorKind::CannotProjectImpossibleBinding(*self))
|
||||
bail!(ProjectorError::CannotProjectImpossibleBinding(self))
|
||||
}
|
||||
|
||||
match self {
|
||||
// One can always count results.
|
||||
&Count => Ok(ValueType::Long),
|
||||
Count => Ok(ValueType::Long),
|
||||
|
||||
// Only numeric types can be averaged or summed.
|
||||
&Avg => {
|
||||
Avg => {
|
||||
if possibilities.is_only_numeric() {
|
||||
// The mean of a set of numeric values will always, for our purposes, be a double.
|
||||
Ok(ValueType::Double)
|
||||
} else {
|
||||
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||
*self,
|
||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
||||
self,
|
||||
possibilities
|
||||
))
|
||||
}
|
||||
}
|
||||
&Sum => {
|
||||
Sum => {
|
||||
if possibilities.is_only_numeric() {
|
||||
if possibilities.contains(ValueType::Double) {
|
||||
Ok(ValueType::Double)
|
||||
|
@ -88,19 +88,19 @@ impl SimpleAggregationOp {
|
|||
Ok(ValueType::Long)
|
||||
}
|
||||
} else {
|
||||
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||
*self,
|
||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
||||
self,
|
||||
possibilities
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
&Max | &Min => {
|
||||
Max | Min => {
|
||||
if possibilities.is_unit() {
|
||||
use self::ValueType::*;
|
||||
let the_type = possibilities.exemplar().expect("a type");
|
||||
match the_type {
|
||||
// These types are numerically ordered.
|
||||
// Numerically ordered types.
|
||||
Double | Long | Instant => Ok(the_type),
|
||||
|
||||
// Boolean: false < true.
|
||||
|
@ -109,10 +109,10 @@ impl SimpleAggregationOp {
|
|||
// String: lexicographic order.
|
||||
String => Ok(the_type),
|
||||
|
||||
// These types are unordered.
|
||||
Keyword | Ref | Uuid => {
|
||||
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||
*self,
|
||||
// Unordered types.
|
||||
Keyword | Ref | Uuid | Bytes => {
|
||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
||||
self,
|
||||
possibilities
|
||||
))
|
||||
}
|
||||
|
@ -129,8 +129,8 @@ impl SimpleAggregationOp {
|
|||
Ok(ValueType::Long)
|
||||
}
|
||||
} else {
|
||||
bail!(ProjectorErrorKind::CannotApplyAggregateOperationToTypes(
|
||||
*self,
|
||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
||||
self,
|
||||
possibilities
|
||||
))
|
||||
}
|
||||
|
|
|
@@ -15,60 +15,14 @@ use rusqlite;
 use core_traits::ValueTypeSet;
 use db_traits::errors::DbError;
 use edn::query::PlainSymbol;
-use failure::{ Backtrace, Context, Fail, };
-use std::fmt;
 use query_pull_traits::errors::PullError;

-use aggregates::SimpleAggregationOp;
+use crate::aggregates::SimpleAggregationOp;

 pub type Result<T> = std::result::Result<T, ProjectorError>;

-#[derive(Debug)]
-pub struct ProjectorError(Box<Context<ProjectorErrorKind>>);
-
-impl Fail for ProjectorError {
-    #[inline]
-    fn cause(&self) -> Option<&Fail> {
-        self.0.cause()
-    }
-
-    #[inline]
-    fn backtrace(&self) -> Option<&Backtrace> {
-        self.0.backtrace()
-    }
-}
-
-impl fmt::Display for ProjectorError {
-    #[inline]
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&*self.0, f)
-    }
-}
-
-impl ProjectorError {
-    #[inline]
-    pub fn kind(&self) -> &ProjectorErrorKind {
-        &*self.0.get_context()
-    }
-}
-
-impl From<ProjectorErrorKind> for ProjectorError {
-    #[inline]
-    fn from(kind: ProjectorErrorKind) -> ProjectorError {
-        ProjectorError(Box::new(Context::new(kind)))
-    }
-}
-
-impl From<Context<ProjectorErrorKind>> for ProjectorError {
-    #[inline]
-    fn from(inner: Context<ProjectorErrorKind>) -> ProjectorError {
-        ProjectorError(Box::new(inner))
-    }
-}
-
 #[derive(Debug, Fail)]
-pub enum ProjectorErrorKind {
+pub enum ProjectorError {
     /// We're just not done yet. Message that the feature is recognized but not yet
     /// implemented.
     #[fail(display = "not yet implemented: {}", _0)]

@@ -116,24 +70,6 @@ pub enum ProjectorErrorKind {
     PullError(#[cause] PullError),
 }

-impl From<rusqlite::Error> for ProjectorErrorKind {
-    fn from(error: rusqlite::Error) -> ProjectorErrorKind {
-        ProjectorErrorKind::from(error).into()
-    }
-}
-
-impl From<mentat_db::DbError> for ProjectorErrorKind {
-    fn from(error: mentat_db::DbError) -> ProjectorErrorKind {
-        ProjectorErrorKind::from(error).into()
-    }
-}
-
-impl From<mentat_query_pull::PullError> for ProjectorErrorKind {
-    fn from(error: mentat_query_pull::PullError) -> ProjectorErrorKind {
-        ProjectorErrorKind::from(error).into()
-    }
-}
-
+impl From<rusqlite::Error> for ProjectorError {
+    fn from(error: rusqlite::Error) -> ProjectorError {
+        ProjectorError::RusqliteError(error.to_string())
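The removal above flattens the error plumbing: the `ProjectorError(Box<Context<ProjectorErrorKind>>)` wrapper, its `Fail`/`Display` impls, and `.kind()` all go away, leaving one `failure`-derived enum that callers match directly. A rough sketch of the flattened shape under the `failure = "~0.1"` derive; the variant names and messages here are illustrative, not the crate's full list:

use failure::Fail;

#[derive(Debug, Fail)]
pub enum ProjectorError {
    #[fail(display = "invalid projection: {}", _0)]
    InvalidProjection(String),

    #[fail(display = "SQL error: {}", _0)]
    RusqliteError(String),
}

pub type Result<T> = std::result::Result<T, ProjectorError>;

// Conversions construct a variant directly instead of wrapping a
// ProjectorErrorKind in a failure::Context.
fn open_failed() -> Result<()> {
    Err(ProjectorError::RusqliteError("unable to open".to_string()))
}

fn main() {
    match open_failed() {
        Err(ProjectorError::RusqliteError(msg)) => println!("sql error: {}", msg),
        _ => unreachable!(),
    }
}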
@@ -28,7 +28,7 @@ use mentat_query_projector::query_projection;
 // These are helpers that tests use to build Schema instances.
 fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
     schema.entid_map.insert(e, i.clone());
-    schema.ident_map.insert(i.clone(), e);
+    schema.ident_map.insert(i, e);
 }

 fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {

@@ -101,9 +101,9 @@ fn test_the_without_max_or_min() {
     // … when we look at the projection list, we cannot reconcile the types.
     let projection = query_projection(&schema, &algebrized);
     assert!(projection.is_err());
-    use query_projector_traits::errors::ProjectorErrorKind;
+    use query_projector_traits::errors::ProjectorError;
     match projection.err().expect("expected failure") {
-        ProjectorErrorKind::InvalidProjection(s) => {
+        ProjectorError::InvalidProjection(s) => {
             assert_eq!(s.as_str(), "Warning: used `the` without `min` or `max`.");
         }
         _ => panic!(),
@@ -1,18 +1,18 @@
 [package]
 name = "mentat_query_projector"
-version = "0.0.1"
+version = "0.0.2"
 workspace = ".."

 [features]
 sqlcipher = ["rusqlite/sqlcipher"]

 [dependencies]
-failure = "0.1"
-indexmap = "1.3"
+failure = "~0.1"
+indexmap = "~1.9"

 [dependencies.rusqlite]
-version = "0.21"
-features = ["limits"]
+version = "~0.29"
+features = ["limits", "bundled"]

 [dependencies.core_traits]
 path = "../core-traits"
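For reference, the tilde requirements introduced here allow only patch-level updates: `~0.1` means `>=0.1.0, <0.2.0`, `~1.9` means `>=1.9.0, <1.10.0`, and `~0.29` means `>=0.29.0, <0.30.0`, so `cargo update` can take new patch releases without jumping to a new minor version. The added `bundled` feature asks rusqlite to compile its own SQLite from source rather than link a system copy.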
@@ -10,7 +10,7 @@

 use core_traits::Binding;

-use query_projector_traits::errors::{ProjectorErrorKind, Result};
+use query_projector_traits::errors::{ProjectorError, Result};

 /// A `BindingTuple` is any type that can accommodate a Mentat tuple query result of fixed length.
 ///

@@ -27,7 +27,7 @@ impl BindingTuple for Vec<Binding> {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -43,13 +43,13 @@ impl BindingTuple for Vec<Binding> {
 impl BindingTuple for (Binding,) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 1 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(1, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(1, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -65,13 +65,13 @@ impl BindingTuple for (Binding,) {
 impl BindingTuple for (Binding, Binding) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 2 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(2, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(2, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -87,13 +87,13 @@ impl BindingTuple for (Binding, Binding) {
 impl BindingTuple for (Binding, Binding, Binding) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 3 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(3, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(3, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -113,13 +113,13 @@ impl BindingTuple for (Binding, Binding, Binding) {
 impl BindingTuple for (Binding, Binding, Binding, Binding) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 4 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(4, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(4, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -140,13 +140,13 @@ impl BindingTuple for (Binding, Binding, Binding, Binding) {
 impl BindingTuple for (Binding, Binding, Binding, Binding, Binding) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 5 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(5, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(5, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))

@@ -170,13 +170,13 @@ impl BindingTuple for (Binding, Binding, Binding, Binding, Binding) {
 impl BindingTuple for (Binding, Binding, Binding, Binding, Binding, Binding) {
     fn from_binding_vec(expected: usize, vec: Option<Vec<Binding>>) -> Result<Option<Self>> {
         if expected != 6 {
-            return Err(ProjectorErrorKind::UnexpectedResultsTupleLength(6, expected));
+            return Err(ProjectorError::UnexpectedResultsTupleLength(6, expected));
         }
         match vec {
             None => Ok(None),
             Some(vec) => {
                 if expected != vec.len() {
-                    Err(ProjectorErrorKind::UnexpectedResultsTupleLength(
+                    Err(ProjectorError::UnexpectedResultsTupleLength(
                         expected,
                         vec.len(),
                     ))
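The `BindingTuple` impls only swap the error type; the underlying shape — turn an optional row of N bindings into an optional fixed-size tuple, erroring when the width disagrees — is unchanged. A self-contained sketch of that shape for the two-element case, using plain `String` values and a local error type instead of Mentat's `Binding` and `ProjectorError`:

#[derive(Debug, PartialEq)]
enum WidthError {
    // (expected, got)
    UnexpectedLength(usize, usize),
}

// Mirrors `BindingTuple::from_binding_vec` for a pair: check the requested
// width, then check the actual row length, then destructure.
fn pair_from_row(
    expected: usize,
    row: Option<Vec<String>>,
) -> Result<Option<(String, String)>, WidthError> {
    if expected != 2 {
        return Err(WidthError::UnexpectedLength(2, expected));
    }
    match row {
        None => Ok(None),
        Some(vec) => {
            if vec.len() != expected {
                return Err(WidthError::UnexpectedLength(expected, vec.len()));
            }
            let mut iter = vec.into_iter();
            Ok(Some((iter.next().unwrap(), iter.next().unwrap())))
        }
    }
}

fn main() {
    let row = Some(vec!["a".to_string(), "b".to_string()]);
    assert_eq!(
        pair_from_row(2, row),
        Ok(Some(("a".to_string(), "b".to_string())))
    );
    assert_eq!(
        pair_from_row(2, Some(vec!["only".to_string()])),
        Err(WidthError::UnexpectedLength(2, 1))
    );
}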
@@ -50,26 +50,26 @@ use mentat_query_sql::{GroupBy, Projection};
 pub mod translate;

 mod binding_tuple;
-pub use binding_tuple::BindingTuple;
+pub use crate::binding_tuple::BindingTuple;
 mod project;
 mod projectors;
 mod pull;
 mod relresult;

-use project::{project_elements, ProjectedElements};
+use crate::project::{project_elements, ProjectedElements};

-pub use project::projected_column_for_var;
+pub use crate::project::projected_column_for_var;

-pub use projectors::{ConstantProjector, Projector};
+pub use crate::projectors::{ConstantProjector, Projector};

-use projectors::{
+use crate::projectors::{
     CollProjector, CollTwoStagePullProjector, RelProjector, RelTwoStagePullProjector,
     ScalarProjector, ScalarTwoStagePullProjector, TupleProjector, TupleTwoStagePullProjector,
 };

-pub use relresult::{RelResult, StructuredRelResult};
+pub use crate::relresult::{RelResult, StructuredRelResult};

-use query_projector_traits::errors::{ProjectorErrorKind, Result};
+use query_projector_traits::errors::{ProjectorError, Result};

 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct QueryOutput {
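The import rewrites in this hunk are the Rust 2018 edition spelling: `use` paths that point inside the same crate start with `crate::` rather than the bare 2015-style module name. A tiny sketch of the convention; the module and struct names here are made up:

mod relresult {
    pub struct RelResult {
        pub rows: usize,
    }
}

// Rust 2018: crate-local imports and re-exports begin with `crate::`.
pub use crate::relresult::RelResult;

fn main() {
    let r = RelResult { rows: 0 };
    println!("{} rows", r.rows);
}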
@@ -94,11 +94,11 @@ impl From<QueryOutput> for QueryResults {
 impl QueryOutput {
     pub fn empty_factory(spec: &FindSpec) -> Box<dyn Fn() -> QueryResults> {
         use self::FindSpec::*;
-        match spec {
-            &FindScalar(_) => Box::new(|| QueryResults::Scalar(None)),
-            &FindTuple(_) => Box::new(|| QueryResults::Tuple(None)),
-            &FindColl(_) => Box::new(|| QueryResults::Coll(vec![])),
-            &FindRel(ref es) => {
+        match *spec {
+            FindScalar(_) => Box::new(|| QueryResults::Scalar(None)),
+            FindTuple(_) => Box::new(|| QueryResults::Tuple(None)),
+            FindColl(_) => Box::new(|| QueryResults::Coll(vec![])),
+            FindRel(ref es) => {
                 let width = es.len();
                 Box::new(move || QueryResults::Rel(RelResult::empty(width)))
             }

@@ -115,48 +115,48 @@ impl QueryOutput {
     pub fn empty(spec: &Rc<FindSpec>) -> QueryOutput {
         use self::FindSpec::*;
-        let results = match &**spec {
-            &FindScalar(_) => QueryResults::Scalar(None),
-            &FindTuple(_) => QueryResults::Tuple(None),
-            &FindColl(_) => QueryResults::Coll(vec![]),
-            &FindRel(ref es) => QueryResults::Rel(RelResult::empty(es.len())),
+        let results = match **spec {
+            FindScalar(_) => QueryResults::Scalar(None),
+            FindTuple(_) => QueryResults::Tuple(None),
+            FindColl(_) => QueryResults::Coll(vec![]),
+            FindRel(ref es) => QueryResults::Rel(RelResult::empty(es.len())),
         };
         QueryOutput {
             spec: spec.clone(),
-            results: results,
+            results,
         }
     }

     pub fn from_constants(spec: &Rc<FindSpec>, bindings: VariableBindings) -> QueryResults {
         use self::FindSpec::*;
-        match &**spec {
-            &FindScalar(Element::Variable(ref var))
-            | &FindScalar(Element::Corresponding(ref var)) => {
+        match **spec {
+            FindScalar(Element::Variable(ref var))
+            | FindScalar(Element::Corresponding(ref var)) => {
                 let val = bindings.get(var).cloned().map(|v| v.into());
                 QueryResults::Scalar(val)
             }
-            &FindScalar(Element::Aggregate(ref _agg)) => {
+            FindScalar(Element::Aggregate(ref _agg)) => {
                 // TODO: static aggregates.
                 unimplemented!();
             }
-            &FindScalar(Element::Pull(ref _pull)) => {
+            FindScalar(Element::Pull(ref _pull)) => {
                 // TODO: static pull.
                 unimplemented!();
             }
-            &FindTuple(ref elements) => {
+            FindTuple(ref elements) => {
                 let values = elements
                     .iter()
-                    .map(|e| match e {
-                        &Element::Variable(ref var) | &Element::Corresponding(ref var) => bindings
+                    .map(|e| match *e {
+                        Element::Variable(ref var) | Element::Corresponding(ref var) => bindings
                             .get(var)
                             .cloned()
                             .expect("every var to have a binding")
                             .into(),
-                        &Element::Pull(ref _pull) => {
+                        Element::Pull(ref _pull) => {
                             // TODO: static pull.
                             unreachable!();
                         }
-                        &Element::Aggregate(ref _agg) => {
+                        Element::Aggregate(ref _agg) => {
                             // TODO: static computation of aggregates, then
                             // implement the condition in `is_fully_bound`.
                             unreachable!();

@@ -165,7 +165,7 @@ impl QueryOutput {
                     .collect();
                 QueryResults::Tuple(Some(values))
             }
-            &FindColl(Element::Variable(ref var)) | &FindColl(Element::Corresponding(ref var)) => {
+            FindColl(Element::Variable(ref var)) | FindColl(Element::Corresponding(ref var)) => {
                 let val = bindings
                     .get(var)
                     .cloned()

@@ -173,32 +173,32 @@ impl QueryOutput {
                     .into();
                 QueryResults::Coll(vec![val])
             }
-            &FindColl(Element::Pull(ref _pull)) => {
+            FindColl(Element::Pull(ref _pull)) => {
                 // TODO: static pull.
                 unimplemented!();
             }
-            &FindColl(Element::Aggregate(ref _agg)) => {
+            FindColl(Element::Aggregate(ref _agg)) => {
                 // Does it even make sense to write
                 // [:find [(max ?x) ...] :where [_ :foo/bar ?x]]
                 // ?
                 // TODO
                 unimplemented!();
             }
-            &FindRel(ref elements) => {
+            FindRel(ref elements) => {
                 let width = elements.len();
                 let values = elements
                     .iter()
-                    .map(|e| match e {
-                        &Element::Variable(ref var) | &Element::Corresponding(ref var) => bindings
+                    .map(|e| match *e {
+                        Element::Variable(ref var) | Element::Corresponding(ref var) => bindings
                             .get(var)
                             .cloned()
                             .expect("every var to have a binding")
                             .into(),
-                        &Element::Pull(ref _pull) => {
+                        Element::Pull(ref _pull) => {
                             // TODO: static pull.
                             unreachable!();
                         }
-                        &Element::Aggregate(ref _agg) => {
+                        Element::Aggregate(ref _agg) => {
                             // TODO: static computation of aggregates, then
                             // implement the condition in `is_fully_bound`.
                             unreachable!();
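The rewrites in `empty_factory`, `empty`, and `from_constants` above all follow one recipe: dereference once at the top (`match *spec`) and use plain variant patterns, keeping `ref` bindings for interior data that cannot be moved out of the borrow, instead of writing `&Variant(ref x)` in every arm. A small stand-alone sketch of the same rewrite; the enum is a placeholder for `FindSpec`:

enum Spec {
    Scalar,
    Rel(Vec<String>),
}

// Matching on `*spec` with `ref` bindings: no `&Variant` patterns, and the
// vector is borrowed rather than moved out of the referenced value.
fn width(spec: &Spec) -> usize {
    match *spec {
        Spec::Scalar => 1,
        Spec::Rel(ref columns) => columns.len(),
    }
}

fn main() {
    assert_eq!(width(&Spec::Scalar), 1);
    assert_eq!(width(&Spec::Rel(vec!["a".into(), "b".into()])), 2);
}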
@@ -241,77 +241,77 @@ impl QueryOutput {
 impl QueryResults {
     pub fn len(&self) -> usize {
-        use QueryResults::*;
-        match self {
-            &Scalar(ref o) => {
+        use crate::QueryResults::*;
+        match *self {
+            Scalar(ref o) => {
                 if o.is_some() {
                     1
                 } else {
                     0
                 }
             }
-            &Tuple(ref o) => {
+            Tuple(ref o) => {
                 if o.is_some() {
                     1
                 } else {
                     0
                 }
             }
-            &Coll(ref v) => v.len(),
-            &Rel(ref r) => r.row_count(),
+            Coll(ref v) => v.len(),
+            Rel(ref r) => r.row_count(),
         }
     }

     pub fn is_empty(&self) -> bool {
-        use QueryResults::*;
-        match self {
-            &Scalar(ref o) => o.is_none(),
-            &Tuple(ref o) => o.is_none(),
-            &Coll(ref v) => v.is_empty(),
-            &Rel(ref r) => r.is_empty(),
+        use crate::QueryResults::*;
+        match *self {
+            Scalar(ref o) => o.is_none(),
+            Tuple(ref o) => o.is_none(),
+            Coll(ref v) => v.is_empty(),
+            Rel(ref r) => r.is_empty(),
         }
     }

     pub fn into_scalar(self) -> Result<Option<Binding>> {
         match self {
             QueryResults::Scalar(o) => Ok(o),
-            QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "scalar")),
+            QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "scalar")),
             QueryResults::Tuple(_) => {
-                bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "scalar"))
+                bail!(ProjectorError::UnexpectedResultsType("tuple", "scalar"))
             }
-            QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "scalar")),
+            QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "scalar")),
         }
     }

     pub fn into_coll(self) -> Result<Vec<Binding>> {
         match self {
             QueryResults::Scalar(_) => {
-                bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "coll"))
+                bail!(ProjectorError::UnexpectedResultsType("scalar", "coll"))
             }
             QueryResults::Coll(c) => Ok(c),
-            QueryResults::Tuple(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "coll")),
-            QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "coll")),
+            QueryResults::Tuple(_) => bail!(ProjectorError::UnexpectedResultsType("tuple", "coll")),
+            QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "coll")),
         }
     }

     pub fn into_tuple(self) -> Result<Option<Vec<Binding>>> {
         match self {
             QueryResults::Scalar(_) => {
-                bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "tuple"))
+                bail!(ProjectorError::UnexpectedResultsType("scalar", "tuple"))
             }
-            QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "tuple")),
+            QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "tuple")),
             QueryResults::Tuple(t) => Ok(t),
-            QueryResults::Rel(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("rel", "tuple")),
+            QueryResults::Rel(_) => bail!(ProjectorError::UnexpectedResultsType("rel", "tuple")),
         }
     }

     pub fn into_rel(self) -> Result<RelResult<Binding>> {
         match self {
             QueryResults::Scalar(_) => {
-                bail!(ProjectorErrorKind::UnexpectedResultsType("scalar", "rel"))
+                bail!(ProjectorError::UnexpectedResultsType("scalar", "rel"))
             }
-            QueryResults::Coll(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("coll", "rel")),
-            QueryResults::Tuple(_) => bail!(ProjectorErrorKind::UnexpectedResultsType("tuple", "rel")),
+            QueryResults::Coll(_) => bail!(ProjectorError::UnexpectedResultsType("coll", "rel")),
+            QueryResults::Tuple(_) => bail!(ProjectorError::UnexpectedResultsType("tuple", "rel")),
             QueryResults::Rel(r) => Ok(r),
         }
     }
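The `into_scalar`/`into_coll`/`into_tuple`/`into_rel` conversions keep their structure and only change what they bail with: each consumes the `QueryResults` value and returns the flat error when the variant does not match the requested shape. A reduced sketch of that consuming-conversion pattern with a local enum and error type standing in for Mentat's:

#[derive(Debug, PartialEq)]
enum ShapeError {
    // (actual, requested)
    UnexpectedResultsType(&'static str, &'static str),
}

#[derive(Debug)]
enum Results {
    Scalar(Option<i64>),
    Coll(Vec<i64>),
}

impl Results {
    // Consumes `self`; returns the payload or an error naming both shapes.
    fn into_scalar(self) -> Result<Option<i64>, ShapeError> {
        match self {
            Results::Scalar(o) => Ok(o),
            Results::Coll(_) => Err(ShapeError::UnexpectedResultsType("coll", "scalar")),
        }
    }
}

fn main() {
    assert_eq!(Results::Scalar(Some(3)).into_scalar(), Ok(Some(3)));
    assert_eq!(
        Results::Coll(vec![1, 2]).into_scalar(),
        Err(ShapeError::UnexpectedResultsType("coll", "scalar"))
    );
}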
@@ -339,16 +339,16 @@ impl TypedIndex {
     /// This function will return a runtime error if the type tag is unknown, or the value is
     /// otherwise not convertible by the DB layer.
     fn lookup<'a>(&self, row: &Row<'a>) -> Result<Binding> {
-        use TypedIndex::*;
+        use crate::TypedIndex::*;

-        match self {
-            &Known(value_index, value_type) => {
+        match *self {
+            Known(value_index, value_type) => {
                 let v: rusqlite::types::Value = row.get(value_index).unwrap();
                 TypedValue::from_sql_value_pair(v, value_type)
                     .map(|v| v.into())
                     .map_err(|e| e.into())
             }
-            &Unknown(value_index, type_index) => {
+            Unknown(value_index, type_index) => {
                 let v: rusqlite::types::Value = row.get(value_index).unwrap();
                 let value_type_tag: i32 = row.get(type_index).unwrap();
                 TypedValue::from_sql_value_pair(v, value_type_tag)

@@ -403,10 +403,7 @@ trait IsPull {
 impl IsPull for Element {
     fn is_pull(&self) -> bool {
-        match self {
-            &Element::Pull(_) => true,
-            _ => false,
-        }
+        matches!(*self, Element::Pull(_))
     }
 }
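Replacing the two-arm `match` in `is_pull` with `matches!` is the idiomatic shortening once a match only answers "does this value fit this pattern?"; the macro has been in the standard library since Rust 1.42. A minimal equivalent with a stand-in enum:

enum Element {
    Variable(String),
    Pull(String),
}

// `matches!` expands to a match that returns true for the listed pattern
// and false for everything else.
fn is_pull(e: &Element) -> bool {
    matches!(*e, Element::Pull(_))
}

fn main() {
    assert!(is_pull(&Element::Pull("*".to_string())));
    assert!(!is_pull(&Element::Variable("?x".to_string())));
}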
@@ -430,16 +427,16 @@ pub fn query_projection(
     let variables: BTreeSet<Variable> = spec
         .columns()
-        .map(|e| match e {
-            &Element::Variable(ref var) | &Element::Corresponding(ref var) => var.clone(),
+        .map(|e| match *e {
+            Element::Variable(ref var) | Element::Corresponding(ref var) => var.clone(),

             // Pull expressions can never be fully bound.
             // TODO: but the interior can be, in which case we
             // can handle this and simply project.
-            &Element::Pull(_) => {
+            Element::Pull(_) => {
                 unreachable!();
             }
-            &Element::Aggregate(ref _agg) => {
+            Element::Aggregate(ref _agg) => {
                 // TODO: static computation of aggregates, then
                 // implement the condition in `is_fully_bound`.
                 unreachable!();

@@ -525,12 +522,14 @@ fn test_into_tuple() {
         ))
     );

-    match query_output.clone().into_tuple() {
-        Err(ProjectorErrorKind::UnexpectedResultsTupleLength(expected, got)) => {
+    match query_output.into_tuple() {
+        Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
             assert_eq!((expected, got), (3, 2));
         }
         // This forces the result type.
-        Ok(Some((_, _, _))) | _ => panic!("expected error"),
+        Ok(Some((_, _, _))) => panic!("expected error"),
+        #[allow(clippy::wildcard_in_or_patterns)]
+        _ => panic!("expected error"),
     }

     let query_output = QueryOutput {

@@ -544,14 +543,18 @@ fn test_into_tuple() {
     match query_output.clone().into_tuple() {
         Ok(None) => {}
         // This forces the result type.
-        Ok(Some((_, _))) | _ => panic!("expected error"),
+        Ok(Some((_, _))) => panic!("expected error"),
+        #[allow(clippy::wildcard_in_or_patterns)]
+        _ => panic!("expected error"),
     }

-    match query_output.clone().into_tuple() {
-        Err(ProjectorErrorKind::UnexpectedResultsTupleLength(expected, got)) => {
+    match query_output.into_tuple() {
+        Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
             assert_eq!((expected, got), (3, 2));
         }
         // This forces the result type.
-        Ok(Some((_, _, _))) | _ => panic!("expected error"),
+        Ok(Some((_, _, _))) => panic!("expected error"),
+        #[allow(clippy::wildcard_in_or_patterns)]
+        _ => panic!("expected error"),
     }
 }
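The test changes split `Ok(Some((_, _, _))) | _ => panic!(...)` into separate arms because Clippy's `wildcard_in_or_patterns` lint flags or-patterns that include `_` (the wildcard already covers every case); the typed arm survives only to pin down the tuple width returned by `into_tuple`, as the inline comment notes. A compact illustration of the lint and the accepted rewrite; the value and error text here are made up:

fn main() {
    let value: Result<Option<(i32, i32)>, &str> = Err("nope");

    // Clippy warns on `Ok(Some((_, _))) | _ => ...` because `_` alone already
    // matches everything; keeping a separate typed arm still fixes the tuple
    // width without putting the wildcard inside an or-pattern.
    match value {
        Err(msg) => println!("got expected error: {}", msg),
        Ok(Some((_, _))) => panic!("expected error"),
        _ => panic!("expected error"),
    }
}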