Compare commits
No commits in common. "master" and "gburd/replace-option-option" have entirely different histories.
master...gburd/replace-option-option
139 changed files with 1621 additions and 2199 deletions
.github/FUNDING.yml (vendored; 3 changed lines)
@@ -1,3 +0,0 @@
-liberapay: svartalf
-patreon: svartalf
-custom: ["https://svartalf.info/donate/", "https://www.buymeacoffee.com/svartalf"]
.github/dependabot.yml (vendored; 11 changed lines)
@@ -1,11 +0,0 @@
-# To get started with Dependabot version updates, you'll need to specify which
-# package ecosystems to update and where the package manifests are located.
-# Please see the documentation for all configuration options:
-# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
-
-version: 2
-updates:
-  - package-ecosystem: "cargo" # See documentation for possible values
-    directory: "/" # Location of package manifests
-    schedule:
-      interval: "daily"
.github/workflows/audit.yml (vendored; 20 changed lines)
@@ -1,20 +0,0 @@
-name: Security audit
-
-on:
-  schedule:
-    - cron: '0 0 1 * *'
-  push:
-    paths:
-      - '**/Cargo.toml'
-      - '**/Cargo.lock'
-  pull_request:
-
-jobs:
-  audit:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/audit-check@issue-104
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-
.github/workflows/clippy-ng.yml (vendored; 13 changed lines)
@@ -1,13 +0,0 @@
-on: [push, pull_request]
-name: Clippy (new version test, don't use it!)
-jobs:
-  clippy_check_ng:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          components: clippy
-          override: true
-      - uses: actions-rs/clippy@master
.github/workflows/clippy_check.yml (vendored; 16 changed lines)
@@ -1,16 +0,0 @@
-on: [push, pull_request]
-name: Clippy check
-jobs:
-  clippy_check:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          components: clippy
-          override: true
-      - uses: actions-rs/clippy-check@v1
-        with:
-          args: --all-targets --all-features -- -D warnings
-          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/cross_compile.yml (vendored; 28 changed lines)
@@ -1,28 +0,0 @@
-# We could use `@actions-rs/cargo` Action ability to automatically install `cross` tool
-# in order to compile our application for some unusual targets.
-
-on: [push, pull_request]
-
-name: Cross-compile
-
-jobs:
-  build:
-    name: Build
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        target:
-          - armv7-unknown-linux-gnueabihf
-          - powerpc64-unknown-linux-gnu
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          target: ${{ matrix.target }}
-          override: true
-      - uses: actions-rs/cargo@v1
-        with:
-          use-cross: true
-          command: build
-          args: --release --target=${{ matrix.target }}
.github/workflows/grcov.yml (vendored; 66 changed lines)
@@ -1,66 +0,0 @@
-on: [push, pull_request]
-
-name: Code coverage with grcov
-
-jobs:
-  grcov:
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os:
-          - ubuntu-latest
-          - macOS-latest
-          # - windows-latest
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          override: true
-          profile: minimal
-
-      - name: Execute tests
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --all
-        env:
-          CARGO_INCREMENTAL: 0
-          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests"
-
-      # Note that `actions-rs/grcov` Action can install `grcov` too,
-      # but can't use faster installation methods yet.
-      # As a temporary experiment `actions-rs/install` Action plugged in here.
-      # Consider **NOT** to copy that into your workflow,
-      # but use `actions-rs/grcov` only
-      - name: Pre-installing grcov
-        uses: actions-rs/install@v0.1
-        with:
-          crate: grcov
-          use-tool-cache: true
-
-      - name: Gather coverage data
-        id: coverage
-        uses: actions-rs/grcov@v0.1
-        with:
-          coveralls-token: ${{ secrets.COVERALLS_TOKEN }}
-
-      - name: Coveralls upload
-        uses: coverallsapp/github-action@master
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          parallel: true
-          path-to-lcov: ${{ steps.coverage.outputs.report }}
-
-  grcov_finalize:
-    runs-on: ubuntu-latest
-    needs: grcov
-    steps:
-      - name: Coveralls finalization
-        uses: coverallsapp/github-action@master
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          parallel-finished: true
.github/workflows/msrv.yml (vendored; 110 changed lines)
@@ -1,110 +0,0 @@
-# Based on https://github.com/actions-rs/meta/blob/master/recipes/msrv.md
-
-on: [push, pull_request]
-
-name: MSRV
-
-jobs:
-  check:
-    name: Check
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Run cargo check
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: check
-
-  test:
-    name: Test Suite
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: test
-
-  fmt:
-    name: Rustfmt
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Install rustfmt
-        run: rustup component add rustfmt
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-  clippy:
-    name: Clippy
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        rust:
-          - stable
-          - 1.31.0
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: ${{ matrix.rust }}
-          override: true
-
-      - name: Install clippy
-        run: rustup component add clippy
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
.github/workflows/nightly_lints.yml (vendored; 78 changed lines)
@@ -1,78 +0,0 @@
-on: [push, pull_request]
-
-name: Nightly lints
-
-jobs:
-  clippy:
-    name: Clippy
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain with clippy available
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: clippy
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
-
-  rustfmt:
-    name: Format
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain with rustfmt available
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: rustfmt
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-  combo:
-    name: Clippy + rustfmt
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install nightly toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly
-          override: true
-          components: rustfmt, clippy
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
-
.github/workflows/quickstart.yml (vendored; 79 changed lines)
@@ -1,79 +0,0 @@
-# Based on https://github.com/actions-rs/meta/blob/master/recipes/quickstart.md
-#
-# While our "example" application has the platform-specific code,
-# for simplicity we are compiling and testing everything on the Ubuntu environment only.
-# For multi-OS testing see the `cross.yml` workflow.
-
-on: [push, pull_request]
-
-name: Quickstart
-
-jobs:
-  check:
-    name: Check
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run cargo check
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: check
-
-  test:
-    name: Test Suite
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: test
-
-  lints:
-    name: Lints
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v2
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-          components: rustfmt, clippy
-
-      - name: Run cargo fmt
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: fmt
-          args: --all -- --check
-
-      - name: Run cargo clippy
-        uses: actions-rs/cargo@v1
-        continue-on-error: true # WARNING: only for this example, remove it!
-        with:
-          command: clippy
-          args: -- -D warnings
.gitignore (vendored; 7 changed lines)
@@ -3,7 +3,7 @@
*.jar
*jar
*~
**/*.rs.bk
*.rs.bk
.s*
.*.sw*
*.rs.bak
@@ -15,8 +15,6 @@
.lein-plugins/
.lein-repl-history
.nrepl-port
.bundle/
docs/vendor/
/.lein-*
/.nrepl-port
Cargo.lock
@@ -24,7 +22,7 @@ Cargo.lock
/classes/
/node_modules/
/out/
/target
target/
pom.xml
pom.xml.asc
/.cljs_node_repl/
@@ -94,3 +92,4 @@ build.xcarchive
docs/_site
docs/.sass-cache
docs/.jekyll-metadata

.travis.yml (51 changed lines)
@@ -1,40 +1,9 @@
language: rust
env:
  - CARGO_INCREMENTAL=0
# https://bheisler.github.io/post/efficient-use-of-travis-ci-cache-for-rust/
before_cache:
  # Delete loose files in the debug directory
  - find ./target/debug -maxdepth 1 -type f -delete
  # Delete the test and benchmark executables. Finding these all might take some
  # experimentation.
  - rm -rf ./target/debug/deps/criterion*
  - rm -rf ./target/debug/deps/bench*
  # Delete the associated metadata files for those executables
  - rm -rf ./target/debug/.fingerprint/criterion*
  - rm -rf ./target/debug/.fingerprint/bench*
  # Note that all of the above need to be repeated for `release/` instead of
  # `debug/` if your build script builds artifacts in release mode.
  # This is just more metadata
  - rm -f ./target/.rustc_info.json
  # Also delete the saved benchmark data from the test benchmarks. If you
  # have Criterion.rs benchmarks, you'll probably want to do this as well, or set
  # the CRITERION_HOME environment variable to move that data out of the
  # `target/` directory.
  - rm -rf ./target/criterion
  # Also delete cargo's registry index. This is updated on every build, but it's
  # way cheaper to re-download than the whole cache is.
  - rm -rf "$TRAVIS_HOME/.cargo/registry/index/"
  - rm -rf "$TRAVIS_HOME/.cargo/registry/src"
cache:
  directories:
    - ./target
    - $TRAVIS_HOME/.cache/sccache
    - $TRAVIS_HOME/.cargo/
    - $TRAVIS_HOME/.rustup/
cache: cargo # cache cargo-audit once installed
before_script:
  # - cargo install --force clippy
  - cargo install --force cargo-audit
  - cargo generate-lockfile
  - rustup component add clippy-preview
script:
  - cargo audit
# We use OSX so that we can get a reasonably up to date version of SQLCipher.
@@ -43,36 +12,34 @@ os: osx
before_install:
  - brew install sqlcipher
rust:
  - 1.43.0
  - 1.44.0
  - 1.45.0
  - 1.46.0
  - 1.47.0
  - 1.41.0
  - stable
  - beta
  - nightly
matrix:
  allow_failures:
    - rust: stable
    - rust: nightly
  fast_finish: true
jobs:
  include:
    - stage: "Test iOS"
      rust: 1.47.0
      rust: 1.41.0
      script: ./scripts/test-ios.sh
    - stage: "Docs"
      rust: 1.47.0
      rust: 1.41.0
      script: ./scripts/cargo-doc.sh
script:
  - cargo build --verbose --all
  - cargo clippy --all-targets --all-features -- -D warnings -A clippy::comparison-chain -A clippy::many-single-char-names # Check tests and non-default crate features.
  # - cargo clippy --all-targets --all-features -- -D warnings # Check tests and non-default crate features.
  - cargo test --verbose --all
  - cargo test --features edn/serde_support --verbose --all
  # We can't pick individual features out with `cargo test --all` (At the time of this writing, this
  # works but does the wrong thing because of a bug in cargo, but its fix will be to disallow doing
  # this all-together, see https://github.com/rust-lang/cargo/issues/5364 for more information). To
  # work around this, we run tests individually for sub-crates that rely on `rusqlite`.
  # work around this, we run tests individually for subcrates that rely on `rusqlite`.
  - |
    for crate in "" "db" "db-traits" "ffi" "public-traits" "query-projector" "query-projector-traits" "query-pull" "sql" "tolstoy" "tolstoy-traits" "transaction" "tools/cli"; do
      cargo test --manifest-path ./$crate/Cargo.toml --verbose --no-default-features --features sqlcipher
    done
cache: cargo
Cargo.toml (43 changed lines)
@@ -1,5 +1,5 @@
[package]
edition = "2021"
edition = "2018"
authors = [
  "Richard Newman <rnewman@twinql.com>",
  "Nicholas Alexander <nalexander@mozilla.com>",
@@ -11,10 +11,9 @@ authors = [
  "Kit Cambridge <kit@yakshaving.ninja>",
  "Edouard Oger <eoger@fastmail.com>",
  "Thom Chiovoloni <tchiovoloni@mozilla.com>",
  "Gregory Burd <greg@burd.me>",
]
name = "mentat"
version = "0.14.0"
version = "0.11.2"
build = "build/version.rs"

[features]
@@ -24,37 +23,31 @@ sqlcipher = ["rusqlite/sqlcipher", "mentat_db/sqlcipher"]
syncable = ["mentat_tolstoy", "tolstoy_traits", "mentat_db/syncable"]

[workspace]
members = [
  "tools/cli",
  "ffi", "core", "core-traits","db", "db-traits", "edn", "public-traits", "query-algebrizer",
  "query-algebrizer-traits", "query-projector", "query-projector-traits","query-pull",
  "query-sql", "sql", "sql-traits", "tolstoy-traits", "tolstoy", "transaction"
]
members = ["tools/cli", "ffi"]

[build-dependencies]
rustc_version = "~0.4"
rustc_version = "0.2"

[dev-dependencies]
assert_approx_eq = "~1.1"

#[dev-dependencies.cargo-husky]
#version = "1"
#default-features = false # Disable features which are enabled by default
#features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
[dev-dependencies.cargo-husky]
version = "1"
default-features = false # Disable features which are enabled by default
features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
# cargo audit
# cargo outdated

[dependencies]
chrono = "~0.4"
failure = "~0.1"
lazy_static = "~1.4"
time = "0.3.1"
log = "~0.4"
uuid = { version = "~1", features = ["v4", "serde"] }
chrono = "0.4"
failure = "0.1.6"
lazy_static = "1.4.0"
time = "0.2"
log = "0.4"
uuid = { version = "0.8", features = ["v4", "serde"] }


[dependencies.rusqlite]
version = "~0.29"
features = ["limits", "bundled"]
version = "0.21.0"
# System sqlite might be very old.
features = ["limits"]

[dependencies.edn]
path = "edn"
Makefile (3 changed lines)
@@ -1,4 +1,3 @@
.PHONY: outdated fix

outdated:
for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo outdated -R); done
@@ -7,5 +6,3 @@ outdated:
fix:
$(for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo fix --allow-dirty --broken-code --edition-idioms); done)

upgrades:
cargo upgrades
@@ -14,7 +14,7 @@ use std::process::exit;

/// MIN_VERSION should be changed when there's a new minimum version of rustc required
/// to build the project.
static MIN_VERSION: &str = "1.69.0";
static MIN_VERSION: &str = "1.40.0";

fn main() {
let ver = version().unwrap();
@@ -1,6 +1,6 @@
[package]
name = "core_traits"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[lib]
@@ -8,15 +8,14 @@ name = "core_traits"
path = "lib.rs"

[dependencies]
chrono = { version = "~0.4", features = ["serde"] }
enum-set = "~0.0.8"
lazy_static = "~1.4"
indexmap = "~1.9"
ordered-float = { version = "~2.8", features = ["serde"] }
uuid = { version = "~1", features = ["v4", "serde"] }
serde = { version = "~1.0", features = ["rc"] }
serde_derive = "~1.0"
bytes = { version = "1.0.1", features = ["serde"] }
chrono = { version = "0.4", features = ["serde"] }
enum-set = "0.0.8"
lazy_static = "1.4.0"
indexmap = "1.3.1"
ordered-float = { version = "1.0.2", features = ["serde"] }
uuid = { version = "0.8", features = ["v4", "serde"] }
serde = { version = "1.0", features = ["rc"] }
serde_derive = "1.0"

[dependencies.edn]
path = "../edn"
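The dependency pair above moves `uuid` between 0.8 and 1.x, which is why later hunks in this compare flip between `to_hyphenated()` and `hyphenated()`. A minimal sketch of the 1.x spelling, assuming the `uuid` crate with its `v4` and `serde` features as declared above (this is not code from the repository):

```rust
// Sketch only: the uuid 1.x call that corresponds to the 0.8-era
// `v.to_hyphenated().to_string()` seen in the lib.rs hunks below.
use uuid::Uuid;

fn uuid_string(v: &Uuid) -> String {
    // uuid 0.8: v.to_hyphenated().to_string()
    // uuid 1.x: the adapter method was renamed to `hyphenated()`.
    v.hyphenated().to_string()
}

fn main() {
    let v = Uuid::new_v4(); // requires the "v4" feature
    println!("{}", uuid_string(&v));
}
```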
@@ -14,7 +14,6 @@ extern crate indexmap;
extern crate ordered_float;
#[macro_use]
extern crate serde_derive;
extern crate bytes;
extern crate edn;
extern crate uuid;
#[macro_use]
@@ -34,7 +33,6 @@ use std::sync::Arc;

use std::collections::BTreeMap;

use bytes::Bytes;
use indexmap::IndexMap;

use enum_set::EnumSet;
@@ -54,7 +52,7 @@ use edn::entities::{
mod value_type_set;
pub mod values;

pub use crate::value_type_set::ValueTypeSet;
pub use value_type_set::ValueTypeSet;

#[macro_export]
macro_rules! bail {
@@ -111,7 +109,7 @@ pub enum AttributeBitFlags {
}

pub mod attribute {
use crate::TypedValue;
use TypedValue;

#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
pub enum Unique {
@@ -282,7 +280,6 @@ pub enum ValueType {
String,
Keyword,
Uuid,
Bytes,
}

impl ValueType {
@@ -297,7 +294,6 @@ impl ValueType {
s.insert(ValueType::String);
s.insert(ValueType::Keyword);
s.insert(ValueType::Uuid);
s.insert(ValueType::Bytes);
s
}
}
@@ -325,7 +321,6 @@ impl ValueType {
ValueType::String => "string",
ValueType::Keyword => "keyword",
ValueType::Uuid => "uuid",
ValueType::Bytes => "bytes",
},
)
}
@@ -343,7 +338,6 @@ impl ValueType {
"string" => Some(ValueType::String),
"keyword" => Some(ValueType::Keyword),
"uuid" => Some(ValueType::Uuid),
"bytes" => Some(ValueType::Bytes),
_ => None,
}
}
@@ -361,7 +355,6 @@ impl ValueType {
ValueType::String => "string",
ValueType::Keyword => "keyword",
ValueType::Uuid => "uuid",
ValueType::Bytes => "bytes",
},
)
}
@@ -376,12 +369,14 @@ impl ValueType {
ValueType::String => values::DB_TYPE_STRING.clone(),
ValueType::Keyword => values::DB_TYPE_KEYWORD.clone(),
ValueType::Uuid => values::DB_TYPE_UUID.clone(),
ValueType::Bytes => values::DB_TYPE_BYTES.clone(),
}
}

pub fn is_numeric(self) -> bool {
matches!(self, ValueType::Long | ValueType::Double)
match self {
ValueType::Long | ValueType::Double => true,
_ => false,
}
}
}

@@ -399,7 +394,6 @@ impl fmt::Display for ValueType {
ValueType::String => ":db.type/string",
ValueType::Keyword => ":db.type/keyword",
ValueType::Uuid => ":db.type/uuid",
ValueType::Bytes => ":db.type/bytes",
}
)
}
@@ -423,7 +417,6 @@ pub enum TypedValue {
String(ValueRc<String>),
Keyword(ValueRc<Keyword>),
Uuid(Uuid), // It's only 128 bits, so this should be acceptable to clone.
Bytes(Bytes),
}

impl From<KnownEntid> for TypedValue {
@@ -455,7 +448,6 @@ impl TypedValue {
TypedValue::String(_) => ValueType::String,
TypedValue::Keyword(_) => ValueType::Keyword,
TypedValue::Uuid(_) => ValueType::Uuid,
TypedValue::Bytes(_) => ValueType::Bytes,
}
}

@@ -582,7 +574,7 @@ impl TypedValue {
match self {
TypedValue::Uuid(v) => {
// Get an independent copy of the string.
let s: String = v.hyphenated().to_string();
let s: String = v.to_hyphenated().to_string();

// Make a CString out of the new bytes.
let c: CString = CString::new(s).expect("String conversion failed!");
@@ -603,14 +595,7 @@ impl TypedValue {

pub fn into_uuid_string(self) -> Option<String> {
match self {
TypedValue::Uuid(v) => Some(v.hyphenated().to_string()),
_ => None,
}
}

pub fn into_bytes(self) -> Option<Bytes> {
match self {
TypedValue::Bytes(b) => Some(b),
TypedValue::Uuid(v) => Some(v.to_hyphenated().to_string()),
_ => None,
}
}
@@ -704,12 +689,6 @@ impl From<f64> for TypedValue {
}
}

impl From<&[u8]> for TypedValue {
fn from(bslice: &[u8]) -> Self {
TypedValue::Bytes(Bytes::copy_from_slice(bslice))
}
}

trait MicrosecondPrecision {
/// Truncate the provided `DateTime` to microsecond precision.
fn microsecond_precision(self) -> Self;
@@ -963,7 +942,7 @@ impl Binding {

pub fn into_uuid_string(self) -> Option<String> {
match self {
Binding::Scalar(TypedValue::Uuid(v)) => Some(v.hyphenated().to_string()),
Binding::Scalar(TypedValue::Uuid(v)) => Some(v.to_hyphenated().to_string()),
_ => None,
}
}
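Several hunks above (for example `is_numeric`) swap the `matches!` macro for an explicit `match`. The two spellings are equivalent; a small self-contained sketch using a stand-in enum, not the crate's own `ValueType`:

```rust
#[derive(Clone, Copy)]
enum Kind {
    Long,
    Double,
    Text,
}

// Spelling on `master`: the `matches!` macro (stable since Rust 1.42).
fn is_numeric_matches(k: Kind) -> bool {
    matches!(k, Kind::Long | Kind::Double)
}

// Spelling on the branch: the expanded `match` with a catch-all arm.
fn is_numeric_match(k: Kind) -> bool {
    match k {
        Kind::Long | Kind::Double => true,
        _ => false,
    }
}

fn main() {
    assert_eq!(is_numeric_matches(Kind::Double), is_numeric_match(Kind::Double));
    assert!(!is_numeric_matches(Kind::Text));
}
```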
@@ -10,7 +10,7 @@

use enum_set::EnumSet;

use crate::ValueType;
use ValueType;

trait EnumSetExtensions<T: ::enum_set::CLike + Clone> {
/// Return a set containing both `x` and `y`.
@@ -58,7 +58,6 @@ lazy_static_namespaced_keyword_value!(DB_TYPE_REF, "db.type", "ref");
lazy_static_namespaced_keyword_value!(DB_TYPE_STRING, "db.type", "string");
lazy_static_namespaced_keyword_value!(DB_TYPE_URI, "db.type", "uri");
lazy_static_namespaced_keyword_value!(DB_TYPE_UUID, "db.type", "uuid");
lazy_static_namespaced_keyword_value!(DB_TYPE_BYTES, "db.type", "bytes");
lazy_static_namespaced_keyword_value!(DB_UNIQUE, "db", "unique");
lazy_static_namespaced_keyword_value!(DB_UNIQUE_IDENTITY, "db.unique", "identity");
lazy_static_namespaced_keyword_value!(DB_UNIQUE_VALUE, "db.unique", "value");
@@ -1,15 +1,15 @@
[package]
name = "mentat_core"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[dependencies]
chrono = { version = "~0.4", features = ["serde"] }
enum-set = "~0.0"
failure = "~0.1"
indexmap = "~1.9"
ordered-float = { version = "~2.8", features = ["serde"] }
uuid = { version = "~1", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
enum-set = "0.0"
failure = "0.1"
indexmap = "1.3"
ordered-float = { version = "1.0", features = ["serde"] }
uuid = { version = "0.8", features = ["v4", "serde"] }

[dependencies.core_traits]
path = "../core-traits"
@@ -13,7 +13,7 @@ use std::collections::BTreeSet;

use core_traits::{Entid, TypedValue};

use crate::Schema;
use Schema;

pub trait CachedAttributes {
fn is_attribute_cached_reverse(&self, entid: Entid) -> bool;
@@ -35,18 +35,18 @@ pub use chrono::{
pub use edn::parse::parse_query;
pub use edn::{Cloned, FromMicros, FromRc, Keyword, ToMicros, Utc, ValueRc};

pub use crate::cache::{CachedAttributes, UpdateableCache};
pub use cache::{CachedAttributes, UpdateableCache};

mod sql_types;
mod tx_report;
/// Core types defining a Mentat knowledge base.
mod types;

pub use crate::tx_report::TxReport;
pub use tx_report::TxReport;

pub use crate::types::ValueTypeTag;
pub use types::ValueTypeTag;

pub use crate::sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};
pub use sql_types::{SQLTypeAffinity, SQLValueType, SQLValueTypeSet};

/// Map `Keyword` idents (`:db/ident`) to positive integer entids (`1`).
pub type IdentMap = BTreeMap<Keyword, Entid>;
@@ -12,7 +12,7 @@ use std::collections::BTreeSet;

use core_traits::{ValueType, ValueTypeSet};

use crate::types::ValueTypeTag;
use types::ValueTypeTag;

/// Type safe representation of the possible return values from SQLite's `typeof`
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -51,7 +51,6 @@ impl SQLValueType for ValueType {
ValueType::String => (10, None),
ValueType::Uuid => (11, None),
ValueType::Keyword => (13, None),
ValueType::Bytes => (15, Some(SQLTypeAffinity::Blob)),
}
}

@@ -63,7 +62,7 @@ impl SQLValueType for ValueType {
/// Returns true if the provided integer is in the SQLite value space of this type. For
/// example, `1` is how we encode `true`.
fn accommodates_integer(&self, int: i64) -> bool {
use crate::ValueType::*;
use ValueType::*;
match *self {
Instant => false, // Always use #inst.
Long | Double => true,
@@ -72,7 +71,6 @@ impl SQLValueType for ValueType {
ValueType::String => false,
Keyword => false,
Uuid => false,
Bytes => false,
}
}
}
@@ -125,8 +123,8 @@ impl SQLValueTypeSet for ValueTypeSet {

#[cfg(test)]
mod tests {
use crate::sql_types::SQLValueType;
use core_traits::ValueType;
use sql_types::SQLValueType;

#[test]
fn test_accommodates_integer() {
@@ -14,7 +14,7 @@ use std::collections::BTreeMap;

use core_traits::Entid;

use crate::{DateTime, Utc};
use {DateTime, Utc};

/// A transaction report summarizes an applied transaction.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "db_traits"
|
||||
version = "0.0.2"
|
||||
version = "0.0.1"
|
||||
workspace = ".."
|
||||
|
||||
[lib]
|
||||
|
@ -11,8 +11,8 @@ path = "lib.rs"
|
|||
sqlcipher = ["rusqlite/sqlcipher"]
|
||||
|
||||
[dependencies]
|
||||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
failure = "0.1"
|
||||
failure_derive = "0.1"
|
||||
|
||||
[dependencies.edn]
|
||||
path = "../edn"
|
||||
|
@ -21,5 +21,5 @@ path = "../edn"
|
|||
path = "../core-traits"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
features = ["limits", "bundled"]
|
||||
version = "0.21"
|
||||
features = ["limits"]
|
||||
|
|
@@ -118,10 +118,10 @@ impl ::std::fmt::Display for InputError {
match self {
BadDbId => {
writeln!(f, ":db/id in map notation must either not be present or be an entid, an ident, or a tempid")
}
},
BadEntityPlace => {
writeln!(f, "cannot convert value place into entity place")
}
},
}
}
}
@@ -1,6 +1,6 @@
[package]
name = "mentat_db"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[features]
@@ -9,21 +9,21 @@ sqlcipher = ["rusqlite/sqlcipher"]
syncable = ["serde", "serde_json", "serde_derive"]

[dependencies]
failure = "~0.1"
indexmap = "~1.9"
itertools = "~0.10"
lazy_static = "~1.4"
log = "~0.4"
ordered-float = "~2.8"
time = "~0.3"
petgraph = "~0.6"
serde = { version = "~1.0", optional = true }
serde_json = { version = "~1.0", optional = true }
serde_derive = { version = "~1.0", optional = true }
failure = "0.1.6"
indexmap = "1.3.1"
itertools = "0.8"
lazy_static = "1.4.0"
log = "0.4"
ordered-float = "1.0.2"
time = "0.2"
petgraph = "0.5"
serde = { version = "1.0", optional = true }
serde_json = { version = "1.0", optional = true }
serde_derive = { version = "1.0", optional = true }

[dependencies.rusqlite]
version = "~0.29"
features = ["limits", "bundled"]
version = "0.21"
features = ["limits"]

[dependencies.edn]
path = "../edn"
@@ -40,10 +40,9 @@ path = "../db-traits"
[dependencies.mentat_sql]
path = "../sql"

# TODO: This should be in dev-dependencies.
# Should be dev-dependencies.
[dependencies.tabwriter]
version = "~1.2"
version = "1.2.1"

[dev-dependencies]
env_logger = "0.9"
#tabwriter = { version = "1.2.1" }
env_logger = "0.7"
@@ -10,19 +10,19 @@

#![allow(dead_code)]

use crate::db::TypedSQLValue;
use crate::entids;
use db::TypedSQLValue;
use db_traits::errors::{DbErrorKind, Result};
use edn;
use edn::entities::Entity;
use edn::symbols;
use edn::types::Value;
use entids;

use core_traits::{values, TypedValue};

use crate::schema::SchemaBuilding;
use crate::types::{Partition, PartitionMap};
use mentat_core::{IdentMap, Schema};
use schema::SchemaBuilding;
use types::{Partition, PartitionMap};

/// The first transaction ID applied to the knowledge base.
///
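Much of the remaining churn in this compare is import style rather than behaviour: the `master` side writes in-crate paths with a `crate::` prefix (the path style that came in with the 2018 edition, matching the `edition` change in Cargo.toml), while the branch keeps the older 2015-era bare paths. A minimal sketch of the two spellings, with illustrative module names rather than the repository's:

```rust
mod schema {
    pub struct SchemaBuilding;
}

// `master` spelling: in-crate paths start with `crate::`.
use crate::schema::SchemaBuilding;

// Branch spelling (Rust 2015 style) would be the bare path:
// use schema::SchemaBuilding;

fn main() {
    let _builder = SchemaBuilding;
}
```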
@@ -61,7 +61,6 @@ use std::iter::Peekable;
use failure::ResultExt;

use rusqlite;
use rusqlite::params_from_iter;

use core_traits::{Binding, Entid, TypedValue};

@@ -73,11 +72,11 @@ use mentat_sql::{QueryBuilder, SQLQuery, SQLiteQueryBuilder};

use edn::entities::OpType;

use crate::db::TypedSQLValue;
use db::TypedSQLValue;

use db_traits::errors::{DbError, DbErrorKind, Result};

use crate::watcher::TransactWatcher;
use watcher::TransactWatcher;

// Right now we use BTreeMap, because we expect few cached attributes.
pub type CacheMap<K, V> = BTreeMap<K, V>;
@@ -199,7 +198,9 @@ impl AevFactory {
let a: Entid = row.get_unwrap(0);
let e: Entid = row.get_unwrap(1);
let value_type_tag: i32 = row.get_unwrap(3);
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag).unwrap();
let v = TypedValue::from_sql_value_pair(row.get_unwrap(2), value_type_tag)
.map(|x| x)
.unwrap();
(a, e, self.intern(v))
}
}
@@ -374,7 +375,7 @@ impl RemoveFromCache for MultiValAttributeCache {

impl CardinalityManyCache for MultiValAttributeCache {
fn acc(&mut self, e: Entid, v: TypedValue) {
self.e_vs.entry(e).or_insert_with(Vec::new).push(v)
self.e_vs.entry(e).or_insert_with(|| vec![]).push(v)
}

fn set(&mut self, e: Entid, vs: Vec<TypedValue>) {
@@ -1072,9 +1073,7 @@ impl AttributeCaches {
replacing: bool,
) -> Result<()> {
let mut aev_factory = AevFactory::new();
let rows = statement.query_map(params_from_iter(&args), |row| {
Ok(aev_factory.row_to_aev(row))
})?;
let rows = statement.query_map(&args, |row| Ok(aev_factory.row_to_aev(row)))?;
let aevs = AevRows { rows };
self.accumulate_into_cache(
None,
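The `acc` hunk above also swaps `or_insert_with(|| vec![])` for `or_insert_with(Vec::new)`; both insert an empty `Vec` on first use, the function reference just avoids an extra closure. A small sketch with a plain `BTreeMap`, not the crate's own cache types:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut e_vs: BTreeMap<i64, Vec<&str>> = BTreeMap::new();

    // `master` spelling: pass the constructor directly.
    e_vs.entry(1).or_insert_with(Vec::new).push("a");

    // Branch spelling: wrap it in a closure.
    e_vs.entry(2).or_insert_with(|| vec![]).push("b");

    assert_eq!(e_vs[&1], vec!["a"]);
    assert_eq!(e_vs[&2], vec!["b"]);
}
```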
db/src/db.rs (39 changed lines)
@@ -22,16 +22,15 @@ use itertools;
use itertools::Itertools;
use rusqlite;
use rusqlite::limits::Limit;
use rusqlite::params_from_iter;
use rusqlite::types::{ToSql, ToSqlOutput};
use rusqlite::TransactionBehavior;

use crate::bootstrap;
use crate::{repeat_values, to_namespaced_keyword};
use bootstrap;
use {repeat_values, to_namespaced_keyword};

use edn::{DateTime, Utc, Uuid, Value};

use crate::entids;
use entids;

use core_traits::{attribute, Attribute, AttributeBitFlags, Entid, TypedValue, ValueType};

@@ -39,13 +38,13 @@ use mentat_core::{AttributeMap, FromMicros, IdentMap, Schema, ToMicros, ValueRc}

use db_traits::errors::{DbErrorKind, Result};

use crate::metadata;
use crate::schema::SchemaBuilding;
use crate::tx::transact;
use crate::types::{AVMap, AVPair, Partition, PartitionMap, DB};
use metadata;
use schema::SchemaBuilding;
use tx::transact;
use types::{AVMap, AVPair, Partition, PartitionMap, DB};

use crate::watcher::NullWatcher;
use std::convert::TryInto;
use watcher::NullWatcher;

// In PRAGMA foo='bar', `'bar'` must be a constant string (it cannot be a
// bound parameter), so we need to escape manually. According to
@@ -315,7 +314,7 @@ fn create_current_partition_view(conn: &rusqlite::Connection) -> Result<()> {
max(e) + 1 AS idx
FROM timelined_transactions WHERE timeline = {} GROUP BY part",
case.join(" "),
crate::TIMELINE_MAIN
::TIMELINE_MAIN
);

conn.execute(&view_stmt, rusqlite::params![])?;
@@ -434,7 +433,6 @@ impl TypedSQLValue for TypedValue {
Ok(TypedValue::Uuid(u))
}
(13, rusqlite::types::Value::Text(x)) => to_namespaced_keyword(&x).map(|k| k.into()),
(15, rusqlite::types::Value::Blob(x)) => Ok(TypedValue::Bytes(x.into())),
(_, value) => bail!(DbErrorKind::BadSQLValuePair(value, value_type_tag)),
}
}
@@ -455,7 +453,6 @@ impl TypedSQLValue for TypedValue {
Value::Float(ref x) => Some(TypedValue::Double(*x)),
Value::Text(ref x) => Some(x.clone().into()),
Value::Keyword(ref x) => Some(x.clone().into()),
Value::Bytes(b) => Some(TypedValue::Bytes(b.clone())),
_ => None,
}
}
@@ -472,7 +469,6 @@ impl TypedSQLValue for TypedValue {
TypedValue::String(ref x) => (x.as_str().into(), 10),
TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
TypedValue::Bytes(b) => (b.to_vec().into(), 15),
}
}

@@ -487,7 +483,6 @@ impl TypedSQLValue for TypedValue {
TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
TypedValue::Uuid(ref u) => (Value::Uuid(*u), ValueType::Uuid),
TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
TypedValue::Bytes(b) => (Value::Bytes(b.clone()), ValueType::Bytes),
}
}
}
@@ -809,7 +804,7 @@ impl MentatStoring for rusqlite::Connection {
values);
let mut stmt: rusqlite::Statement = self.prepare(s.as_str())?;

let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(params_from_iter(&params), |row| -> Result<(i64, Entid)> {
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(&params, |row| -> Result<(i64, Entid)> {
Ok((row.get(0)?, row.get(1)?))
})?.collect();
m
@@ -913,7 +908,6 @@ impl MentatStoring for rusqlite::Connection {
// We must keep these computed values somewhere to reference them later, so we can't
// combine this map and the subsequent flat_map.
// (e0, a0, v0, value_type_tag0, added0, flags0)
#[allow(clippy::type_complexity)]
let block: Result<Vec<(i64 /* e */,
i64 /* a */,
ToSqlOutput<'a> /* value */,
@@ -953,7 +947,7 @@ impl MentatStoring for rusqlite::Connection {

// TODO: consider ensuring we inserted the expected number of rows.
let mut stmt = self.prepare_cached(s.as_str())?;
stmt.execute(params_from_iter(&params))
stmt.execute(&params)
.context(DbErrorKind::NonFtsInsertionIntoTempSearchTableFailed)
.map_err(|e| e.into())
.map(|_c| ())
@@ -990,7 +984,6 @@ impl MentatStoring for rusqlite::Connection {
// We must keep these computed values somewhere to reference them later, so we can't
// combine this map and the subsequent flat_map.
// (e0, a0, v0, value_type_tag0, added0, flags0)
#[allow(clippy::type_complexity)]
let block: Result<Vec<(i64 /* e */,
i64 /* a */,
Option<ToSqlOutput<'a>> /* value */,
@@ -1047,7 +1040,7 @@ impl MentatStoring for rusqlite::Connection {

// TODO: consider ensuring we inserted the expected number of rows.
let mut stmt = self.prepare_cached(fts_s.as_str())?;
stmt.execute(params_from_iter(&fts_params)).context(DbErrorKind::FtsInsertionFailed)?;
stmt.execute(&fts_params).context(DbErrorKind::FtsInsertionFailed)?;

// Second, insert searches.
// `params` reference computed values in `block`.
@@ -1075,7 +1068,7 @@ impl MentatStoring for rusqlite::Connection {

// TODO: consider ensuring we inserted the expected number of rows.
let mut stmt = self.prepare_cached(s.as_str())?;
stmt.execute(params_from_iter(&params)).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
stmt.execute(&params).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
.map_err(|e| e.into())
.map(|_c| ())
}).collect::<Result<Vec<()>>>();
@@ -1181,7 +1174,7 @@ pub fn update_metadata(
new_schema: &Schema,
metadata_report: &metadata::MetadataReport,
) -> Result<()> {
use crate::metadata::AttributeAlteration::*;
use metadata::AttributeAlteration::*;

// Populate the materialized view directly from datoms (and, potentially in the future,
// transactions). This might generalize nicely as we expand the set of materialized views.
@@ -1338,12 +1331,12 @@ mod tests {
use std::borrow::Borrow;

use super::*;
use crate::debug::{tempids, TestConn};
use crate::internal_types::Term;
use core_traits::{attribute, KnownEntid};
use db_traits::errors;
use debug::{tempids, TestConn};
use edn::entities::OpType;
use edn::{self, InternSet};
use internal_types::Term;
use mentat_core::util::Either::*;
use mentat_core::{HasSchema, Keyword};
use std::collections::BTreeMap;
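The `params_from_iter(...)` versus `&params` changes in this file track the rusqlite bump in Cargo.toml: 0.21 accepted a slice of `&dyn ToSql` references directly, while the ~0.29-era API routes dynamically built parameter lists through `rusqlite::params_from_iter`. A hedged sketch of the newer call shape, with an illustrative table and column that are not part of the repository's schema:

```rust
use rusqlite::{params_from_iter, Connection, Result};

// Look up rows whose id is in `ids`, binding the ids as SQL parameters.
fn names_for_ids(conn: &Connection, ids: &[i64]) -> Result<Vec<String>> {
    let placeholders = vec!["?"; ids.len()].join(", ");
    let sql = format!("SELECT name FROM people WHERE id IN ({})", placeholders);
    let mut stmt = conn.prepare(&sql)?;
    // rusqlite 0.21 style was roughly `stmt.query_map(&param_slice, ...)`;
    // newer releases wrap the iterator explicitly.
    let rows = stmt.query_map(params_from_iter(ids.iter()), |row| row.get(0))?;
    rows.collect()
}

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE people (id INTEGER PRIMARY KEY, name TEXT);
         INSERT INTO people VALUES (1, 'a'), (2, 'b');",
    )?;
    println!("{:?}", names_for_ids(&conn, &[1, 2])?);
    Ok(())
}
```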
@@ -66,23 +66,23 @@ use rusqlite::types::ToSql;
use rusqlite::TransactionBehavior;
use tabwriter::TabWriter;

use crate::bootstrap;
use crate::db::*;
use crate::db::{read_attribute_map, read_ident_map};
use crate::entids;
use bootstrap;
use db::*;
use db::{read_attribute_map, read_ident_map};
use db_traits::errors::Result;
use edn;
use entids;

use core_traits::{Entid, TypedValue, ValueType};

use crate::internal_types::TermWithTempIds;
use crate::schema::SchemaBuilding;
use crate::tx::{transact, transact_terms};
use crate::types::*;
use crate::watcher::NullWatcher;
use edn::entities::{EntidOrIdent, TempId};
use edn::InternSet;
use internal_types::TermWithTempIds;
use mentat_core::{HasSchema, SQLValueType, TxReport};
use schema::SchemaBuilding;
use tx::{transact, transact_terms};
use types::*;
use watcher::NullWatcher;

/// Represents a *datom* (assertion) in the store.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -306,9 +306,10 @@ pub fn transactions_after<S: Borrow<Schema>>(
pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
let mut stmt: rusqlite::Statement =
conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
let params: &[i32; 0] = &[];

let r: Result<Vec<_>> = stmt
.query_and_then([], |row| {
.query_and_then(params, |row| {
let rowid: i64 = row.get(0)?;
let text: String = row.get(1)?;
Ok((rowid, text))
@@ -340,7 +341,7 @@ pub fn dump_sql_query(

let r: Result<Vec<_>> = stmt
.query_and_then(params, |row| {
for i in 0..row.as_ref().column_count() {
for i in 0..row.column_count() {
let value: rusqlite::types::Value = row.get(i)?;
write!(&mut tw, "{:?}\t", value).unwrap();
}
@@ -63,8 +63,7 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
if attribute >= DB_DOC {
return false;
}
matches!(
attribute,
match attribute {
// Idents.
DB_IDENT |
// Schema.
@@ -73,22 +72,19 @@ pub fn might_update_metadata(attribute: Entid) -> bool {
DB_INDEX |
DB_IS_COMPONENT |
DB_UNIQUE |
DB_VALUE_TYPE
)
DB_VALUE_TYPE =>
true,
_ => false,
}
}

/// Return 'false' if the given attribute might be used to describe a schema attribute.
pub fn is_a_schema_attribute(attribute: Entid) -> bool {
matches!(
attribute,
DB_IDENT
| DB_CARDINALITY
| DB_FULLTEXT
| DB_INDEX
| DB_IS_COMPONENT
| DB_UNIQUE
| DB_VALUE_TYPE
)
match attribute {
DB_IDENT | DB_CARDINALITY | DB_FULLTEXT | DB_INDEX | DB_IS_COMPONENT | DB_UNIQUE
| DB_VALUE_TYPE => true,
_ => false,
}
}

lazy_static! {
@@ -23,10 +23,10 @@ use edn::entities;
use edn::entities::{EntityPlace, OpType, TempId, TxFunction};
use edn::{SpannedValue, ValueAndSpan, ValueRc};

use crate::schema::SchemaTypeChecking;
use crate::types::{AVMap, AVPair, Schema, TransactableValue};
use db_traits::errors;
use db_traits::errors::{DbErrorKind, Result};
use schema::SchemaTypeChecking;
use types::{AVMap, AVPair, Schema, TransactableValue};

impl TransactableValue for ValueAndSpan {
fn into_typed_value(self, schema: &Schema, value_type: ValueType) -> Result<TypedValue> {
@@ -75,14 +75,18 @@ impl TransactableValue for ValueAndSpan {
}
}
Nil | Boolean(_) | Instant(_) | BigInteger(_) | Float(_) | Uuid(_) | PlainSymbol(_)
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) | Bytes(_) => {
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) => {
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
}
}
}

fn as_tempid(&self) -> Option<TempId> {
self.inner.as_text().cloned().map(TempId::External)
self.inner
.as_text()
.cloned()
.map(TempId::External)
.map(|v| v)
}
}

@@ -105,8 +109,7 @@ impl TransactableValue for TypedValue {
| TypedValue::Long(_)
| TypedValue::Double(_)
| TypedValue::Instant(_)
| TypedValue::Uuid(_)
| TypedValue::Bytes(_) => {
| TypedValue::Uuid(_) => {
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
}
}
@@ -60,30 +60,30 @@ mod upsert_resolution;
mod watcher;

// Export these for reference from sync code and tests.
pub use crate::bootstrap::{TX0, USER0, V1_PARTS};
pub use bootstrap::{TX0, USER0, V1_PARTS};

pub static TIMELINE_MAIN: i64 = 0;

pub use crate::schema::{AttributeBuilder, AttributeValidation};
pub use schema::{AttributeBuilder, AttributeValidation};

pub use crate::bootstrap::CORE_SCHEMA_VERSION;
pub use bootstrap::CORE_SCHEMA_VERSION;

use edn::symbols;

pub use crate::entids::DB_SCHEMA_CORE;
pub use entids::DB_SCHEMA_CORE;

pub use crate::db::{new_connection, TypedSQLValue};
pub use db::{new_connection, TypedSQLValue};

#[cfg(feature = "sqlcipher")]
pub use db::{change_encryption_key, new_connection_with_key};

pub use crate::watcher::TransactWatcher;
pub use watcher::TransactWatcher;

pub use crate::tx::{transact, transact_terms};
pub use tx::{transact, transact_terms};

pub use crate::tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};
pub use tx_observer::{InProgressObserverTransactWatcher, TxObservationService, TxObserver};

pub use crate::types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};
pub use types::{AttributeSet, Partition, PartitionMap, TransactableValue, DB};

pub fn to_namespaced_keyword(s: &str) -> Result<symbols::Keyword> {
let splits = [':', '/'];
@@ -29,18 +29,18 @@ use failure::ResultExt;
use std::collections::btree_map::Entry;
use std::collections::{BTreeMap, BTreeSet};

use crate::add_retract_alter_set::AddRetractAlterSet;
use crate::entids;
use add_retract_alter_set::AddRetractAlterSet;
use db_traits::errors::{DbErrorKind, Result};
use edn::symbols;
use entids;

use core_traits::{attribute, Entid, TypedValue, ValueType};

use mentat_core::{AttributeMap, Schema};

use crate::schema::{AttributeBuilder, AttributeValidation};
use schema::{AttributeBuilder, AttributeValidation};

use crate::types::EAV;
use types::EAV;

/// An alteration to an attribute.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]
@@ -111,7 +111,7 @@ fn update_attribute_map_from_schema_retractions(
let mut eas = BTreeMap::new();
for (e, a, v) in retractions.into_iter() {
if entids::is_a_schema_attribute(a) {
eas.entry(e).or_insert_with(Vec::new).push(a);
eas.entry(e).or_insert_with(|| vec![]).push(a);
suspect_retractions.push((e, a, v));
} else {
filtered_retractions.push((e, a, v));
@@ -248,7 +248,6 @@ pub fn update_attribute_map_from_entid_triples(
TypedValue::Ref(entids::DB_TYPE_REF) => { builder.value_type(ValueType::Ref); },
TypedValue::Ref(entids::DB_TYPE_STRING) => { builder.value_type(ValueType::String); },
TypedValue::Ref(entids::DB_TYPE_UUID) => { builder.value_type(ValueType::Uuid); },
TypedValue::Ref(entids::DB_TYPE_BYTES) => { builder.value_type(ValueType::Bytes); },
_ => bail!(DbErrorKind::BadSchemaAssertion(format!("Expected [... :db/valueType :db.type/*] but got [... :db/valueType {:?}] for entid {} and attribute {}", value, entid, attr)))
}
},
@@ -10,16 +10,16 @@

#![allow(dead_code)]

use crate::db::TypedSQLValue;
use db::TypedSQLValue;
use db_traits::errors::{DbErrorKind, Result};
use edn;
use edn::symbols;

use core_traits::{attribute, Attribute, Entid, KnownEntid, TypedValue, ValueType};

use crate::metadata;
use crate::metadata::AttributeAlteration;
use mentat_core::{AttributeMap, EntidMap, HasSchema, IdentMap, Schema};
use metadata;
use metadata::AttributeAlteration;

pub trait AttributeValidation {
fn validate<F>(&self, ident: F) -> Result<()>
@@ -362,7 +362,6 @@ impl SchemaTypeChecking for Schema {
(ValueType::Uuid, tv @ TypedValue::Uuid(_)) => Ok(tv),
(ValueType::Instant, tv @ TypedValue::Instant(_)) => Ok(tv),
(ValueType::Keyword, tv @ TypedValue::Keyword(_)) => Ok(tv),
(ValueType::Bytes, tv @ TypedValue::Bytes(_)) => Ok(tv),
// Ref coerces a little: we interpret some things depending on the schema as a Ref.
(ValueType::Ref, TypedValue::Long(x)) => Ok(TypedValue::Ref(x)),
(ValueType::Ref, TypedValue::Keyword(ref x)) => {
@@ -380,7 +379,6 @@ impl SchemaTypeChecking for Schema {
| (vt @ ValueType::Uuid, _)
| (vt @ ValueType::Instant, _)
| (vt @ ValueType::Keyword, _)
| (vt @ ValueType::Bytes, _)
| (vt @ ValueType::Ref, _) => {
bail!(DbErrorKind::BadValuePair(format!("{}", value), vt))
}
@@ -396,7 +394,7 @@ mod test {

fn add_attribute(schema: &mut Schema, ident: Keyword, entid: Entid, attribute: Attribute) {
schema.entid_map.insert(entid, ident.clone());
schema.ident_map.insert(ident, entid);
schema.ident_map.insert(ident.clone(), entid);

if attribute.component {
schema.component_attributes.push(entid);
@@ -10,7 +10,7 @@
 use std::ops::RangeFrom;

-use rusqlite::{self, params_from_iter};
+use rusqlite;

 use db_traits::errors::{DbErrorKind, Result};

@@ -22,16 +22,16 @@ use edn::InternSet;
 use edn::entities::OpType;

-use crate::db;
-use crate::db::TypedSQLValue;
+use db;
+use db::TypedSQLValue;

-use crate::tx::{transact_terms_with_action, TransactorAction};
+use tx::{transact_terms_with_action, TransactorAction};

-use crate::types::PartitionMap;
+use types::PartitionMap;

-use crate::internal_types::{Term, TermWithoutTempIds};
+use internal_types::{Term, TermWithoutTempIds};

-use crate::watcher::NullWatcher;
+use watcher::NullWatcher;

 /// Collects a supplied tx range into an DESC ordered Vec of valid txs,
 /// ensuring they all belong to the same timeline.

@@ -79,9 +79,12 @@ fn move_transactions_to(
         &format!(
             "UPDATE timelined_transactions SET timeline = {} WHERE tx IN {}",
             new_timeline,
-            crate::repeat_values(tx_ids.len(), 1)
+            ::repeat_values(tx_ids.len(), 1)
         ),
-        params_from_iter(tx_ids.iter()),
+        &(tx_ids
+            .iter()
+            .map(|x| x as &dyn rusqlite::types::ToSql)
+            .collect::<Vec<_>>()),
     )?;
     Ok(())
 }

@@ -106,7 +109,7 @@ fn reversed_terms_for(
 ) -> Result<Vec<TermWithoutTempIds>> {
     let mut stmt = conn.prepare("SELECT e, a, v, value_type_tag, tx, added FROM timelined_transactions WHERE tx = ? AND timeline = ? ORDER BY tx DESC")?;
     let rows = stmt.query_and_then(
-        &[&tx_id, &crate::TIMELINE_MAIN],
+        &[&tx_id, &::TIMELINE_MAIN],
         |row| -> Result<TermWithoutTempIds> {
             let op = if row.get(5)? {
                 OpType::Retract

@@ -138,7 +141,7 @@ pub fn move_from_main_timeline(
     txs_from: RangeFrom<Entid>,
     new_timeline: Entid,
 ) -> Result<(Option<Schema>, PartitionMap)> {
-    if new_timeline == crate::TIMELINE_MAIN {
+    if new_timeline == ::TIMELINE_MAIN {
         bail!(DbErrorKind::NotYetImplemented(
             "Can't move transactions to main timeline".to_string()
         ));

@@ -151,7 +154,7 @@ pub fn move_from_main_timeline(
         bail!(DbErrorKind::TimelinesMoveToNonEmpty);
     }

-    let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, crate::TIMELINE_MAIN)?;
+    let txs_to_move = collect_ordered_txs_to_move(conn, txs_from, ::TIMELINE_MAIN)?;

     let mut last_schema = None;
     for tx_id in &txs_to_move {

@@ -196,16 +199,16 @@ mod tests {
     use std::borrow::Borrow;

-    use crate::debug::TestConn;
+    use debug::TestConn;

-    use crate::bootstrap;
+    use bootstrap;

     // For convenience during testing.
     // Real consumers will perform similar operations when appropriate.
     fn update_conn(conn: &mut TestConn, schema: &Option<Schema>, pmap: &PartitionMap) {
         match schema {
-            Some(ref s) => conn.schema = s.clone(),
-            None => (),
+            &Some(ref s) => conn.schema = s.clone(),
+            &None => (),
         };
         conn.partition_map = pmap.clone();
     }

@@ -238,7 +241,7 @@ mod tests {
     assert_matches!(conn.transactions(), "[]");
     assert_eq!(new_partition_map, partition_map0);

-    conn.partition_map = partition_map0;
+    conn.partition_map = partition_map0.clone();
     let report2 = assert_transact!(conn, t);
     let partition_map2 = conn.partition_map.clone();
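Note on the UPDATE hunk above: the master side binds the tx ids with rusqlite's params_from_iter, while the branch builds a Vec of &dyn ToSql by hand. A minimal hedged sketch of the two binding styles (repeat_placeholders is a hypothetical stand-in for the crate's repeat_values helper):

    use rusqlite::{params_from_iter, Connection, Result};

    // Hypothetical helper: produces "?, ?, ?" for n parameters.
    fn repeat_placeholders(n: usize) -> String {
        std::iter::repeat("?").take(n).collect::<Vec<_>>().join(", ")
    }

    fn move_to_timeline(conn: &Connection, timeline: i64, tx_ids: &[i64]) -> Result<usize> {
        let sql = format!(
            "UPDATE timelined_transactions SET timeline = {} WHERE tx IN ({})",
            timeline,
            repeat_placeholders(tx_ids.len())
        );
        // Newer rusqlite: bind any iterator of ToSql values.
        conn.execute(&sql, params_from_iter(tx_ids.iter()))
        // Older style (branch side): collect
        // tx_ids.iter().map(|x| x as &dyn rusqlite::types::ToSql) into a Vec
        // and pass a slice of it instead.
    }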
27  db/src/tx.rs

@@ -49,17 +49,17 @@ use std::borrow::Cow;
 use std::collections::{BTreeMap, BTreeSet, VecDeque};
 use std::iter::once;

-use crate::db;
-use crate::db::MentatStoring;
-use crate::entids;
-use crate::internal_types::{
+use db;
+use db::MentatStoring;
+use db_traits::errors;
+use db_traits::errors::{DbErrorKind, Result};
+use edn::{InternSet, Keyword};
+use entids;
+use internal_types::{
     replace_lookup_ref, AEVTrie, AddAndRetract, KnownEntidOr, LookupRef, LookupRefOrTempId,
     TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithTempIdsAndLookupRefs,
     TermWithoutTempIds, TypedValueOr,
 };
-use db_traits::errors;
-use db_traits::errors::{DbErrorKind, Result};
-use edn::{InternSet, Keyword};

 use mentat_core::util::Either;

@@ -67,15 +67,15 @@ use core_traits::{attribute, now, Attribute, Entid, KnownEntid, TypedValue, Valu
 use mentat_core::{DateTime, Schema, TxReport, Utc};

-use crate::metadata;
-use crate::schema::SchemaBuilding;
-use crate::tx_checking;
-use crate::types::{AVMap, AVPair, PartitionMap, TransactableValue};
-use crate::upsert_resolution::{FinalPopulations, Generation};
-use crate::watcher::TransactWatcher;
 use edn::entities as entmod;
 use edn::entities::{AttributePlace, Entity, OpType, TempId};
+use metadata;
+use rusqlite;
+use schema::SchemaBuilding;
+use tx_checking;
+use types::{AVMap, AVPair, PartitionMap, TransactableValue};
+use upsert_resolution::{FinalPopulations, Generation};
+use watcher::TransactWatcher;

 /// Defines transactor's high level behaviour.
 pub(crate) enum TransactorAction {

@@ -1058,7 +1058,6 @@ where
     )
 }

-#[allow(clippy::too_many_arguments)]
 pub(crate) fn transact_terms_with_action<'conn, 'a, I, W>(
     conn: &'conn rusqlite::Connection,
     partition_map: PartitionMap,
@@ -14,7 +14,7 @@ use core_traits::{Entid, TypedValue, ValueType};
 use db_traits::errors::CardinalityConflict;

-use crate::internal_types::AEVTrie;
+use internal_types::AEVTrie;

 /// Map from found [e a v] to expected type.
 pub(crate) type TypeDisagreements = BTreeMap<(Entid, Entid, TypedValue), ValueType>;
@@ -24,12 +24,11 @@ use edn::entities::OpType;
 use db_traits::errors::Result;

-use crate::types::AttributeSet;
+use types::AttributeSet;

-use crate::watcher::TransactWatcher;
+use watcher::TransactWatcher;

 pub struct TxObserver {
-    #[allow(clippy::type_complexity)]
     notify_fn: Arc<Box<dyn Fn(&str, IndexMap<&Entid, &AttributeSet>) + Send + Sync>>,
     attributes: AttributeSet,
 }

@@ -132,7 +131,6 @@ impl TxObservationService {
     }

     let executor = self.executor.get_or_insert_with(|| {
-        #[allow(clippy::type_complexity)]
         let (tx, rx): (
             Sender<Box<dyn Command + Send>>,
             Receiver<Box<dyn Command + Send>>,
@@ -18,19 +18,19 @@ use std::collections::{BTreeMap, BTreeSet};
 use indexmap;
 use petgraph::unionfind;

-use crate::internal_types::{
+use db_traits::errors::{DbErrorKind, Result};
+use internal_types::{
     Population, TempIdHandle, TempIdMap, Term, TermWithTempIds, TermWithoutTempIds, TypedValueOr,
 };
-use crate::types::AVPair;
-use db_traits::errors::{DbErrorKind, Result};
+use types::AVPair;

 use mentat_core::util::Either::*;

 use core_traits::{attribute, Attribute, Entid, TypedValue};

-use crate::schema::SchemaBuilding;
 use edn::entities::OpType;
 use mentat_core::Schema;
+use schema::SchemaBuilding;

 /// A "Simple upsert" that looks like [:db/add TEMPID a v], where a is :db.unique/identity.
 #[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq)]

@@ -276,7 +276,7 @@ impl Generation {
     if attribute.unique == Some(attribute::Unique::Identity) {
         tempid_avs
             .entry((*a, Right(t2.clone())))
-            .or_insert_with(Vec::new)
+            .or_insert_with(|| vec![])
             .push(t1.clone());
     }
 }

@@ -286,7 +286,7 @@ impl Generation {
     if attribute.unique == Some(attribute::Unique::Identity) {
         tempid_avs
             .entry((*a, x.clone()))
-            .or_insert_with(Vec::new)
+            .or_insert_with(|| vec![])
             .push(t.clone());
     }
 }
@@ -67,11 +67,6 @@ fn test_from_sql_value_pair() {
             .unwrap(),
         TypedValue::typed_ns_keyword("db", "keyword")
     );
-    assert_eq!(
-        TypedValue::from_sql_value_pair(rusqlite::types::Value::Blob(vec![1, 2, 3, 42]), 15)
-            .unwrap(),
-        TypedValue::Bytes((vec![1, 2, 3, 42]).into())
-    );
 }

 #[test]
@@ -11,7 +11,7 @@ source "https://rubygems.org"
 # gem "jekyll", "~> 3.7.3"

 # This is the default theme for new Jekyll sites. You may change this to anything you like.
-gem "minima", "~> 2.5.1"
+gem "minima", "~> 2.0"

 # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
 # uncomment the line below. To upgrade, run `bundle update github-pages`.

@@ -19,9 +19,9 @@ gem "minima", "~> 2.5.1"
 # If you have any plugins, put them here!
 group :jekyll_plugins do
-  gem "jekyll-feed", "~> 0.15.1"
-  gem "github-pages", "~> 215"
-  gem "jekyll-commonmark-ghpages", "~> 0.1.6"
+  gem "jekyll-feed", "~> 0.9.3"
+  gem "github-pages", "~> 186"
+  gem "jekyll-commonmark-ghpages", "~> 0.1.5"
 end

 # Windows does not include zoneinfo files, so bundle the tzinfo-data gem
@@ -1,161 +1,148 @@
 GEM
   remote: https://rubygems.org/
   specs:
 [Gemfile.lock regenerated for the older GitHub Pages toolchain: github-pages 215 -> 186, jekyll 3.9.0 -> 3.7.3, jekyll-feed 0.15.1 -> 0.9.3, kramdown 2.3.1 -> 1.16.2, minima 2.5.1 -> 2.4.1, nokogiri 1.12.5 -> 1.8.3, rouge 3.26.0 -> 2.2.1, activesupport 6.0.4 -> 4.2.10, with the remaining transitive gem pins adjusted to match.]

 PLATFORMS
   ruby

 DEPENDENCIES
-  github-pages (~> 215)
-  jekyll-commonmark-ghpages (~> 0.1.6)
-  jekyll-feed (~> 0.15.1)
-  minima (~> 2.5.1)
+  github-pages (~> 186)
+  jekyll-commonmark-ghpages (~> 0.1.5)
+  jekyll-feed (~> 0.9.3)
+  minima (~> 2.0)
   tzinfo-data

 BUNDLED WITH
-   2.2.21
+   1.16.2
@@ -10,21 +10,19 @@ description = "EDN parser for Project Mentat"
 readme = "./README.md"

 [dependencies]
-chrono = "~0.4"
-itertools = "~0.10"
-num = "~0.4"
-ordered-float = "~2.8"
-pretty = "~0.12"
-uuid = { version = "~1", features = ["v4", "serde"] }
-serde = { version = "~1.0", optional = true }
-serde_derive = { version = "~1.0", optional = true }
-peg = "~0.8"
-bytes = "1.0.1"
-hex = "0.4.3"
+chrono = "0.4"
+itertools = "0.8"
+num = "0.2"
+ordered-float = "1.0"
+pretty = "0.9"
+uuid = { version = "0.8", features = ["v4", "serde"] }
+serde = { version = "1.0", optional = true }
+serde_derive = { version = "1.0", optional = true }
+peg = "0.6"

 [dev-dependencies]
-serde_test = "~1.0"
-serde_json = "~1.0"
+serde_test = "1.0"
+serde_json = "1.0"

 [features]
 serde_support = ["serde", "serde_derive"]
@@ -13,11 +13,11 @@
 use std::collections::BTreeMap;
 use std::fmt;

-use crate::value_rc::ValueRc;
+use value_rc::ValueRc;

-use crate::symbols::{Keyword, PlainSymbol};
+use symbols::{Keyword, PlainSymbol};

-use crate::types::ValueAndSpan;
+use types::ValueAndSpan;

 /// `EntityPlace` and `ValuePlace` embed values, either directly (i.e., `ValuePlace::Atom`) or
 /// indirectly (i.e., `EntityPlace::LookupRef`). In order to maintain the graph of `Into` and
@@ -14,7 +14,7 @@ use std::collections::HashSet;
 use std::hash::Hash;
 use std::ops::{Deref, DerefMut};

-use crate::ValueRc;
+use ValueRc;

 /// An `InternSet` allows to "intern" some potentially large values, maintaining a single value
 /// instance owned by the `InternSet` and leaving consumers with lightweight ref-counted handles to
@@ -8,9 +8,7 @@
 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
 // specific language governing permissions and limitations under the License.

-extern crate bytes;
 extern crate chrono;
-extern crate hex;
 extern crate itertools;
 extern crate num;
 extern crate ordered_float;

@@ -27,7 +25,7 @@ extern crate serde_derive;
 pub mod entities;
 pub mod intern_set;
-pub use crate::intern_set::InternSet;
+pub use intern_set::InternSet;
 // Intentionally not pub.
 pub mod matcher;
 mod namespaceable_name;

@@ -37,22 +35,20 @@ pub mod symbols;
 pub mod types;
 pub mod utils;
 pub mod value_rc;
-pub use crate::value_rc::{Cloned, FromRc, ValueRc};
+pub use value_rc::{Cloned, FromRc, ValueRc};

 // Re-export the types we use.
-use bytes::Bytes;
 pub use chrono::{DateTime, Utc};
-use hex::decode;
 pub use num::BigInt;
 pub use ordered_float::OrderedFloat;
 pub use uuid::Uuid;

 // Export from our modules.
-pub use crate::types::{
+pub use types::{
     FromMicros, FromMillis, Span, SpannedValue, ToMicros, ToMillis, Value, ValueAndSpan,
 };

-pub use crate::symbols::{Keyword, NamespacedSymbol, PlainSymbol};
+pub use symbols::{Keyword, NamespacedSymbol, PlainSymbol};

 use std::collections::{BTreeMap, BTreeSet, LinkedList};
 use std::f64::{INFINITY, NAN, NEG_INFINITY};

@@ -60,8 +56,8 @@ use std::iter::FromIterator;
 use chrono::TimeZone;

-use crate::entities::*;
-use crate::query::FromValue;
+use entities::*;
+use query::FromValue;

 // Goal: Be able to parse https://github.com/edn-format/edn
 // Also extensible to help parse http://docs.datomic.com/query.html

@@ -128,7 +124,7 @@ peg::parser!(pub grammar parse() for str {
     // result = r#""foo\\bar""#
     // For the typical case, string_normal_chars will match multiple, leading to a single-element vec.
     pub rule raw_text() -> String = "\"" t:((string_special_char() / string_normal_chars())*) "\""
-        { t.join("") }
+        { t.join(&"") }

     pub rule text() -> SpannedValue
         = v:raw_text() { SpannedValue::Text(v) }

@@ -153,16 +149,16 @@ peg::parser!(pub grammar parse() for str {
     "#instmicros" whitespace()+ d:$( digit()+ ) {
         let micros = d.parse::<i64>().unwrap();
         let seconds: i64 = micros / 1_000_000;
-        let nanos: u32 = ((micros % 1_000_000).unsigned_abs() as u32) * 1000;
-        Utc.timestamp_opt(seconds, nanos).unwrap()
+        let nanos: u32 = ((micros % 1_000_000).abs() as u32) * 1000;
+        Utc.timestamp(seconds, nanos)
     }

     rule inst_millis() -> DateTime<Utc> =
     "#instmillis" whitespace()+ d:$( digit()+ ) {
         let millis = d.parse::<i64>().unwrap();
         let seconds: i64 = millis / 1000;
-        let nanos: u32 = ((millis % 1000).unsigned_abs() as u32) * 1_000_000;
-        Utc.timestamp_opt(seconds, nanos).unwrap()
+        let nanos: u32 = ((millis % 1000).abs() as u32) * 1_000_000;
+        Utc.timestamp(seconds, nanos)
     }

     rule inst() -> SpannedValue = t:(inst_millis() / inst_micros() / inst_string())

@@ -176,14 +172,6 @@ peg::parser!(pub grammar parse() for str {
     pub rule uuid() -> SpannedValue = "#uuid" whitespace()+ u:uuid_string()
         { SpannedValue::Uuid(u) }

-    rule byte_buffer() -> Bytes =
-        u:$( hex()+ ) {
-            let b = decode(u).expect("this is a valid hex byte string");
-            Bytes::copy_from_slice(&b)
-        }
-    pub rule bytes() -> SpannedValue = "#bytes" whitespace()+ u:byte_buffer()
-        { SpannedValue::Bytes(u) }

     rule namespace_divider() = "."
     rule namespace_separator() = "/"

@@ -231,7 +219,7 @@ peg::parser!(pub grammar parse() for str {
     // Note: It's important that float comes before integer or the parser assumes that floats are integers and fails to parse.
     pub rule value() -> ValueAndSpan =
-        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / bytes() / text() / keyword() / symbol() / list() / vector() / map() / set() ) end:position!() __ {
+        __ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / text() / keyword() / symbol() / list() / vector() / map() / set()) end:position!() __ {
             ValueAndSpan {
                 inner: v,
                 span: Span::new(start, end)

@@ -323,7 +311,7 @@ peg::parser!(pub grammar parse() for str {
         / __ v:atom() __ { ValuePlace::Atom(v) }

     pub rule entity() -> Entity<ValueAndSpan>
-        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e, a: AttributePlace::Entid(a), v } }
+        = __ "[" __ op:(op()) __ e:(entity_place()) __ a:(forward_entid()) __ v:(value_place()) __ "]" __ { Entity::AddOrRetract { op, e: e, a: AttributePlace::Entid(a), v: v } }
         / __ "[" __ op:(op()) __ e:(value_place()) __ a:(backward_entid()) __ v:(entity_place()) __ "]" __ { Entity::AddOrRetract { op, e: v, a: AttributePlace::Entid(a), v: e } }
         / __ map:map_notation() __ { Entity::MapNotation(map) }
         / expected!("entity")

@@ -365,7 +353,7 @@ peg::parser!(pub grammar parse() for str {
             query::PullAttributeSpec::Attribute(
                 query::NamedPullAttribute {
                     attribute,
-                    alias,
+                    alias: alias,
                 })
         }

@@ -482,7 +470,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::Pred(
                 query::Predicate {
                     operator: func.0,
-                    args,
+                    args: args,
                 })
         }

@@ -491,7 +479,7 @@ peg::parser!(pub grammar parse() for str {
             query::WhereClause::WhereFn(
                 query::WhereFn {
                     operator: func.0,
-                    args,
+                    args: args,
                     binding,
                 })
         }
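A side note on the #instmicros / #instmillis hunks above: they track a chrono API difference. Utc.timestamp() panics on out-of-range input and is deprecated in newer chrono releases in favour of timestamp_opt(), which returns a LocalResult to unwrap or handle; i64::unsigned_abs() likewise replaces the .abs()-then-cast pattern. A minimal hedged sketch, not taken from the diff:

    use chrono::{DateTime, TimeZone, Utc};

    fn instant_from_millis(ms: i64) -> DateTime<Utc> {
        let secs = ms / 1_000;
        let nanos = ((ms % 1_000).unsigned_abs() as u32) * 1_000_000;
        // Newer chrono: timestamp_opt returns LocalResult<DateTime<Utc>>.
        Utc.timestamp_opt(secs, nanos).unwrap()
        // Older chrono (branch side): Utc.timestamp(secs, nanos)
    }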
@@ -12,8 +12,8 @@ use itertools::diff_with;
 use std::cell::RefCell;
 use std::collections::HashMap;

-use crate::symbols;
-use crate::types::Value;
+use symbols;
+use types::Value;

 /// A trait defining pattern matching rules for any given pattern of type `T`.
 trait PatternMatchingRules<'a, T> {

@@ -87,7 +87,7 @@ impl<'a> Matcher<'a> {
     where
         T: PatternMatchingRules<'a, Value>,
     {
-        use crate::Value::*;
+        use Value::*;

         if T::matches_any(pattern) {
             true

@@ -140,7 +140,7 @@ impl Value {

 #[cfg(test)]
 mod test {
-    use crate::parse;
+    use parse;

     macro_rules! assert_match {
         ( $pattern:tt, $value:tt, $expected:expr ) => {
@@ -121,7 +121,7 @@ impl NamespaceableName {
     if name.starts_with('_') {
         Self::new(self.namespace(), &name[1..])
     } else {
-        Self::new(self.namespace(), format!("_{}", name))
+        Self::new(self.namespace(), &format!("_{}", name))
     }
 }

@@ -205,8 +205,8 @@ impl fmt::Display for NamespaceableName {
 // friendly and automatic (e.g. `derive`d), and just pass all work off to it in our custom
 // implementation of Serialize and Deserialize.
 #[cfg(feature = "serde_support")]
-#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "serde_support", serde(rename = "NamespaceableName"))]
+#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
 struct SerializedNamespaceableName<'a> {
     namespace: Option<&'a str>,
     name: &'a str,

@@ -309,6 +309,17 @@ mod test {
     arr.sort();

-    assert_eq!(arr, [n0, n2, n1, n3, n4, n5, n6,]);
+    assert_eq!(
+        arr,
+        [
+            n0.clone(),
+            n2.clone(),
+            n1.clone(),
+            n3.clone(),
+            n4.clone(),
+            n5.clone(),
+            n6.clone(),
+        ]
+    );
     }
 }
@@ -16,7 +16,7 @@ use pretty;
 use std::borrow::Cow;
 use std::io;

-use crate::types::Value;
+use types::Value;

 impl Value {
     /// Return a pretty string representation of this `Value`.

@@ -57,11 +57,10 @@ impl Value {
     {
         let open = open.into();
         let n = open.len() as isize;
-        let i = {
-            let this = vs.into_iter().map(|v| v.as_doc(allocator));
-            let element = allocator.line();
-            Itertools::intersperse(this, element)
-        };
+        let i = vs
+            .into_iter()
+            .map(|v| v.as_doc(allocator))
+            .intersperse(allocator.line());
         allocator
             .text(open)
             .append(allocator.concat(i).nest(n))

@@ -82,14 +81,11 @@ impl Value {
     Value::List(ref vs) => self.bracket(pp, "(", vs, ")"),
     Value::Set(ref vs) => self.bracket(pp, "#{", vs, "}"),
     Value::Map(ref vs) => {
-        let xs = {
-            let this = vs
+        let xs = vs
             .iter()
             .rev()
-            .map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group());
-            let element = pp.line();
-            Itertools::intersperse(this, element)
-        };
+            .map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group())
+            .intersperse(pp.line());
         pp.text("{")
             .append(pp.concat(xs).nest(1))
             .append(pp.text("}"))

@@ -101,7 +97,7 @@ impl Value {
     Value::Text(ref v) => pp.text("\"").append(v.as_str()).append("\""),
     Value::Uuid(ref u) => pp
         .text("#uuid \"")
-        .append(u.hyphenated().to_string())
+        .append(u.to_hyphenated().to_string())
         .append("\""),
     Value::Instant(ref v) => pp
         .text("#inst \"")

@@ -114,7 +110,7 @@ impl Value {

 #[cfg(test)]
 mod test {
-    use crate::parse;
+    use parse;

     #[test]
     fn test_pp_io() {
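The pretty-printer hunks above swap between Itertools::intersperse called as a fully qualified function (master) and the .intersperse() method call (branch). The qualified form sidesteps the ambiguity warning against the unstable Iterator::intersperse on newer toolchains. A small hedged sketch of the qualified form:

    use itertools::Itertools;

    fn join_words(words: Vec<&str>) -> String {
        // Equivalent to words.into_iter().intersperse(" ") but immune to the
        // std/itertools name-collision lint.
        Itertools::intersperse(words.into_iter(), " ").collect()
    }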
@@ -35,11 +35,11 @@ use std;
 use std::fmt;
 use std::rc::Rc;

-use crate::{BigInt, DateTime, OrderedFloat, Utc, Uuid};
+use {BigInt, DateTime, OrderedFloat, Utc, Uuid};

-use crate::value_rc::{FromRc, ValueRc};
+use value_rc::{FromRc, ValueRc};

-pub use crate::{Keyword, PlainSymbol};
+pub use {Keyword, PlainSymbol};

 pub type SrcVarName = String; // Do not include the required syntactic '$'.

@@ -64,15 +64,15 @@ impl Variable {
 }

 pub trait FromValue<T> {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<T>;
+    fn from_value(v: &::ValueAndSpan) -> Option<T>;
 }

 /// If the provided EDN value is a PlainSymbol beginning with '?', return
 /// it wrapped in a Variable. If not, return None.
 /// TODO: intern strings. #398.
 impl FromValue<Variable> for Variable {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<Variable> {
-        if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
+    fn from_value(v: &::ValueAndSpan) -> Option<Variable> {
+        if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
             Variable::from_symbol(s)
         } else {
             None

@@ -115,8 +115,8 @@ impl std::fmt::Display for Variable {
 pub struct QueryFunction(pub PlainSymbol);

 impl FromValue<QueryFunction> for QueryFunction {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<QueryFunction> {
-        if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
+    fn from_value(v: &::ValueAndSpan) -> Option<QueryFunction> {
+        if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
             QueryFunction::from_symbol(s)
         } else {
             None

@@ -154,8 +154,8 @@ pub enum SrcVar {
 }

 impl FromValue<SrcVar> for SrcVar {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<SrcVar> {
-        if let crate::SpannedValue::PlainSymbol(ref s) = v.inner {
+    fn from_value(v: &::ValueAndSpan) -> Option<SrcVar> {
+        if let ::SpannedValue::PlainSymbol(ref s) = v.inner {
             SrcVar::from_symbol(s)
         } else {
             None

@@ -213,8 +213,8 @@ pub enum FnArg {
 }

 impl FromValue<FnArg> for FnArg {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<FnArg> {
-        use crate::SpannedValue::*;
+    fn from_value(v: &::ValueAndSpan) -> Option<FnArg> {
+        use SpannedValue::*;
         match v.inner {
             Integer(x) => Some(FnArg::EntidOrInteger(x)),
             PlainSymbol(ref x) if x.is_src_symbol() => SrcVar::from_symbol(x).map(FnArg::SrcVar),

@@ -233,7 +233,7 @@ impl FromValue<FnArg> for FnArg {
             {
                 Some(FnArg::Constant(x.clone().into()))
             }
-            Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) | Bytes(_) => None,
+            Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) => None,
         }
     }
 }

@@ -316,16 +316,16 @@ impl PatternNonValuePlace {
 }

 impl FromValue<PatternNonValuePlace> for PatternNonValuePlace {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<PatternNonValuePlace> {
+    fn from_value(v: &::ValueAndSpan) -> Option<PatternNonValuePlace> {
         match v.inner {
-            crate::SpannedValue::Integer(x) => {
+            ::SpannedValue::Integer(x) => {
                 if x >= 0 {
                     Some(PatternNonValuePlace::Entid(x))
                 } else {
                     None
                 }
             }
-            crate::SpannedValue::PlainSymbol(ref x) => {
+            ::SpannedValue::PlainSymbol(ref x) => {
                 if x.0.as_str() == "_" {
                     Some(PatternNonValuePlace::Placeholder)
                 } else if let Some(v) = Variable::from_symbol(x) {

@@ -334,7 +334,7 @@ impl FromValue<PatternNonValuePlace> for PatternNonValuePlace {
                     None
                 }
             }
-            crate::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
+            ::SpannedValue::Keyword(ref x) => Some(x.clone().into()),
             _ => None,
         }
     }

@@ -371,46 +371,45 @@ impl From<Keyword> for PatternValuePlace {
 }

 impl FromValue<PatternValuePlace> for PatternValuePlace {
-    fn from_value(v: &crate::ValueAndSpan) -> Option<PatternValuePlace> {
+    fn from_value(v: &::ValueAndSpan) -> Option<PatternValuePlace> {
         match v.inner {
-            crate::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
-            crate::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
+            ::SpannedValue::Integer(x) => Some(PatternValuePlace::EntidOrInteger(x)),
+            ::SpannedValue::PlainSymbol(ref x) if x.0.as_str() == "_" => {
                 Some(PatternValuePlace::Placeholder)
             }
-            crate::SpannedValue::PlainSymbol(ref x) => {
+            ::SpannedValue::PlainSymbol(ref x) => {
                 Variable::from_symbol(x).map(PatternValuePlace::Variable)
             }
-            crate::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
-            crate::SpannedValue::Boolean(x) => {
+            ::SpannedValue::Keyword(ref x) if x.is_namespaced() => Some(x.clone().into()),
+            ::SpannedValue::Boolean(x) => {
                 Some(PatternValuePlace::Constant(NonIntegerConstant::Boolean(x)))
             }
-            crate::SpannedValue::Float(x) => {
+            ::SpannedValue::Float(x) => {
                 Some(PatternValuePlace::Constant(NonIntegerConstant::Float(x)))
             }
-            crate::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
+            ::SpannedValue::BigInteger(ref x) => Some(PatternValuePlace::Constant(
                 NonIntegerConstant::BigInteger(x.clone()),
             )),
-            crate::SpannedValue::Instant(x) => {
+            ::SpannedValue::Instant(x) => {
                 Some(PatternValuePlace::Constant(NonIntegerConstant::Instant(x)))
            }
-            crate::SpannedValue::Text(ref x) =>
+            ::SpannedValue::Text(ref x) =>
             // TODO: intern strings. #398.
             {
                 Some(PatternValuePlace::Constant(x.clone().into()))
             }
-            crate::SpannedValue::Uuid(ref u) => {
+            ::SpannedValue::Uuid(ref u) => {
                 Some(PatternValuePlace::Constant(NonIntegerConstant::Uuid(*u)))
             }

             // These don't appear in queries.
-            crate::SpannedValue::Nil => None,
-            crate::SpannedValue::NamespacedSymbol(_) => None,
-            crate::SpannedValue::Keyword(_) => None, // … yet.
-            crate::SpannedValue::Map(_) => None,
-            crate::SpannedValue::List(_) => None,
-            crate::SpannedValue::Set(_) => None,
-            crate::SpannedValue::Vector(_) => None,
-            crate::SpannedValue::Bytes(_) => None,
+            ::SpannedValue::Nil => None,
+            ::SpannedValue::NamespacedSymbol(_) => None,
+            ::SpannedValue::Keyword(_) => None, // … yet.
+            ::SpannedValue::Map(_) => None,
+            ::SpannedValue::List(_) => None,
+            ::SpannedValue::Set(_) => None,
+            ::SpannedValue::Vector(_) => None,
         }
     }
 }

@@ -883,7 +882,10 @@ pub enum UnifyVars {

 impl WhereClause {
     pub fn is_pattern(&self) -> bool {
-        matches!(self, WhereClause::Pattern(_))
+        match self {
+            WhereClause::Pattern(_) => true,
+            _ => false,
+        }
     }
 }

@@ -1028,8 +1030,8 @@ impl ParsedQuery {
     Ok(ParsedQuery {
         find_spec: find_spec.ok_or("expected :find")?,
         default_source: SrcVar::DefaultSrc,
-        with: with.unwrap_or_default(),
-        in_vars: in_vars.unwrap_or_default(),
+        with: with.unwrap_or_else(|| vec![]),
+        in_vars: in_vars.unwrap_or_else(|| vec![]),
         in_sources: BTreeSet::default(),
         limit: limit.unwrap_or(Limit::None),
         where_clauses: where_clauses.ok_or("expected :where")?,
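Two idioms recur in the query.rs hunks above: matches! versus an explicit match returning true/false, and Option::unwrap_or_default() versus unwrap_or_else(|| vec![]). A hedged sketch with illustrative names, not the crate's real types:

    enum Clause {
        Pattern(u32),
        Other,
    }

    impl Clause {
        // Newer, clippy-preferred form (master side).
        fn is_pattern(&self) -> bool {
            matches!(self, Clause::Pattern(_))
        }
        // Equivalent explicit form (branch side).
        fn is_pattern_explicit(&self) -> bool {
            match self {
                Clause::Pattern(_) => true,
                _ => false,
            }
        }
    }

    fn with_vars(with: Option<Vec<String>>) -> Vec<String> {
        // unwrap_or_default() yields an empty Vec, same as unwrap_or_else(|| vec![]).
        with.unwrap_or_default()
    }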
@@ -10,7 +10,7 @@
 use std::fmt::{Display, Formatter, Write};

-use crate::namespaceable_name::NamespaceableName;
+use namespaceable_name::NamespaceableName;

 #[macro_export]
 macro_rules! ns_keyword {
@@ -8,7 +8,7 @@
 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
 // specific language governing permissions and limitations under the License.

-#![allow(redundant_semicolons)]
+#![allow(redundant_semicolon)]

 use std::cmp::{Ord, Ordering, PartialOrd};
 use std::collections::{BTreeMap, BTreeSet, LinkedList};

@@ -25,10 +25,8 @@ use num::BigInt;
 use ordered_float::OrderedFloat;
 use uuid::Uuid;

-use crate::symbols;
+use symbols;

-use bytes::Bytes;
-use hex::encode;
 /// Value represents one of the allowed values in an EDN string.
 #[derive(PartialEq, Eq, Hash, Clone, Debug)]
 pub enum Value {

@@ -54,7 +52,6 @@ pub enum Value {
     // See https://internals.rust-lang.org/t/implementing-hash-for-hashset-hashmap/3817/1
     Set(BTreeSet<Value>),
     Map(BTreeMap<Value, Value>),
-    Bytes(Bytes),
 }

 /// `SpannedValue` is the parallel to `Value` but used in `ValueAndSpan`.

@@ -76,7 +73,6 @@ pub enum SpannedValue {
     List(LinkedList<ValueAndSpan>),
     Set(BTreeSet<ValueAndSpan>),
     Map(BTreeMap<ValueAndSpan, ValueAndSpan>),
-    Bytes(Bytes),
 }

 /// Span represents the current offset (start, end) into the input string.

@@ -143,7 +139,7 @@ impl Value {
     /// But right now, it's used in the bootstrapper. We'll fix that soon.
     pub fn with_spans(self) -> ValueAndSpan {
         let s = self.to_pretty(120).unwrap();
-        use crate::parse;
+        use parse;
         let with_spans = parse::value(&s).unwrap();
         assert_eq!(self, with_spans.clone().without_spans());
         with_spans

@@ -176,7 +172,6 @@ impl From<SpannedValue> for Value {
             .map(|(x, y)| (x.without_spans(), y.without_spans()))
             .collect(),
         ),
-        SpannedValue::Bytes(b) => Value::Bytes(b),
     }
 }
 }

@@ -214,7 +209,10 @@ macro_rules! def_from_option {
 macro_rules! def_is {
     ($name: ident, $pat: pat) => {
         pub fn $name(&self) -> bool {
-            matches!(*self, $pat)
+            match *self {
+                $pat => true,
+                _ => false,
+            }
         }
     };
 }

@@ -333,7 +331,6 @@ macro_rules! def_common_value_methods {
     def_is!(is_list, $t::List(_));
     def_is!(is_set, $t::Set(_));
     def_is!(is_map, $t::Map(_));
-    def_is!(is_bytes, $t::Bytes(_));

     pub fn is_keyword(&self) -> bool {
         match self {

@@ -366,7 +363,6 @@ macro_rules! def_common_value_methods {
     def_as_ref!(as_uuid, $t::Uuid, Uuid);
     def_as_ref!(as_symbol, $t::PlainSymbol, symbols::PlainSymbol);
     def_as_ref!(as_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol);
-    def_as_ref!(as_bytes, $t::Bytes, Bytes);

     pub fn as_keyword(&self) -> Option<&symbols::Keyword> {
         match self {

@@ -404,7 +400,6 @@ macro_rules! def_common_value_methods {
     def_into!(into_uuid, $t::Uuid, Uuid,);
     def_into!(into_symbol, $t::PlainSymbol, symbols::PlainSymbol,);
     def_into!(into_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol,);
-    def_into!(into_bytes, $t::Bytes, Bytes,);

     pub fn into_keyword(self) -> Option<symbols::Keyword> {
         match self {

@@ -475,7 +470,6 @@ macro_rules! def_common_value_methods {
     $t::List(_) => 13,
     $t::Set(_) => 14,
     $t::Map(_) => 15,
-    $t::Bytes(_) => 16,
     }
 }

@@ -496,7 +490,6 @@ macro_rules! def_common_value_methods {
     $t::List(_) => true,
     $t::Set(_) => true,
     $t::Map(_) => true,
-    $t::Bytes(_) => false,
     }
 }

@@ -534,7 +527,6 @@ macro_rules! def_common_value_ord {
     (&$t::List(ref a), &$t::List(ref b)) => b.cmp(a),
     (&$t::Set(ref a), &$t::Set(ref b)) => b.cmp(a),
     (&$t::Map(ref a), &$t::Map(ref b)) => b.cmp(a),
-    (&$t::Bytes(ref a), &$t::Bytes(ref b)) => b.cmp(a),
     _ => $value.precedence().cmp(&$other.precedence()),
     }
 };

@@ -569,7 +561,7 @@ macro_rules! def_common_value_display {
     }
     // TODO: EDN escaping.
     $t::Text(ref v) => write!($f, "\"{}\"", v),
-    $t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.hyphenated().to_string()),
+    $t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.to_hyphenated().to_string()),
     $t::PlainSymbol(ref v) => v.fmt($f),
     $t::NamespacedSymbol(ref v) => v.fmt($f),
     $t::Keyword(ref v) => v.fmt($f),

@@ -601,10 +593,6 @@ macro_rules! def_common_value_display {
         }
         write!($f, " }}")
     }
-    $t::Bytes(ref v) => {
-        let s = encode(v);
-        write!($f, "#bytes {}", s)
-    }
     }
 };
 }

@@ -668,7 +656,7 @@ pub trait FromMicros {
 impl FromMicros for DateTime<Utc> {
     fn from_micros(ts: i64) -> Self {
-        Utc.timestamp_opt(ts / 1_000_000, ((ts % 1_000_000).unsigned_abs() as u32) * 1_000).unwrap()
+        Utc.timestamp(ts / 1_000_000, ((ts % 1_000_000).abs() as u32) * 1_000)
     }
 }

@@ -690,7 +678,7 @@ pub trait FromMillis {
 impl FromMillis for DateTime<Utc> {
     fn from_millis(ts: i64) -> Self {
-        Utc.timestamp_opt(ts / 1_000, ((ts % 1_000).unsigned_abs() as u32) * 1_000).unwrap()
+        Utc.timestamp(ts / 1_000, ((ts % 1_000).abs() as u32) * 1_000)
     }
 }

@@ -719,7 +707,7 @@ mod test {
     use std::f64;
     use std::iter::FromIterator;

-    use crate::parse;
+    use parse;

     use chrono::{DateTime, Utc};
     use num::BigInt;

@@ -752,12 +740,12 @@ mod test {
     fn test_print_edn() {
     assert_eq!("1234N", Value::from_bigint("1234").unwrap().to_string());

-    let string = "[ 1 2 ( 7.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";
+    let string = "[ 1 2 ( 3.14 ) #{ 4N } { foo/bar 42 :baz/boz 43 } [ ] :five :six/seven eight nine/ten true false nil #f NaN #f -Infinity #f +Infinity ]";

     let data = Value::Vector(vec![
         Value::Integer(1),
         Value::Integer(2),
-        Value::List(LinkedList::from_iter(vec![Value::from_float(7.14)])),
+        Value::List(LinkedList::from_iter(vec![Value::from_float(3.14)])),
         Value::Set(BTreeSet::from_iter(vec![Value::from_bigint("4").unwrap()])),
         Value::Map(BTreeMap::from_iter(vec![
             (Value::from_symbol("foo", "bar"), Value::Integer(42)),

@@ -859,10 +847,10 @@ mod test {
     assert!(n_v.clone().into_keyword().is_some());
     assert!(n_v.clone().into_plain_keyword().is_none());
-    assert!(n_v.into_namespaced_keyword().is_some());
+    assert!(n_v.clone().into_namespaced_keyword().is_some());

     assert!(p_v.clone().into_keyword().is_some());
     assert!(p_v.clone().into_plain_keyword().is_some());
-    assert!(p_v.into_namespaced_keyword().is_none());
+    assert!(p_v.clone().into_namespaced_keyword().is_none());
     }
 }

@@ -10,7 +10,7 @@
 #![allow(dead_code)]

-use crate::types::Value;
+use types::Value;

 /// Merge the EDN `Value::Map` instance `right` into `left`. Returns `None` if either `left` or
 /// `right` is not a `Value::Map`.

@@ -21,9 +21,9 @@ use crate::types::Value;
 /// TODO: implement `merge` for [Value], following the `concat`/`SliceConcatExt` pattern.
 pub fn merge(left: &Value, right: &Value) -> Option<Value> {
     match (left, right) {
-        (Value::Map(l), Value::Map(r)) => {
+        (&Value::Map(ref l), &Value::Map(ref r)) => {
             let mut result = l.clone();
-            result.extend(r.clone());
+            result.extend(r.clone().into_iter());
             Some(Value::Map(result))
         }
         _ => None,

@@ -82,7 +82,6 @@ fn_parse_into_value!(vector);
 fn_parse_into_value!(set);
 fn_parse_into_value!(map);
 fn_parse_into_value!(value);
-fn_parse_into_value!(bytes);

 #[test]
 fn test_nil() {

@@ -317,38 +316,6 @@ fn test_uuid() {
     assert_eq!(value.to_pretty(100).unwrap(), s);
 }

-#[test]
-fn test_bytes() {
-    assert!(parse::bytes("#bytes01 ").is_err()); // No whitespace.
-    assert!(parse::bytes("#bytes _ZZ").is_err()); // No whitespace.
-    assert!(parse::bytes("#bytes 01 ").is_err()); // No whitespace.
-    assert!(parse::bytes("#01 ").is_err()); // No whitespace.
-
-    let expected = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
-    let s = format!("{} {}", "#bytes", hex::encode(expected.clone()));
-    let actual: Value = parse::bytes(&s).expect("parse success").into();
-    assert!(actual.is_bytes());
-    assert_eq!(expected, actual.as_bytes().unwrap().to_vec());
-
-    assert_eq!(
-        self::bytes("#bytes 010203050403022a").unwrap(),
-        Value::Bytes(bytes::Bytes::copy_from_slice(&vec!(
-            1, 2, 3, 5, 4, 3, 2, 42
-        )))
-    );
-    let data =
-        r#"[ { :test/instant #inst "2018-01-01T11:00:00Z" :test/bytes #bytes 010203050403022a } ]"#;
-    let result = parse::value(data).unwrap().without_spans().to_string();
-    assert_eq!(data, result);
-}
-
-#[test]
-fn test_entities() {
-    let d2 = r#"[ { :test/boolean true :test/long 33 :test/double 1.4 :test/string "foo" :test/keyword :foo/bar :test/uuid #uuid "12341234-1234-1234-1234-123412341234" :test/instant #inst "2018-01-01T11:00:00Z" :test/ref 1 :test/bytes #bytes 010203050403022a } ]"#;
-    let r2 = parse::entities(d2);
-    assert!(r2.is_ok());
-}

 #[test]
 fn test_inst() {
     assert!(parse::value("#inst\"2016-01-01T11:00:00.000Z\"").is_err()); // No whitespace.

@@ -617,12 +584,6 @@ fn test_value() {
     value("#inst \"2017-04-28T20:23:05.187Z\"").unwrap(),
     Instant(Utc.timestamp(1493410985, 187000000))
     );
-    assert_eq!(
-        value("#bytes 010203050403022a").unwrap(),
-        Bytes(bytes::Bytes::copy_from_slice(&vec!(
-            1, 2, 3, 5, 4, 3, 2, 42
-        )))
-    );
 }

 #[test]

@@ -1536,7 +1497,7 @@ macro_rules! def_test_into_type {
 }

 #[test]
-#[cfg_attr(feature = "cargo-clippy", allow(clippy::float_cmp, clippy::unit_cmp))]
+#[cfg_attr(feature = "cargo-clippy", allow(float_cmp))]
 fn test_is_and_as_type_helper_functions() {
     let max_i64 = i64::max_value().to_bigint().unwrap();
     let bigger = &max_i64 * &max_i64;
@@ -1,6 +1,6 @@
 [package]
 name = "mentat_ffi"
-version = "0.0.2"
+version = "0.0.1"
 authors = ["Emily Toop <etoop@mozilla.com>"]

 [lib]

@@ -13,7 +13,7 @@ sqlcipher = ["mentat/sqlcipher"]
 bundled_sqlite3 = ["mentat/bundled_sqlite3"]

 [dependencies]
-libc = "~0.2"
+libc = "0.2"

 [dependencies.mentat]
 path = "../"
305  ffi/src/lib.rs

@@ -70,7 +70,6 @@
 //! (for `Result<(), T>`). Callers are responsible for freeing the `message` field of `ExternError`.

 #![allow(unused_doc_comments)]
-#![allow(clippy::missing_safety_doc)]

 extern crate core;
 extern crate libc;

@@ -177,12 +176,6 @@ pub unsafe extern "C" fn store_open(uri: *const c_char, error: *mut ExternError)
 }

 /// Variant of store_open that opens an encrypted database.
 ///
-/// # Safety
-///
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `store_destroy` is provided for releasing the memory for this
-/// pointer type.
 #[cfg(feature = "sqlcipher")]
 #[no_mangle]
 pub unsafe extern "C" fn store_open_encrypted(

@@ -253,11 +246,6 @@ pub unsafe extern "C" fn in_progress_transact<'m>(
 /// Commit all the transacts that have been performed using this
 /// in progress transaction.
 ///
-/// # Safety
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `tx_report_destroy` is provided for releasing the memory for this
-/// pointer type.
-///
 /// TODO: Document the errors that can result from transact
 #[no_mangle]
 pub unsafe extern "C" fn in_progress_commit<'m>(

@@ -272,12 +260,6 @@ pub unsafe extern "C" fn in_progress_commit<'m>(
 /// Rolls back all the transacts that have been performed using this
 /// in progress transaction.
 ///
-/// # Safety
-///
-/// Callers are responsible for managing the memory for the return value.
-/// A destructor `tx_report_destroy` is provided for releasing the memory for this
-/// pointer type.
-///
 /// TODO: Document the errors that can result from rollback
 #[no_mangle]
 pub unsafe extern "C" fn in_progress_rollback<'m>(

@@ -360,7 +342,7 @@ pub unsafe extern "C" fn store_in_progress_builder<'a, 'c>(
     let store = &mut *store;
     let result = store
         .begin_transaction()
-        .map(|in_progress| in_progress.builder());
+        .and_then(|in_progress| Ok(in_progress.builder()));
     translate_result(result, error)
 }

@@ -383,7 +365,7 @@ pub unsafe extern "C" fn store_entity_builder_from_temp_id<'a, 'c>(
     let temp_id = c_char_to_string(temp_id);
     let result = store
         .begin_transaction()
-        .map(|in_progress| in_progress.builder().describe_tempid(&temp_id));
+        .and_then(|in_progress| Ok(in_progress.builder().describe_tempid(&temp_id)));
     translate_result(result, error)
 }

@@ -405,7 +387,7 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
     let store = &mut *store;
     let result = store
         .begin_transaction()
-        .map(|in_progress| in_progress.builder().describe(KnownEntid(entid)));
+        .and_then(|in_progress| Ok(in_progress.builder().describe(KnownEntid(entid))));
     translate_result(result, error)
 }
@@ -417,12 +399,10 @@ pub unsafe extern "C" fn store_entity_builder_from_entid<'a, 'c>(
 [The remaining hunks in ffi/src/lib.rs repeat the same pair of changes for each of the
 in_progress_builder_add_{string,long,ref,keyword,boolean,double,timestamp,uuid} and
 in_progress_builder_retract_{string,long,ref,...} entry points: the master side drops the
 explicit <'a, 'c> lifetime parameters from each signature (builder: *mut InProgressBuilder),
 removes the "/// # Safety" doc section, and uses the "// TODO: Generalise with macro
 https://github.com/mozilla/mentat/issues/703" comment; the branch side keeps the lifetimes
 (builder: *mut InProgressBuilder<'a, 'c>) and the older "// TODO Generalise with macro ..."
 comment, with each function otherwise taking the same entid, kw, and value arguments.]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_ref<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -707,12 +665,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_ref(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_keyword(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_keyword<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
|
@ -733,12 +689,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_keyword(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_boolean(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_boolean<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
|
@ -759,12 +713,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_boolean(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_double(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_double<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
|
@ -785,12 +737,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_double(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_timestamp(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_timestamp<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
|
@ -811,13 +761,12 @@ pub unsafe extern "C" fn in_progress_builder_retract_timestamp(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO don't panic if the UUID is not valid - return result instead.
|
||||
//
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_uuid(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_retract_uuid<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
entid: c_longlong,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
|
@ -837,12 +786,10 @@ pub unsafe extern "C" fn in_progress_builder_retract_uuid(
|
|||
///
|
||||
/// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Document the errors that can result from transact
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn in_progress_builder_commit(
|
||||
builder: *mut InProgressBuilder,
|
||||
pub unsafe extern "C" fn in_progress_builder_commit<'a, 'c>(
|
||||
builder: *mut InProgressBuilder<'a, 'c>,
|
||||
error: *mut ExternError,
|
||||
) -> *mut TxReport {
|
||||
assert_not_null!(builder);
|
||||
|
@ -881,12 +828,10 @@ pub unsafe extern "C" fn in_progress_builder_transact<'a, 'c>(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_string(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_string<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -906,12 +851,10 @@ pub unsafe extern "C" fn entity_builder_add_string(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_long(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_long<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -931,12 +874,10 @@ pub unsafe extern "C" fn entity_builder_add_long(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_ref(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_ref<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -956,12 +897,10 @@ pub unsafe extern "C" fn entity_builder_add_ref(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_keyword(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_keyword<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -981,12 +920,10 @@ pub unsafe extern "C" fn entity_builder_add_keyword(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_boolean(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_boolean<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
error: *mut ExternError,
|
||||
|
@ -1006,12 +943,10 @@ pub unsafe extern "C" fn entity_builder_add_boolean(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_double(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_double<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
error: *mut ExternError,
|
||||
|
@ -1031,12 +966,10 @@ pub unsafe extern "C" fn entity_builder_add_double(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_timestamp(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_timestamp<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1056,12 +989,10 @@ pub unsafe extern "C" fn entity_builder_add_timestamp(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_add_uuid(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_add_uuid<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
error: *mut ExternError,
|
||||
|
@ -1083,12 +1014,10 @@ pub unsafe extern "C" fn entity_builder_add_uuid(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/string`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_string(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_string<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -1108,12 +1037,10 @@ pub unsafe extern "C" fn entity_builder_retract_string(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/long`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_long(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_long<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1133,12 +1060,10 @@ pub unsafe extern "C" fn entity_builder_retract_long(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/ref`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_ref(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_ref<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1158,12 +1083,10 @@ pub unsafe extern "C" fn entity_builder_retract_ref(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/keyword`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_keyword(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_keyword<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const c_char,
|
||||
error: *mut ExternError,
|
||||
|
@ -1183,12 +1106,10 @@ pub unsafe extern "C" fn entity_builder_retract_keyword(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/boolean`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_boolean(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_boolean<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: bool,
|
||||
error: *mut ExternError,
|
||||
|
@ -1208,12 +1129,10 @@ pub unsafe extern "C" fn entity_builder_retract_boolean(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/double`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_double(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_double<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: f64,
|
||||
error: *mut ExternError,
|
||||
|
@ -1233,12 +1152,10 @@ pub unsafe extern "C" fn entity_builder_retract_double(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/instant`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_timestamp(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_timestamp<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: c_longlong,
|
||||
error: *mut ExternError,
|
||||
|
@ -1258,13 +1175,11 @@ pub unsafe extern "C" fn entity_builder_retract_timestamp(
|
|||
/// If `kw` is not a valid attribute in the store.
|
||||
/// If the `:db/type` of the attribute described by `kw` is not `:db.type/uuid`.
|
||||
///
|
||||
/// # Safety
|
||||
/// TODO:
|
||||
// TODO: Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO: don't panic if the UUID is not valid - return result instead.
|
||||
// TODO Generalise with macro https://github.com/mozilla/mentat/issues/703
|
||||
// TODO don't panic if the UUID is not valid - return result instead.
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn entity_builder_retract_uuid(
|
||||
builder: *mut EntityBuilder<InProgressBuilder>,
|
||||
pub unsafe extern "C" fn entity_builder_retract_uuid<'a, 'c>(
|
||||
builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
|
||||
kw: *const c_char,
|
||||
value: *const [u8; 16],
|
||||
error: *mut ExternError,
|
||||
|
@@ -1306,12 +1221,10 @@ pub unsafe extern "C" fn entity_builder_transact<'a, 'c>(
///
/// This consumes the builder and the enclosed [InProgress](mentat::InProgress) transaction.
///
/// # Safety
/// TODO:
/// TODO: Document the errors that can result from transact
#[no_mangle]
pub unsafe extern "C" fn entity_builder_commit(
    builder: *mut EntityBuilder<InProgressBuilder>,
pub unsafe extern "C" fn entity_builder_commit<'a, 'c>(
    builder: *mut EntityBuilder<InProgressBuilder<'a, 'c>>,
    error: *mut ExternError,
) -> *mut TxReport {
    assert_not_null!(builder);
@@ -1321,8 +1234,6 @@ pub unsafe extern "C" fn entity_builder_commit(

/// Performs a single transaction against the store.
///
/// # Safety
/// TODO:
/// TODO: Document the errors that can result from transact
#[no_mangle]
pub unsafe extern "C" fn store_transact(
@@ -1342,7 +1253,6 @@ pub unsafe extern "C" fn store_transact(
}

/// Fetches the `tx_id` for the given [TxReport](mentat::TxReport)`.
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_longlong {
    assert_not_null!(tx_report);
@@ -1351,7 +1261,6 @@ pub unsafe extern "C" fn tx_report_get_entid(tx_report: *mut TxReport) -> c_long
}

/// Fetches the `tx_instant` for the given [TxReport](mentat::TxReport).
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn tx_report_get_tx_instant(tx_report: *mut TxReport) -> c_longlong {
    assert_not_null!(tx_report);
@@ -1374,7 +1283,7 @@ pub unsafe extern "C" fn tx_report_entity_for_temp_id(
    let tx_report = &*tx_report;
    let key = c_char_to_string(tempid);
    if let Some(entid) = tx_report.tempids.get(key) {
        Box::into_raw(Box::new(*entid as c_longlong))
        Box::into_raw(Box::new(entid.clone() as c_longlong))
    } else {
        std::ptr::null_mut()
    }
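
The hunk above returns an optional entid to C as either a heap-allocated `c_longlong` or a null pointer. A self-contained sketch of that convention in isolation; `free_entid` is a hypothetical counterpart, not a function from this crate, included only to show that non-null results must be reclaimed exactly once.

// Sketch: Option<i64> across the FFI boundary as a nullable owning pointer.
fn optional_to_ptr(value: Option<i64>) -> *mut i64 {
    match value {
        Some(v) => Box::into_raw(Box::new(v)), // caller now owns the allocation
        None => std::ptr::null_mut(),
    }
}

// Hypothetical destructor the caller would invoke once per non-null pointer.
unsafe fn free_entid(ptr: *mut i64) {
    if !ptr.is_null() {
        drop(Box::from_raw(ptr));
    }
}
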
@@ -1499,7 +1408,7 @@ pub unsafe extern "C" fn query_builder_bind_ref_kw(
    let kw = kw_from_string(c_char_to_string(value));
    let query_builder = &mut *query_builder;
    if let Some(err) = query_builder.bind_ref_from_kw(&var, kw).err() {
        std::panic::panic_any(err);
        panic!(err);
    }
}

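
The `panic!(err)` / `std::panic::panic_any(err)` pair above reflects the 2021-edition rule that `panic!` takes a format string, while arbitrary payloads go through `panic_any`. A small self-contained illustration of the two equivalent ways to fail with an error value:

use std::fmt::Display;

// Sketch: panicking with an error value under the 2021 edition.
fn fail_with<E: Display + Send + 'static>(err: E) -> ! {
    // `panic!(err)` no longer accepts a non-literal payload; either format it...
    // panic!("{}", err);
    // ...or hand the value itself to the panic machinery.
    std::panic::panic_any(err)
}
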
@@ -2159,7 +2068,7 @@ pub unsafe extern "C" fn store_register_observer(
    .map(|(tx_id, changes)| {
        (
            *tx_id,
            changes.iter().map(|eid| *eid as c_longlong).collect(),
            changes.into_iter().map(|eid| *eid as c_longlong).collect(),
        )
    })
    .collect();
@@ -14,12 +14,9 @@ pub mod strings {

    use mentat::Keyword;

    /// # Safety
    ///
    /// This function TODO
    pub unsafe fn c_char_to_string(cchar: *const c_char) -> &'static str {
    pub fn c_char_to_string(cchar: *const c_char) -> &'static str {
        assert!(!cchar.is_null());
        let c_str = CStr::from_ptr(cchar);
        let c_str = unsafe { CStr::from_ptr(cchar) };
        c_str.to_str().unwrap_or("")
    }

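
For reference, a self-contained sketch of the conversion this helper performs, assuming a non-null, NUL-terminated pointer; note that the real helper's `&'static str` return leans on the caller keeping the C buffer alive, which the signature cannot enforce.

use std::ffi::CStr;
use std::os::raw::c_char;

// Sketch: borrow a C string for as long as the caller can guarantee the buffer lives,
// falling back to "" when the bytes are not valid UTF-8.
unsafe fn c_str_lossy<'a>(ptr: *const c_char) -> &'a str {
    assert!(!ptr.is_null());
    CStr::from_ptr(ptr).to_str().unwrap_or("")
}
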
@@ -32,8 +29,8 @@ pub mod strings {

    pub fn kw_from_string(keyword_string: &'static str) -> Keyword {
        // TODO: validate. The input might not be a keyword!
        let attr_name = keyword_string.trim_start_matches(':');
        let parts: Vec<&str> = attr_name.split('/').collect();
        let attr_name = keyword_string.trim_start_matches(":");
        let parts: Vec<&str> = attr_name.split("/").collect();
        Keyword::namespaced(parts[0], parts[1])
    }
}
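
The TODO above notes that the input might not be a keyword at all, in which case `parts[1]` panics. A hedged sketch of the validation it asks for, returning the namespace and name as plain string slices so the example stays free of the `mentat` dependency; in the real helper the final step would be `Keyword::namespaced(ns, name)`.

// Sketch: accept only strings shaped like ":namespace/name".
fn try_split_keyword(keyword_string: &str) -> Option<(&str, &str)> {
    let rest = keyword_string.strip_prefix(':')?;
    let (ns, name) = rest.split_once('/')?;
    if ns.is_empty() || name.is_empty() {
        return None;
    }
    Some((ns, name))
}

// Usage sketch:
// assert_eq!(try_split_keyword(":db/ident"), Some(("db", "ident")));
// assert_eq!(try_split_keyword("not-a-keyword"), None);
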
@@ -110,8 +107,6 @@ pub mod error {
    /// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
    /// message (which was allocated on the heap and should eventually be freed) into
    /// `error.message`
    /// # Safety
    /// Be afraid... TODO
    pub unsafe fn translate_result<T, E>(result: Result<T, E>, error: *mut ExternError) -> *mut T
    where
        E: Display,
@@ -138,8 +133,6 @@ pub mod error {
    /// - If `result` is `Err(e)`, returns a null pointer and stores a string representing the error
    /// message (which was allocated on the heap and should eventually be freed) into
    /// `error.message`
    /// # Safety
    /// Be afraid... TODO
    pub unsafe fn translate_opt_result<T, E>(
        result: Result<Option<T>, E>,
        error: *mut ExternError,
@@ -162,8 +155,6 @@ pub mod error {

    /// Identical to `translate_result`, but with additional type checking for the case that we have
    /// a `Result<(), E>` (which we're about to drop on the floor).
    /// # Safety
    /// Be afraid... TODO
    pub unsafe fn translate_void_result<E>(result: Result<(), E>, error: *mut ExternError)
    where
        E: Display,
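
The doc comments above describe the crate's error-translation convention: `Ok(v)` moves the value to the heap and returns a pointer to it, while `Err(e)` returns null and records a message in the out-parameter. A minimal sketch under that reading, with a simplified stand-in struct whose single `message` field is an assumption, not the crate's real `ExternError`:

use std::fmt::Display;
use std::os::raw::c_char;

// Simplified stand-in for illustration; the crate's ExternError differs.
#[repr(C)]
pub struct ExternErrorSketch {
    pub message: *mut c_char,
}

// Sketch of the convention: Ok -> owned heap pointer, Err -> null plus a C string message.
pub unsafe fn translate_result_sketch<T, E: Display>(
    result: Result<T, E>,
    error: *mut ExternErrorSketch,
) -> *mut T {
    match result {
        Ok(v) => Box::into_raw(Box::new(v)),
        Err(e) => {
            if !error.is_null() {
                let msg = std::ffi::CString::new(e.to_string()).unwrap_or_default();
                (*error).message = msg.into_raw(); // caller frees this eventually
            }
            std::ptr::null_mut()
        }
    }
}
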
@@ -1,6 +1,6 @@
[package]
name = "public_traits"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[lib]
@@ -13,23 +13,15 @@ sqlcipher = ["rusqlite/sqlcipher"]
syncable = ["tolstoy_traits", "hyper", "serde_json"]

[dependencies]
failure = "~0.1"
failure_derive = "~0.1"
http = "~0.2"
tokio = { version = "1.8.0", features = ["full"] }
uuid = "~1.0"
failure = "0.1"
failure_derive = "0.1"
http = "0.2"
tokio-core = "0.1"
uuid = "0.8"

[dependencies.rusqlite]
version = "~0.29"
features = ["limits", "bundled"]

[dependencies.hyper]
version = "~0.14"
optional = true

[dependencies.serde_json]
version = "~1.0"
optional = true
version = "0.21"
features = ["limits"]

[dependencies.edn]
path = "../edn"
@@ -55,3 +47,11 @@ path = "../sql-traits"
[dependencies.tolstoy_traits]
path = "../tolstoy-traits"
optional = true

[dependencies.hyper]
version = "0.13"
optional = true

[dependencies.serde_json]
version = "1.0"
optional = true
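
One readable difference in the dependency sections above is the switch to tilde requirements on one side: `uuid = "~1.0"` restricts updates to 1.0.x, where a caret-style `"1.0"` would accept anything below 2.0; for zero-major versions such as `~0.1` or `~0.29` the tilde and caret ranges happen to coincide.
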
@@ -1,6 +1,6 @@
[package]
name = "query_algebrizer_traits"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[lib]
@@ -8,8 +8,8 @@ name = "query_algebrizer_traits"
path = "lib.rs"

[dependencies]
failure = "~0.1"
failure_derive = "~0.1"
failure = "0.1"
failure_derive = "0.1"

[dependencies.edn]
path = "../edn"
@@ -1,10 +1,10 @@
[package]
name = "mentat_query_algebrizer"
version = "0.0.2"
version = "0.0.1"
workspace = ".."

[dependencies]
failure = "~0.1"
failure = "0.1.1"

[dependencies.edn]
path = "../edn"
@@ -19,4 +19,4 @@ path = "../core-traits"
path = "../query-algebrizer-traits"

[dev-dependencies]
itertools = "~0.10"
itertools = "0.8"
@@ -14,11 +14,11 @@ use mentat_core::{HasSchema, SQLValueType, Schema};

use edn::query::{FnArg, NonIntegerConstant, Variable};

use crate::clauses::ConjoiningClauses;
use clauses::ConjoiningClauses;

use query_algebrizer_traits::errors::{AlgebrizerError, Result};

use crate::types::EmptyBecause;
use types::EmptyBecause;

macro_rules! coerce_to_typed_value {
    ($var: ident, $val: ident, $types: expr, $type: path, $constructor: path) => {{
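
The repeated `use crate::...` versus bare `use ...` pairs in this and the following hunks are the 2018-edition path style on one side against 2015-edition crate-relative paths on the other. A tiny self-contained illustration of the same crate-local import under both spellings (the module name here is an example, not taken from this crate):

// Sketch: the same crate-local import under the two edition styles.
mod clauses {
    pub struct ConjoiningClauses;
}

// 2015 edition accepted the bare, crate-relative form:
//     use clauses::ConjoiningClauses;
// 2018 and later spell the crate root explicitly:
use crate::clauses::ConjoiningClauses;

fn main() {
    let _cc = ConjoiningClauses;
}
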
@@ -16,16 +16,16 @@ use mentat_core::util::Either;

use edn::query::{Binding, FnArg, NonIntegerConstant, SrcVar, VariableOrPlaceholder, WhereFn};

use crate::clauses::ConjoiningClauses;
use clauses::ConjoiningClauses;

use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};

use crate::types::{
use types::{
    Column, ColumnConstraint, DatomsColumn, DatomsTable, EmptyBecause, FulltextColumn,
    QualifiedAlias, QueryValue, SourceAlias,
};

use crate::Known;
use Known;

impl ConjoiningClauses {
    #[allow(unused_variables)]
@ -311,7 +311,7 @@ mod testing {
|
|||
|
||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
|
||||
#[test]
|
||||
fn test_apply_fulltext() {
|
||||
|
|
|
@ -14,15 +14,15 @@ use mentat_core::Schema;
|
|||
|
||||
use edn::query::{Binding, FnArg, Variable, VariableOrPlaceholder, WhereFn};
|
||||
|
||||
use crate::clauses::{ConjoiningClauses, PushComputed};
|
||||
use clauses::{ConjoiningClauses, PushComputed};
|
||||
|
||||
use crate::clauses::convert::ValueConversion;
|
||||
use clauses::convert::ValueConversion;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};
|
||||
|
||||
use crate::types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
||||
use types::{ComputedTable, EmptyBecause, SourceAlias, VariableColumn};
|
||||
|
||||
use crate::Known;
|
||||
use Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
/// Take a relation: a matrix of values which will successively bind to named variables of
|
||||
|
@ -129,7 +129,7 @@ impl ConjoiningClauses {
|
|||
if where_fn.binding.is_empty() {
|
||||
// The binding must introduce at least one bound variable.
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator,
|
||||
where_fn.operator.clone(),
|
||||
BindingError::NoBoundVariable
|
||||
));
|
||||
}
|
||||
|
@ -137,7 +137,7 @@ impl ConjoiningClauses {
|
|||
if !where_fn.binding.is_valid() {
|
||||
// The binding must not duplicate bound variables.
|
||||
bail!(AlgebrizerError::InvalidBinding(
|
||||
where_fn.operator,
|
||||
where_fn.operator.clone(),
|
||||
BindingError::RepeatedBoundVariable
|
||||
));
|
||||
}
|
||||
|
@ -342,7 +342,7 @@ mod testing {
|
|||
|
||||
use edn::query::{Binding, FnArg, Keyword, PlainSymbol, Variable};
|
||||
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
|
||||
#[test]
|
||||
fn test_apply_ground() {
|
||||
|
|
|
@ -26,7 +26,7 @@ use edn::query::{Element, FindSpec, Keyword, PatternNonValuePlace, Pull, Variabl
|
|||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use crate::types::{
|
||||
use types::{
|
||||
Column, ColumnConstraint, ColumnIntersection, ComputedTable, DatomsColumn, DatomsTable,
|
||||
EmptyBecause, EvolvedNonValuePlace, EvolvedPattern, EvolvedValuePlace, FulltextColumn,
|
||||
PlaceOrEmpty, QualifiedAlias, QueryValue, SourceAlias, TableAlias,
|
||||
|
@ -45,11 +45,11 @@ mod ground;
|
|||
mod tx_log_api;
|
||||
mod where_fn;
|
||||
|
||||
use crate::validate::{validate_not_join, validate_or_join};
|
||||
use validate::{validate_not_join, validate_or_join};
|
||||
|
||||
pub use self::inputs::QueryInputs;
|
||||
|
||||
use crate::Known;
|
||||
use Known;
|
||||
|
||||
trait Contains<K, T> {
|
||||
fn when_contains<F: FnOnce() -> T>(&self, k: &K, f: F) -> Option<T>;
|
||||
|
@@ -508,7 +508,7 @@ impl ConjoiningClauses {

        self.column_bindings
            .entry(var)
            .or_insert_with(Vec::new)
            .or_insert_with(|| vec![])
            .push(alias);
    }

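
The one-line change above swaps a closure for the `Vec::new` function path in the map's entry API; `or_default()` is a third equivalent spelling. A self-contained sketch of the pattern:

use std::collections::BTreeMap;

// Sketch: append to a per-key Vec, creating it on first use.
fn push_binding(bindings: &mut BTreeMap<String, Vec<String>>, var: String, alias: String) {
    bindings
        .entry(var)
        .or_insert_with(Vec::new) // equivalently: .or_default()
        .push(alias);
}

fn main() {
    let mut bindings = BTreeMap::new();
    push_binding(&mut bindings, "?x".into(), "datoms00.e".into());
    push_binding(&mut bindings, "?x".into(), "datoms01.e".into());
    assert_eq!(bindings["?x"].len(), 2);
}
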
@@ -1227,7 +1227,7 @@ impl PushComputed for Vec<ComputedTable> {
#[cfg(test)]
fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
    schema.entid_map.insert(e, i.clone());
    schema.ident_map.insert(i, e);
    schema.ident_map.insert(i.clone(), e);
}

#[cfg(test)]
@ -10,13 +10,13 @@
|
|||
|
||||
use edn::query::{ContainsVariables, NotJoin, UnifyVars};
|
||||
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
use clauses::ConjoiningClauses;
|
||||
|
||||
use query_algebrizer_traits::errors::{AlgebrizerError, Result};
|
||||
|
||||
use crate::types::{ColumnConstraint, ComputedTable};
|
||||
use types::{ColumnConstraint, ComputedTable};
|
||||
|
||||
use crate::Known;
|
||||
use Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
pub(crate) fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
|
||||
|
@ -87,16 +87,16 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, PlainSymbol, Variable};
|
||||
|
||||
use crate::clauses::{add_attribute, associate_ident, QueryInputs};
|
||||
use clauses::{add_attribute, associate_ident, QueryInputs};
|
||||
|
||||
use query_algebrizer_traits::errors::AlgebrizerError;
|
||||
|
||||
use crate::types::{
|
||||
use types::{
|
||||
ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
|
||||
DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
|
||||
};
|
||||
|
||||
use crate::{algebrize, algebrize_with_inputs, parse_find_string};
|
||||
use {algebrize, algebrize_with_inputs, parse_find_string};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let known = Known::for_schema(schema);
|
||||
|
@ -216,17 +216,26 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e),
|
||||
d1a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), ambar)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2v.clone(),
|
||||
daphne,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -238,8 +247,14 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
john
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
|
@ -302,14 +317,17 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vy.clone(), vec![d0v.clone(), d3v.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d3a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d3e),
|
||||
d3a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v,
|
||||
QueryValue::Column(d3v),
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d3e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
QueryValue::Column(d3v.clone()),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -318,15 +336,24 @@ mod testing {
|
|||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vy, ValueTypeSet::of_one(ValueType::String));
|
||||
.insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
let expected_wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, eleven)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, name)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a.clone(), knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a.clone(),
|
||||
age.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
eleven,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
name.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v.clone(), john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(Box::new(subquery)),
|
||||
)),
|
||||
|
@ -396,17 +423,29 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone(), d2e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e),
|
||||
d1a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
john.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d2v.clone(),
|
||||
daphne.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
)),
|
||||
]);
|
||||
|
||||
|
@ -418,10 +457,13 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
age.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
|
||||
operator: Inequality::LessThan,
|
||||
left: QueryValue::Column(d0v),
|
||||
left: QueryValue::Column(d0v.clone()),
|
||||
right: QueryValue::TypedValue(TypedValue::Long(30)),
|
||||
}),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
|
@ -448,7 +490,7 @@ mod testing {
|
|||
let d0 = "datoms00".to_string();
|
||||
let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
|
||||
let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
|
||||
let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
|
||||
let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);
|
||||
|
||||
let d1 = "datoms01".to_string();
|
||||
let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
|
||||
|
@ -492,34 +534,49 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a,
|
||||
d1a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, ambar)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v.clone(),
|
||||
ambar,
|
||||
)),
|
||||
]),
|
||||
])),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2a, parent)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d2v, daphne)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e),
|
||||
d2a.clone(),
|
||||
parent,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e,
|
||||
QueryValue::Column(d2e),
|
||||
d2v.clone(),
|
||||
daphne,
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d2e.clone()),
|
||||
)),
|
||||
]);
|
||||
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vx, ValueTypeSet::of_one(ValueType::Ref));
|
||||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
bill
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
|
@ -554,7 +611,7 @@ mod testing {
|
|||
let d0 = "datoms00".to_string();
|
||||
let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
|
||||
let d0a = QualifiedAlias::new(d0.clone(), DatomsColumn::Attribute);
|
||||
let d0v = QualifiedAlias::new(d0, DatomsColumn::Value);
|
||||
let d0v = QualifiedAlias::new(d0.clone(), DatomsColumn::Value);
|
||||
|
||||
let d1 = "datoms01".to_string();
|
||||
let d1e = QualifiedAlias::new(d1.clone(), DatomsColumn::Entity);
|
||||
|
@ -567,17 +624,20 @@ mod testing {
|
|||
.column_bindings
|
||||
.insert(vx.clone(), vec![d0e.clone(), d1e.clone()]);
|
||||
subquery.wheres = ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1a, knows.clone())),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v, john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e,
|
||||
QueryValue::Column(d1e),
|
||||
d1a.clone(),
|
||||
knows.clone(),
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d1v.clone(), john)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0e.clone(),
|
||||
QueryValue::Column(d1e.clone()),
|
||||
)),
|
||||
]);
|
||||
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vx, ValueTypeSet::of_one(ValueType::Ref));
|
||||
.insert(vx.clone(), ValueTypeSet::of_one(ValueType::Ref));
|
||||
subquery
|
||||
.known_types
|
||||
.insert(vy.clone(), ValueTypeSet::of_one(ValueType::String));
|
||||
|
@ -587,14 +647,20 @@ mod testing {
|
|||
subquery.input_variables = input_vars;
|
||||
subquery
|
||||
.value_bindings
|
||||
.insert(vy, TypedValue::typed_string("John"));
|
||||
.insert(vy.clone(), TypedValue::typed_string("John"));
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0v, bill)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v.clone(),
|
||||
bill
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::NotExists(
|
||||
ComputedTable::Subquery(Box::new(subquery))
|
||||
)),
|
||||
|
|
|
@ -18,17 +18,17 @@ use edn::query::{
|
|||
WhereClause,
|
||||
};
|
||||
|
||||
use crate::clauses::{ConjoiningClauses, PushComputed};
|
||||
use clauses::{ConjoiningClauses, PushComputed};
|
||||
|
||||
use query_algebrizer_traits::errors::Result;
|
||||
|
||||
use crate::types::{
|
||||
use types::{
|
||||
ColumnAlternation, ColumnConstraintOrAlternation, ColumnIntersection, ComputedTable,
|
||||
DatomsTable, EmptyBecause, EvolvedPattern, PlaceOrEmpty, QualifiedAlias, SourceAlias,
|
||||
VariableColumn,
|
||||
};
|
||||
|
||||
use crate::Known;
|
||||
use Known;
|
||||
|
||||
/// Return true if both left and right are the same variable or both are non-variable.
|
||||
fn _simply_matches_place(left: &PatternNonValuePlace, right: &PatternNonValuePlace) -> bool {
|
||||
|
@ -727,7 +727,7 @@ fn union_types(
|
|||
for (var, new_types) in additional_types {
|
||||
match into.entry(var.clone()) {
|
||||
Entry::Vacant(e) => {
|
||||
e.insert(*new_types);
|
||||
e.insert(new_types.clone());
|
||||
}
|
||||
Entry::Occupied(mut e) => {
|
||||
let new = e.get().union(*new_types);
|
||||
|
@ -750,14 +750,14 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, Variable};
|
||||
|
||||
use crate::clauses::{add_attribute, associate_ident};
|
||||
use clauses::{add_attribute, associate_ident};
|
||||
|
||||
use crate::types::{
|
||||
use types::{
|
||||
ColumnConstraint, DatomsColumn, DatomsTable, Inequality, QualifiedAlias, QueryValue,
|
||||
SourceAlias,
|
||||
};
|
||||
|
||||
use crate::{algebrize, algebrize_with_counter, parse_find_string};
|
||||
use {algebrize, algebrize_with_counter, parse_find_string};
|
||||
|
||||
fn alg(known: Known, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
|
@ -920,10 +920,12 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a, knows
|
||||
d0a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0v, daphne
|
||||
d0v.clone(),
|
||||
daphne
|
||||
))
|
||||
]),
|
||||
])
|
||||
|
@ -965,7 +967,10 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, name)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
name.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
|
@ -989,10 +994,12 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a, knows
|
||||
d1a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v, daphne
|
||||
d1v.clone(),
|
||||
daphne
|
||||
))
|
||||
]),
|
||||
])),
|
||||
|
@ -1044,10 +1051,13 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, age)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
age.clone()
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Inequality {
|
||||
operator: Inequality::LessThan,
|
||||
left: QueryValue::Column(d0v),
|
||||
left: QueryValue::Column(d0v.clone()),
|
||||
right: QueryValue::TypedValue(TypedValue::Long(30)),
|
||||
}),
|
||||
ColumnConstraintOrAlternation::Alternation(ColumnAlternation(vec![
|
||||
|
@ -1063,10 +1073,12 @@ mod testing {
|
|||
]),
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1a, knows
|
||||
d1a.clone(),
|
||||
knows
|
||||
)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d1v, daphne
|
||||
d1v.clone(),
|
||||
daphne
|
||||
))
|
||||
]),
|
||||
])),
|
||||
|
@ -1112,7 +1124,10 @@ mod testing {
|
|||
assert_eq!(
|
||||
cc.wheres,
|
||||
ColumnIntersection(vec![
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(d0a, knows)),
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
d0a.clone(),
|
||||
knows.clone()
|
||||
)),
|
||||
// The outer pattern joins against the `or` on the entity, but not value -- ?y means
|
||||
// different things in each place.
|
||||
ColumnConstraintOrAlternation::Constraint(ColumnConstraint::Equals(
|
||||
|
|
|
@ -18,14 +18,14 @@ use edn::query::{
|
|||
NonIntegerConstant, Pattern, PatternNonValuePlace, PatternValuePlace, SrcVar, Variable,
|
||||
};
|
||||
|
||||
use crate::clauses::ConjoiningClauses;
|
||||
use clauses::ConjoiningClauses;
|
||||
|
||||
use crate::types::{
|
||||
use types::{
|
||||
ColumnConstraint, DatomsColumn, EmptyBecause, EvolvedNonValuePlace, EvolvedPattern,
|
||||
EvolvedValuePlace, PlaceOrEmpty, SourceAlias,
|
||||
};
|
||||
|
||||
use crate::Known;
|
||||
use Known;
|
||||
|
||||
pub fn into_typed_value(nic: NonIntegerConstant) -> TypedValue {
|
||||
match nic {
|
||||
|
@ -696,13 +696,11 @@ mod testing {
|
|||
|
||||
use edn::query::{Keyword, Variable};
|
||||
|
||||
use crate::clauses::{add_attribute, associate_ident, ident, QueryInputs};
|
||||
use clauses::{add_attribute, associate_ident, ident, QueryInputs};
|
||||
|
||||
use crate::types::{
|
||||
Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias,
|
||||
};
|
||||
use types::{Column, ColumnConstraint, DatomsTable, QualifiedAlias, QueryValue, SourceAlias};
|
||||
|
||||
use crate::{algebrize, parse_find_string};
|
||||
use {algebrize, parse_find_string};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
|
@@ -797,7 +795,7 @@ mod testing {
        assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

        // ?x is bound to datoms0.e.
        assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
        assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);

        // Our 'where' clauses are two:
        // - datoms0.a = 99
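
This hunk, like many of the surrounding test hunks, drops a `.clone()` on the last use of a value, which clippy reports as `redundant_clone`. A self-contained illustration of why the final use can move instead of cloning:

// Sketch: clone only while the value is still needed afterwards.
fn last_use_moves() -> (Vec<String>, Vec<String>) {
    let alias = String::from("datoms00.e");
    let first = vec![alias.clone()]; // alias is used again below, so clone here
    let second = vec![alias];        // final use: move, no clone needed
    (first, second)
}

fn main() {
    let (first, second) = last_use_moves();
    assert_eq!(first, second);
}
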
@ -846,7 +844,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);
|
||||
|
||||
// ?x is bound to datoms0.e.
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
|
||||
assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
|
||||
|
||||
// Our 'where' clauses are two:
|
||||
// - datoms0.v = true
|
||||
|
@ -890,7 +888,7 @@ mod testing {
|
|||
Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
value: PatternValuePlace::Variable(v.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
},
|
||||
|
@ -915,7 +913,7 @@ mod testing {
|
|||
assert_eq!(cc.known_type(&v), Some(ValueType::Boolean));
|
||||
|
||||
// ?x is bound to datoms0.e.
-assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
assert_eq!(
cc.wheres,
vec![ColumnConstraint::Equals(d0_a, QueryValue::Entid(99)),].into()

@@ -940,9 +938,9 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
-attribute: PatternNonValuePlace::Variable(a),
-value: PatternValuePlace::Variable(v),
+entity: PatternNonValuePlace::Variable(x.clone()),
+attribute: PatternNonValuePlace::Variable(a.clone()),
+value: PatternValuePlace::Variable(v.clone()),
tx: PatternNonValuePlace::Placeholder,
},
);

@@ -969,8 +967,8 @@ mod testing {
Pattern {
source: None,
entity: PatternNonValuePlace::Variable(x.clone()),
-attribute: PatternNonValuePlace::Variable(a),
-value: PatternValuePlace::Variable(v),
+attribute: PatternNonValuePlace::Variable(a.clone()),
+value: PatternValuePlace::Variable(v.clone()),
tx: PatternNonValuePlace::Placeholder,
},
);

@@ -992,7 +990,7 @@ mod testing {
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

// ?x is bound to datoms0.e.
-assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
assert_eq!(cc.wheres, vec![].into());
}

@@ -1033,7 +1031,7 @@ mod testing {
assert_eq!(cc.known_type(&x).unwrap(), ValueType::Ref);

// ?x is bound to datoms0.e.
-assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);

// Our 'where' clauses are two:
// - datoms0.v = 'hello'

@@ -1095,7 +1093,7 @@ mod testing {
source: None,
entity: PatternNonValuePlace::Variable(x.clone()),
attribute: ident("foo", "bar"),
-value: PatternValuePlace::Variable(y),
+value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,
},
);

@@ -1204,7 +1202,7 @@ mod testing {
assert!(!cc.column_bindings.contains_key(&y));

// ?x is bound to the entity.
-assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e]);
+assert_eq!(cc.column_bindings.get(&x).unwrap(), &vec![d0_e.clone()]);
}

#[test]

@@ -1239,9 +1237,9 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
+entity: PatternNonValuePlace::Variable(x.clone()),
attribute: ident("foo", "bar"),
-value: PatternValuePlace::Variable(y),
+value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,
},
);

@@ -1284,9 +1282,9 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
+entity: PatternNonValuePlace::Variable(x.clone()),
attribute: ident("foo", "bar"),
-value: PatternValuePlace::Variable(y),
+value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,
},
);

@@ -1340,7 +1338,7 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
+entity: PatternNonValuePlace::Variable(x.clone()),
attribute: ident("foo", "bar"),
value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,

@@ -1354,7 +1352,7 @@ mod testing {
assert_eq!(
cc.empty_because.unwrap(),
EmptyBecause::TypeMismatch {
-var: y,
+var: y.clone(),
existing: ValueTypeSet::of_one(ValueType::String),
desired: ValueTypeSet::of_one(ValueType::Boolean),
}

@@ -1391,8 +1389,8 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(z),
-attribute: PatternNonValuePlace::Variable(y),
+entity: PatternNonValuePlace::Variable(z.clone()),
+attribute: PatternNonValuePlace::Variable(y.clone()),
value: PatternValuePlace::Variable(x.clone()),
tx: PatternNonValuePlace::Placeholder,
},

@@ -1405,7 +1403,7 @@ mod testing {
assert_eq!(
cc.empty_because.unwrap(),
EmptyBecause::TypeMismatch {
-var: x,
+var: x.clone(),
existing: ValueTypeSet::of_one(ValueType::Ref),
desired: ValueTypeSet::of_one(ValueType::Boolean),
}

@@ -14,15 +14,15 @@ use mentat_core::Schema;

use edn::query::{FnArg, PlainSymbol, Predicate, TypeAnnotation};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

-use crate::clauses::convert::ValueTypes;
+use clauses::convert::ValueTypes;

use query_algebrizer_traits::errors::{AlgebrizerError, Result};

-use crate::types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};
+use types::{ColumnConstraint, EmptyBecause, Inequality, QueryValue};

-use crate::Known;
+use Known;

/// Application of predicates.
impl ConjoiningClauses {

@@ -98,7 +98,7 @@ impl ConjoiningClauses {
.intersection(supported_types);
if left_types.is_empty() {
bail!(AlgebrizerError::InvalidArgumentType(
-predicate.operator,
+predicate.operator.clone(),
supported_types,
0
));

@@ -109,7 +109,7 @@ impl ConjoiningClauses {
.intersection(supported_types);
if right_types.is_empty() {
bail!(AlgebrizerError::InvalidArgumentType(
-predicate.operator,
+predicate.operator.clone(),
supported_types,
1
));

@@ -161,7 +161,7 @@ impl ConjoiningClauses {
right_v = self.resolve_ref_argument(known.schema, &predicate.operator, 1, right)?;
} else {
bail!(AlgebrizerError::InvalidArgumentType(
-predicate.operator,
+predicate.operator.clone(),
supported_types,
0
));

@@ -206,9 +206,9 @@ mod testing {
FnArg, Keyword, Pattern, PatternNonValuePlace, PatternValuePlace, PlainSymbol, Variable,
};

-use crate::clauses::{add_attribute, associate_ident, ident};
+use clauses::{add_attribute, associate_ident, ident};

-use crate::types::{ColumnConstraint, EmptyBecause, QueryValue};
+use types::{ColumnConstraint, EmptyBecause, QueryValue};

#[test]
/// Apply two patterns: a pattern and a numeric predicate.

@@ -235,7 +235,7 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
+entity: PatternNonValuePlace::Variable(x.clone()),
attribute: PatternNonValuePlace::Placeholder,
value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,

@@ -348,7 +348,7 @@ mod testing {
known,
Pattern {
source: None,
-entity: PatternNonValuePlace::Variable(x),
+entity: PatternNonValuePlace::Variable(x.clone()),
attribute: ident("foo", "roz"),
value: PatternValuePlace::Variable(y.clone()),
tx: PatternNonValuePlace::Placeholder,

@@ -362,7 +362,7 @@ mod testing {
assert_eq!(
cc.empty_because.unwrap(),
EmptyBecause::TypeMismatch {
-var: y,
+var: y.clone(),
existing: ValueTypeSet::of_numeric_types(),
desired: ValueTypeSet::of_one(ValueType::String),
}

@@ -14,11 +14,11 @@ use mentat_core::{HasSchema, Schema};

use edn::query::{FnArg, NonIntegerConstant, PlainSymbol};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

use query_algebrizer_traits::errors::{AlgebrizerError, Result};

-use crate::types::{EmptyBecause, QueryValue};
+use types::{EmptyBecause, QueryValue};

/// Argument resolution.
impl ConjoiningClauses {

@@ -12,16 +12,16 @@ use core_traits::ValueType;

use edn::query::{Binding, FnArg, SrcVar, VariableOrPlaceholder, WhereFn};

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

use query_algebrizer_traits::errors::{AlgebrizerError, BindingError, Result};

-use crate::types::{
+use types::{
Column, ColumnConstraint, DatomsTable, Inequality, QualifiedAlias, QueryValue, SourceAlias,
TransactionsColumn,
};

-use crate::Known;
+use Known;

impl ConjoiningClauses {
// Log in Query: tx-ids and tx-data

@@ -10,11 +10,11 @@

use edn::query::WhereFn;

-use crate::clauses::ConjoiningClauses;
+use clauses::ConjoiningClauses;

use query_algebrizer_traits::errors::{AlgebrizerError, Result};

-use crate::Known;
+use Known;

/// Application of `where` functions.
impl ConjoiningClauses {

@@ -34,9 +34,9 @@ use edn::query::{Element, FindSpec, Limit, Order, ParsedQuery, SrcVar, Variable,

use query_algebrizer_traits::errors::{AlgebrizerError, Result};

-pub use crate::clauses::{QueryInputs, VariableBindings};
+pub use clauses::{QueryInputs, VariableBindings};

-pub use crate::types::{EmptyBecause, FindQuery};
+pub use types::{EmptyBecause, FindQuery};

/// A convenience wrapper around things known in memory: the schema and caches.
/// We use a trait object here to avoid making dozens of functions generic over the type

@@ -347,9 +347,9 @@ pub fn algebrize_with_inputs(
simplify_limit(q)
}

-pub use crate::clauses::ConjoiningClauses;
+pub use clauses::ConjoiningClauses;

-pub use crate::types::{
+pub use types::{
Column, ColumnAlternation, ColumnConstraint, ColumnConstraintOrAlternation, ColumnIntersection,
ColumnName, ComputedTable, DatomsColumn, DatomsTable, FulltextColumn, OrderBy, QualifiedAlias,
QueryValue, SourceAlias, TableAlias, VariableColumn,

@@ -32,11 +32,11 @@ pub enum DatomsTable {
/// A source of rows that isn't a named table -- typically a subquery or union.
#[derive(PartialEq, Eq, Debug)]
pub enum ComputedTable {
-Subquery(Box<crate::clauses::ConjoiningClauses>),
+Subquery(Box<::clauses::ConjoiningClauses>),
Union {
projection: BTreeSet<Variable>,
type_extraction: BTreeSet<Variable>,
-arms: Vec<crate::clauses::ConjoiningClauses>,
+arms: Vec<::clauses::ConjoiningClauses>,
},
NamedValues {
names: Vec<Variable>,

@@ -91,11 +91,11 @@ mod tests {
Variable, WhereClause,
};

-use crate::clauses::ident;
+use clauses::ident;

use super::*;
-use crate::parse_find_string;
-use crate::types::FindQuery;
+use parse_find_string;
+use types::FindQuery;

fn value_ident(ns: &str, name: &str) -> PatternValuePlace {
Keyword::namespaced(ns, name).into()

@@ -112,7 +112,7 @@ mod tests {
match clause {
WhereClause::OrJoin(or_join) => {
// It's valid: the variables are the same in each branch.
-validate_or_join(&or_join).unwrap();
+assert_eq!((), validate_or_join(&or_join).unwrap());
assert_eq!(expected_unify, or_join.unify_vars);
or_join.clauses
}

@@ -254,10 +254,10 @@ mod tests {
/// Tests that the top-level form is a valid `not`, returning the clauses.
fn valid_not_join(parsed: FindQuery, expected_unify: UnifyVars) -> Vec<WhereClause> {
// Filter out all the clauses that are not `not`s.
-let mut nots = parsed
-.where_clauses
-.into_iter()
-.filter(|x| matches!(x, WhereClause::NotJoin(_)));
+let mut nots = parsed.where_clauses.into_iter().filter(|x| match x {
+&WhereClause::NotJoin(_) => true,
+_ => false,
+});

// There should be only one not clause.
let clause = nots.next().unwrap();

@@ -266,7 +266,7 @@ mod tests {
match clause {
WhereClause::NotJoin(not_join) => {
// It's valid: the variables are the same in each branch.
-validate_not_join(&not_join).unwrap();
+assert_eq!((), validate_not_join(&not_join).unwrap());
assert_eq!(expected_unify, not_join.unify_vars);
not_join.clauses
}

@@ -368,10 +368,11 @@ mod tests {
[?release :release/artists "Pink Floyd"]
[?release :release/year 1970])]"#;
let parsed = parse_find_string(query).expect("expected successful parse");
-let mut nots = parsed
-.where_clauses
-.iter()
-.filter(|&x| matches!(*x, WhereClause::NotJoin(_)));
+let mut nots = parsed.where_clauses.iter().filter(|&x| match *x {
+WhereClause::NotJoin(_) => true,
+_ => false,
+});

let clause = nots.next().unwrap().clone();
assert_eq!(None, nots.next());

@@ -22,7 +22,7 @@ use mentat_core::Schema;

use edn::query::Keyword;

-use crate::utils::{add_attribute, alg, associate_ident};
+use utils::{add_attribute, alg, associate_ident};

use mentat_query_algebrizer::Known;

@@ -28,7 +28,7 @@ use query_algebrizer_traits::errors::{AlgebrizerError, BindingError};

use mentat_query_algebrizer::{ComputedTable, Known, QueryInputs};

-use crate::utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};
+use utils::{add_attribute, alg, associate_ident, bails, bails_with_inputs};

fn prepopulated_schema() -> Schema {
let mut schema = Schema::default();

@@ -26,7 +26,7 @@ use query_algebrizer_traits::errors::AlgebrizerError;

use mentat_query_algebrizer::{EmptyBecause, Known, QueryInputs};

-use crate::utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};
+use utils::{add_attribute, alg, alg_with_inputs, associate_ident, bails};

fn prepopulated_schema() -> Schema {
let mut schema = Schema::default();

@@ -162,7 +162,7 @@ fn test_instant_predicates_accepts_var() {
let cc = alg_with_inputs(
known,
query,
-QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value)]),
+QueryInputs::with_value_sequence(vec![(instant_var.clone(), instant_value.clone())]),
);
assert_eq!(
cc.known_type(&instant_var).expect("?time is known"),

@@ -202,7 +202,7 @@ fn test_numeric_predicates_accepts_var() {
let cc = alg_with_inputs(
known,
query,
-QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value)]),
+QueryInputs::with_value_sequence(vec![(numeric_var.clone(), numeric_value.clone())]),
);
assert_eq!(
cc.known_type(&numeric_var).expect("?long is known"),

@@ -16,7 +16,7 @@ extern crate query_algebrizer_traits;

mod utils;

-use crate::utils::{alg, bails, SchemaBuilder};
+use utils::{alg, bails, SchemaBuilder};

use core_traits::ValueType;

@@ -34,7 +34,6 @@ fn prepopulated_schema() -> Schema {
.define_simple_attr("test", "uuid", ValueType::Uuid, false)
.define_simple_attr("test", "instant", ValueType::Instant, false)
.define_simple_attr("test", "ref", ValueType::Ref, false)
-.define_simple_attr("test", "bytes", ValueType::Bytes, false)
.schema
}

@@ -30,7 +30,7 @@ use mentat_query_algebrizer::{
// These are helpers that tests use to build Schema instances.
pub fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
schema.entid_map.insert(e, i.clone());
-schema.ident_map.insert(i, e);
+schema.ident_map.insert(i.clone(), e);
}

pub fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {

@@ -1,6 +1,6 @@
[package]
name = "query_projector_traits"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[lib]

@@ -11,12 +11,12 @@ path = "lib.rs"
sqlcipher = ["rusqlite/sqlcipher"]

[dependencies]
-failure = "~0.1"
-failure_derive = "~0.1"
+failure = "0.1"
+failure_derive = "0.1"

[dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+version = "0.21"
+features = ["limits"]

[dependencies.edn]
path = "../edn"

@@ -16,7 +16,7 @@ use mentat_query_algebrizer::{ColumnName, ConjoiningClauses, VariableColumn};

use mentat_query_sql::{ColumnOrExpression, Expression, Name, ProjectedColumn};

-use crate::errors::{ProjectorError, Result};
+use errors::{ProjectorError, Result};

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SimpleAggregationOp {

@@ -110,7 +110,7 @@ impl SimpleAggregationOp {
String => Ok(the_type),

// Unordered types.
-Keyword | Ref | Uuid | Bytes => {
+Keyword | Ref | Uuid => {
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
self,
possibilities

@@ -17,7 +17,7 @@ use db_traits::errors::DbError;
use edn::query::PlainSymbol;
use query_pull_traits::errors::PullError;

-use crate::aggregates::SimpleAggregationOp;
+use aggregates::SimpleAggregationOp;

pub type Result<T> = std::result::Result<T, ProjectorError>;

@@ -28,7 +28,7 @@ use mentat_query_projector::query_projection;
// These are helpers that tests use to build Schema instances.
fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
schema.entid_map.insert(e, i.clone());
-schema.ident_map.insert(i, e);
+schema.ident_map.insert(i.clone(), e);
}

fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {

@@ -1,18 +1,18 @@
[package]
name = "mentat_query_projector"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[features]
sqlcipher = ["rusqlite/sqlcipher"]

[dependencies]
-failure = "~0.1"
-indexmap = "~1.9"
+failure = "0.1"
+indexmap = "1.3"

[dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+version = "0.21"
+features = ["limits"]

[dependencies.core_traits]
path = "../core-traits"

@@ -50,24 +50,24 @@ use mentat_query_sql::{GroupBy, Projection};
pub mod translate;

mod binding_tuple;
-pub use crate::binding_tuple::BindingTuple;
+pub use binding_tuple::BindingTuple;
mod project;
mod projectors;
mod pull;
mod relresult;

-use crate::project::{project_elements, ProjectedElements};
+use project::{project_elements, ProjectedElements};

-pub use crate::project::projected_column_for_var;
+pub use project::projected_column_for_var;

-pub use crate::projectors::{ConstantProjector, Projector};
+pub use projectors::{ConstantProjector, Projector};

-use crate::projectors::{
+use projectors::{
CollProjector, CollTwoStagePullProjector, RelProjector, RelTwoStagePullProjector,
ScalarProjector, ScalarTwoStagePullProjector, TupleProjector, TupleTwoStagePullProjector,
};

-pub use crate::relresult::{RelResult, StructuredRelResult};
+pub use relresult::{RelResult, StructuredRelResult};

use query_projector_traits::errors::{ProjectorError, Result};

@@ -241,7 +241,7 @@ impl QueryOutput {

impl QueryResults {
pub fn len(&self) -> usize {
-use crate::QueryResults::*;
+use QueryResults::*;
match *self {
Scalar(ref o) => {
if o.is_some() {

@@ -263,7 +263,7 @@ impl QueryResults {
}

pub fn is_empty(&self) -> bool {
-use crate::QueryResults::*;
+use QueryResults::*;
match *self {
Scalar(ref o) => o.is_none(),
Tuple(ref o) => o.is_none(),

@@ -339,7 +339,7 @@ impl TypedIndex {
/// This function will return a runtime error if the type tag is unknown, or the value is
/// otherwise not convertible by the DB layer.
fn lookup<'a>(&self, row: &Row<'a>) -> Result<Binding> {
-use crate::TypedIndex::*;
+use TypedIndex::*;

match *self {
Known(value_index, value_type) => {

@@ -403,7 +403,10 @@ trait IsPull {

impl IsPull for Element {
fn is_pull(&self) -> bool {
-matches!(*self, Element::Pull(_))
+match *self {
+Element::Pull(_) => true,
+_ => false,
+}
}
}

@@ -522,14 +525,12 @@ fn test_into_tuple() {
))
);

-match query_output.into_tuple() {
+match query_output.clone().into_tuple() {
Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
assert_eq!((expected, got), (3, 2));
}
// This forces the result type.
-Ok(Some((_, _, _))) => panic!("expected error"),
-#[allow(clippy::wildcard_in_or_patterns)]
-_ => panic!("expected error"),
+Ok(Some((_, _, _))) | _ => panic!("expected error"),
}

let query_output = QueryOutput {

@@ -543,18 +544,14 @@ fn test_into_tuple() {
match query_output.clone().into_tuple() {
Ok(None) => {}
// This forces the result type.
-Ok(Some((_, _))) => panic!("expected error"),
-#[allow(clippy::wildcard_in_or_patterns)]
-_ => panic!("expected error"),
+Ok(Some((_, _))) | _ => panic!("expected error"),
}

-match query_output.into_tuple() {
+match query_output.clone().into_tuple() {
Err(ProjectorError::UnexpectedResultsTupleLength(expected, got)) => {
assert_eq!((expected, got), (3, 2));
}
// This forces the result type.
-Ok(Some((_, _, _))) => panic!("expected error"),
-#[allow(clippy::wildcard_in_or_patterns)]
-_ => panic!("expected error"),
+Ok(Some((_, _, _))) | _ => panic!("expected error"),
}
}

@@ -32,9 +32,9 @@ use query_projector_traits::aggregates::{

use query_projector_traits::errors::{ProjectorError, Result};

-use crate::projectors::Projector;
+use projectors::Projector;

-use crate::pull::{PullIndices, PullOperation, PullTemplate};
+use pull::{PullIndices, PullOperation, PullTemplate};

use super::{CombinedProjection, TypedIndex};

@@ -10,7 +10,7 @@

use std::rc::Rc;

-use crate::{rusqlite, Element, FindSpec, QueryOutput, QueryResults, Rows, Schema};
+use {rusqlite, Element, FindSpec, QueryOutput, QueryResults, Rows, Schema};

use query_projector_traits::errors::Result;

@@ -16,12 +16,12 @@ use mentat_query_pull::Puller;

use core_traits::Entid;

-use crate::{
+use {
rusqlite, Binding, CombinedProjection, Element, FindSpec, ProjectedElements, QueryOutput,
QueryResults, RelResult, Row, Rows, Schema, TypedIndex,
};

-use crate::pull::{PullConsumer, PullOperation, PullTemplate};
+use pull::{PullConsumer, PullOperation, PullTemplate};

use query_projector_traits::errors::Result;

@@ -123,7 +123,7 @@ impl TupleTwoStagePullProjector {
// There will be at least as many SQL columns as Datalog columns.
// gte 'cos we might be querying extra columns for ordering.
// The templates will take care of ignoring columns.
-assert!(row.as_ref().column_count() >= self.len);
+assert!(row.column_count() >= self.len);
self.templates
.iter()
.map(|ti| ti.lookup(row))

@@ -226,7 +226,7 @@ impl RelTwoStagePullProjector {
// There will be at least as many SQL columns as Datalog columns.
// gte 'cos we might be querying extra columns for ordering.
// The templates will take care of ignoring columns.
-assert!(row.as_ref().column_count() >= self.len);
+assert!(row.column_count() >= self.len);
let mut count = 0;
for binding in self.templates.iter().map(|ti| ti.lookup(&row)) {
out.push(binding?);

@@ -10,7 +10,7 @@

use std::rc::Rc;

-use crate::{
+use {
rusqlite, Binding, CombinedProjection, Element, FindSpec, ProjectedElements, QueryOutput,
QueryResults, RelResult, Row, Rows, Schema, TypedIndex,
};

@@ -93,7 +93,7 @@ impl TupleProjector {
// There will be at least as many SQL columns as Datalog columns.
// gte 'cos we might be querying extra columns for ordering.
// The templates will take care of ignoring columns.
-assert!(row.as_ref().column_count() >= self.len);
+assert!(row.column_count() >= self.len);
self.templates
.iter()
.map(|ti| ti.lookup(&row))

@@ -163,7 +163,7 @@ impl RelProjector {
// There will be at least as many SQL columns as Datalog columns.
// gte 'cos we might be querying extra columns for ordering.
// The templates will take care of ignoring columns.
-assert!(row.as_ref().column_count() >= self.len);
+assert!(row.column_count() >= self.len);
let mut count = 0;
for binding in self.templates.iter().map(|ti| ti.lookup(&row)) {
out.push(binding?);

@@ -22,7 +22,7 @@ use mentat_query_algebrizer::{
OrderBy, QualifiedAlias, QueryValue, SourceAlias, TableAlias, VariableColumn,
};

-use crate::{
+use {
projected_column_for_var, query_projection, CombinedProjection, ConstantProjector, Projector,
};

@@ -82,7 +82,6 @@ fn affinity_count(tag: i32) -> usize {
.count()
}

-#[allow(clippy::ptr_arg)]
fn type_constraint(
table: &TableAlias,
tag: i32,

@@ -370,7 +369,7 @@ fn cc_to_select_query(
FromClause::TableList(TableList(tables.collect()))
};

-let order = order.map_or(vec![], |vec| vec.into_iter().collect());
+let order = order.map_or(vec![], |vec| vec.into_iter().map(|o| o).collect());
let limit = if cc.empty_because.is_some() {
Limit::Fixed(0)
} else {

@@ -46,7 +46,7 @@ macro_rules! var {

fn associate_ident(schema: &mut Schema, i: Keyword, e: Entid) {
schema.entid_map.insert(e, i.clone());
-schema.ident_map.insert(i, e);
+schema.ident_map.insert(i.clone(), e);
}

fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {

@@ -1,6 +1,6 @@
[package]
name = "query_pull_traits"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[lib]

@@ -8,8 +8,8 @@ name = "query_pull_traits"
path = "lib.rs"

[dependencies]
-failure = "~0.1"
-failure_derive = "~0.1"
+failure = "0.1"
+failure_derive = "0.1"

[dependencies.core_traits]
path = "../core-traits"

@@ -1,20 +1,20 @@
[package]
name = "mentat_query_pull"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

[features]
sqlcipher = ["rusqlite/sqlcipher"]

[dependencies]
-failure = "~0.1"
+failure = "0.1.1"

[dependencies.query_pull_traits]
path = "../query-pull-traits"

[dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+version = "0.21"
+features = ["limits"]

[dependencies.edn]
path = "../edn"

@@ -159,7 +159,7 @@ impl Puller {

for attr in attributes.iter() {
match attr {
-PullAttributeSpec::Wildcard => {
+&PullAttributeSpec::Wildcard => {
let attribute_ids = schema.attribute_map.keys();
for id in attribute_ids {
names.insert(*id, lookup_name(id)?);

@@ -167,28 +167,28 @@ impl Puller {
}
break;
}
-PullAttributeSpec::Attribute(NamedPullAttribute {
+&PullAttributeSpec::Attribute(NamedPullAttribute {
ref attribute,
ref alias,
}) => {
let alias = alias.as_ref().map(|ref r| r.to_value_rc());
match attribute {
// Handle :db/id.
-PullConcreteAttribute::Ident(ref i) if i.as_ref() == db_id.as_ref() => {
+&PullConcreteAttribute::Ident(ref i) if i.as_ref() == db_id.as_ref() => {
// We only allow :db/id once.
if db_id_alias.is_some() {
-return Err(PullError::RepeatedDbId);
+Err(PullError::RepeatedDbId)?
}
db_id_alias = Some(alias.unwrap_or_else(|| db_id.to_value_rc()));
}
-PullConcreteAttribute::Ident(ref i) => {
+&PullConcreteAttribute::Ident(ref i) => {
if let Some(entid) = schema.get_entid(i) {
let name = alias.unwrap_or_else(|| i.to_value_rc());
names.insert(entid.into(), name);
attrs.insert(entid.into());
}
}
-PullConcreteAttribute::Entid(ref entid) => {
+&PullConcreteAttribute::Entid(ref entid) => {
let name = alias.map(Ok).unwrap_or_else(|| lookup_name(entid))?;
names.insert(*entid, name);
attrs.insert(*entid);

@@ -242,7 +242,7 @@ impl Puller {
for e in entities.iter() {
let r = maps
.entry(*e)
-.or_insert_with(|| ValueRc::new(StructuredMap::default()));
+.or_insert(ValueRc::new(StructuredMap::default()));
let m = ValueRc::get_mut(r).unwrap();
m.insert(alias.clone(), Binding::Scalar(TypedValue::Ref(*e)));
}

@@ -257,7 +257,7 @@ impl Puller {
if let Some(binding) = cache.binding_for_e(*e) {
let r = maps
.entry(*e)
-.or_insert_with(|| ValueRc::new(StructuredMap::default()));
+.or_insert(ValueRc::new(StructuredMap::default()));

// Get into the inner map so we can accumulate a value.
// We can unwrap here because we created all of these maps…

@@ -1,11 +1,10 @@
[package]
name = "mentat_query_sql"
-version = "0.0.2"
+version = "0.0.1"
workspace = ".."

-[dependencies.rusqlite]
-version = "~0.29"
-features = ["limits", "bundled"]
+[dependencies]
+rusqlite = "0.21"

[dependencies.edn]
path = "../edn"

@@ -837,7 +837,7 @@ mod tests {
from: FromClause::TableList(TableList(source_aliases)),
constraints: vec![
Constraint::Infix {
-op: eq,
+op: eq.clone(),
left: ColumnOrExpression::Column(QualifiedAlias::new(
datoms01.clone(),
DatomsColumn::Value,

@@ -848,17 +848,17 @@ mod tests {
)),
},
Constraint::Infix {
-op: eq,
+op: eq.clone(),
left: ColumnOrExpression::Column(QualifiedAlias::new(
-datoms00,
+datoms00.clone(),
DatomsColumn::Attribute,
)),
right: ColumnOrExpression::Entid(65537),
},
Constraint::Infix {
-op: eq,
+op: eq.clone(),
left: ColumnOrExpression::Column(QualifiedAlias::new(
-datoms01,
+datoms01.clone(),
DatomsColumn::Attribute,
)),
right: ColumnOrExpression::Entid(65536),

@@ -1 +0,0 @@
-nightly-2023-11-27

38 shell.nix

@@ -1,38 +0,0 @@
-{ pkgs ? import <nixpkgs> {} }:
-pkgs.mkShell rec {
-buildInputs = with pkgs; [
-# Necessary for the openssl-sys crate:
-pkgs.openssl
-pkgs.pkg-config
-# Compiler
-clang
-# Replace llvmPackages with llvmPackages_X, where X is the latest LLVM version (at the time of writing, 16)
-llvmPackages.bintools
-rustup
-];
-RUSTC_VERSION = pkgs.lib.readFile ./rust-toolchain;
-# https://github.com/rust-lang/rust-bindgen#environment-variables
-LIBCLANG_PATH = pkgs.lib.makeLibraryPath [ pkgs.llvmPackages_latest.libclang.lib ];
-shellHook = ''
-export PATH=$PATH:''${CARGO_HOME:-~/.cargo}/bin
-export PATH=$PATH:''${RUSTUP_HOME:-~/.rustup}/toolchains/$RUSTC_VERSION-x86_64-unknown-linux-gnu/bin/
-'';
-# Add precompiled library to rustc search path
-RUSTFLAGS = (builtins.map (a: ''-L ${a}/lib'') [
-# add libraries here (e.g. pkgs.libvmi)
-]);
-# Add glibc, clang, glib and other headers to bindgen search path
-BINDGEN_EXTRA_CLANG_ARGS =
-# Includes with normal include path
-(builtins.map (a: ''-I"${a}/include"'') [
-# add dev libraries here (e.g. pkgs.libvmi.dev)
-pkgs.glibc.dev
-])
-# Includes with special directory paths
-++ [
-''-I"${pkgs.llvmPackages_latest.libclang.lib}/lib/clang/${pkgs.llvmPackages_latest.libclang.version}/include"''
-''-I"${pkgs.glib.dev}/include/glib-2.0"''
-''-I${pkgs.glib.out}/lib/glib-2.0/include/''
-];
-
-}
Some files were not shown because too many files have changed in this diff.