Rework caching and use it inside the query engine. (#553) r=emily
This puts caching in mentat_db, adds a reverse-lookup capability for unique attributes, and populates bidirectional caches with a single SQL cursor walk. It also differentiates between begin_read and begin_uncached_read. Note that we still allow toggling caching within an InProgress, because there might be transient local state that makes starting a new transaction impossible.
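Editorial sketch (not code from this commit): the core idea is that one pass over datoms ordered by attribute and entity can populate a forward (entity -> value) cache and a reverse (value -> entity) cache at the same time. The types below use plain i64 entids and String values as stand-ins for Mentat's Entid and TypedValue.

use std::collections::{BTreeMap, BTreeSet};

type Entid = i64;
type Value = String;              // Stand-in for TypedValue.
type Aev = (Entid, Entid, Value); // (attribute, entity, value), as yielded by the cursor walk.

#[derive(Debug, Default)]
struct BidirectionalCache {
    forward: BTreeMap<Entid, BTreeMap<Entid, Value>>,           // attribute -> (entity -> value)
    reverse: BTreeMap<Entid, BTreeMap<Value, BTreeSet<Entid>>>, // attribute -> (value -> entities)
}

impl BidirectionalCache {
    /// Populate both directions from rows sorted by (attribute, entity),
    /// mirroring a single `ORDER BY a, e` SQL cursor walk.
    fn populate<I: IntoIterator<Item = Aev>>(&mut self, rows: I) {
        for (a, e, v) in rows {
            self.forward.entry(a).or_insert_with(BTreeMap::new).insert(e, v.clone());
            self.reverse.entry(a).or_insert_with(BTreeMap::new)
                .entry(v).or_insert_with(BTreeSet::new).insert(e);
        }
    }

    fn value_for(&self, a: Entid, e: Entid) -> Option<&Value> {
        self.forward.get(&a).and_then(|m| m.get(&e))
    }

    fn entids_for(&self, a: Entid, v: &Value) -> Option<&BTreeSet<Entid>> {
        self.reverse.get(&a).and_then(|m| m.get(v))
    }
}

fn main() {
    // Hypothetical attribute entids (65536, 65537) and entities (100, 101).
    let rows = vec![
        (65536, 100, "Alice".to_string()),
        (65536, 101, "Bob".to_string()),
        (65537, 100, "alice@example.com".to_string()),
    ];
    let mut cache = BidirectionalCache::default();
    cache.populate(rows);
    assert_eq!(cache.value_for(65536, 100).map(|s| s.as_str()), Some("Alice"));
    assert!(cache.entids_for(65536, &"Bob".to_string()).unwrap().contains(&101));
    println!("forward and reverse lookups served from one walk: {:?}", cache);
}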
This commit is contained in:
parent df3cdb5db6
commit e33fe71c47
29 changed files with 2025 additions and 644 deletions

32  core/src/cache.rs  (Normal file)
@ -0,0 +1,32 @@
// Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

/// Cache traits.

use std::collections::{
    BTreeSet,
};

use ::{
    Entid,
    Schema,
    TypedValue,
};

pub trait CachedAttributes {
    fn is_attribute_cached_reverse(&self, entid: Entid) -> bool;
    fn is_attribute_cached_forward(&self, entid: Entid) -> bool;
    fn get_values_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&Vec<TypedValue>>;
    fn get_value_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&TypedValue>;

    /// Reverse lookup.
    fn get_entid_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<Entid>;
    fn get_entids_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<&BTreeSet<Entid>>;
}
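As a usage note (editorial sketch, not from this commit): anything implementing the trait above can sit in front of SQLite in the query engine. The fallback closure here, query_sqlite, is a hypothetical stand-in for the real SQL lookup path.

use mentat_core::{CachedAttributes, Entid, Schema, TypedValue};

/// Prefer the cache for a cardinality-one attribute; otherwise call `query_sqlite`.
fn lookup_value<C, F>(cache: &C,
                      schema: &Schema,
                      attribute: Entid,
                      entity: Entid,
                      query_sqlite: F) -> Option<TypedValue>
    where C: CachedAttributes,
          F: FnOnce(Entid, Entid) -> Option<TypedValue> {
    if cache.is_attribute_cached_forward(attribute) {
        // The cache is eagerly populated, so a miss here means the datom doesn't exist.
        cache.get_value_for_entid(schema, attribute, entity).cloned()
    } else {
        query_sqlite(attribute, entity)
    }
}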
@ -23,6 +23,7 @@ extern crate serde_derive;
extern crate edn;

pub mod values;
mod cache;

use std::collections::{
    BTreeMap,

@ -50,6 +51,8 @@ pub use edn::{
    Utc,
};

pub use cache::CachedAttributes;

/// Core types defining a Mentat knowledge base.

/// Represents one entid in the entid space.
581  db/src/cache.rs
@ -8,92 +8,557 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
use std::cmp::Ord;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Debug;
|
||||
use std::collections::{
|
||||
BTreeMap,
|
||||
BTreeSet,
|
||||
};
|
||||
|
||||
use std::iter::Peekable;
|
||||
|
||||
use rusqlite;
|
||||
|
||||
use errors::{
|
||||
Result
|
||||
};
|
||||
use db::{
|
||||
TypedSQLValue,
|
||||
};
|
||||
use mentat_core::{
|
||||
CachedAttributes,
|
||||
Entid,
|
||||
HasSchema,
|
||||
Schema,
|
||||
TypedValue,
|
||||
};
|
||||
|
||||
use db::{
|
||||
TypedSQLValue,
|
||||
};
|
||||
|
||||
use errors::{
|
||||
ErrorKind,
|
||||
Result,
|
||||
};
|
||||
|
||||
pub type Aev = (Entid, Entid, TypedValue);
|
||||
|
||||
fn row_to_aev(row: &rusqlite::Row) -> Aev {
|
||||
let a: Entid = row.get(0);
|
||||
let e: Entid = row.get(1);
|
||||
let value_type_tag: i32 = row.get(3);
|
||||
let v = TypedValue::from_sql_value_pair(row.get(2), value_type_tag).map(|x| x).unwrap();
|
||||
(a, e, v)
|
||||
}
|
||||
|
||||
pub type CacheMap<K, V> = BTreeMap<K, V>;
|
||||
|
||||
pub trait ValueProvider<K, V>: Clone {
|
||||
fn fetch_values<'sqlite>(&mut self, sqlite: &'sqlite rusqlite::Connection) -> Result<CacheMap<K, V>>;
|
||||
pub struct AevRows<'conn> {
|
||||
rows: rusqlite::MappedRows<'conn, fn(&rusqlite::Row) -> Aev>,
|
||||
}
|
||||
|
||||
pub trait Cacheable {
|
||||
type Key;
|
||||
type Value;
|
||||
|
||||
fn cache_values<'sqlite>(&mut self, sqlite: &'sqlite rusqlite::Connection) -> Result<()>;
|
||||
fn get(&self, key: &Self::Key) -> Option<&Self::Value>;
|
||||
/// Unwrap the Result from MappedRows. We could also use this opportunity to map_err it, but
|
||||
/// for now it's convenient to avoid error handling.
|
||||
impl<'conn> Iterator for AevRows<'conn> {
|
||||
type Item = Aev;
|
||||
fn next(&mut self) -> Option<Aev> {
|
||||
self.rows
|
||||
.next()
|
||||
.map(|row_result| row_result.expect("All database contents should be representable"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EagerCache<K, V, VP> where K: Ord, VP: ValueProvider<K, V> {
|
||||
pub cache: CacheMap<K, V>,
|
||||
value_provider: VP,
|
||||
// The behavior of the cache is different for different kinds of attributes:
|
||||
// - cardinality/one doesn't need a vec
|
||||
// - unique/* should have a bijective mapping (reverse lookup)
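// (Editorial illustration, not part of this commit.) Concretely, the per-attribute map
// shapes used below are:
//   forward, cardinality/one  : Entid -> TypedValue
//   forward, cardinality/many : Entid -> Vec<TypedValue>
//   reverse, unique           : TypedValue -> Entid
//   reverse, non-unique       : TypedValue -> BTreeSet<Entid>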
|
||||
|
||||
trait CardinalityOneCache {
|
||||
fn clear(&mut self);
|
||||
fn set(&mut self, e: Entid, v: TypedValue);
|
||||
fn get(&self, e: Entid) -> Option<&TypedValue>;
|
||||
}
|
||||
|
||||
impl<K, V, VP> EagerCache<K, V, VP> where K: Ord, VP: ValueProvider<K, V> {
|
||||
pub fn new(value_provider: VP) -> Self {
|
||||
EagerCache {
|
||||
cache: CacheMap::new(),
|
||||
value_provider: value_provider,
|
||||
trait CardinalityManyCache {
|
||||
fn clear(&mut self);
|
||||
fn acc(&mut self, e: Entid, v: TypedValue);
|
||||
fn set(&mut self, e: Entid, vs: Vec<TypedValue>);
|
||||
fn get(&self, e: Entid) -> Option<&Vec<TypedValue>>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct SingleValAttributeCache {
|
||||
attr: Entid,
|
||||
e_v: CacheMap<Entid, TypedValue>,
|
||||
}
|
||||
|
||||
impl CardinalityOneCache for SingleValAttributeCache {
|
||||
fn clear(&mut self) {
|
||||
self.e_v.clear();
|
||||
}
|
||||
|
||||
fn set(&mut self, e: Entid, v: TypedValue) {
|
||||
self.e_v.insert(e, v);
|
||||
}
|
||||
|
||||
fn get(&self, e: Entid) -> Option<&TypedValue> {
|
||||
self.e_v.get(&e)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct MultiValAttributeCache {
|
||||
attr: Entid,
|
||||
e_vs: CacheMap<Entid, Vec<TypedValue>>,
|
||||
}
|
||||
|
||||
impl CardinalityManyCache for MultiValAttributeCache {
|
||||
fn clear(&mut self) {
|
||||
self.e_vs.clear();
|
||||
}
|
||||
|
||||
fn acc(&mut self, e: Entid, v: TypedValue) {
|
||||
self.e_vs.entry(e).or_insert(vec![]).push(v)
|
||||
}
|
||||
|
||||
fn set(&mut self, e: Entid, vs: Vec<TypedValue>) {
|
||||
self.e_vs.insert(e, vs);
|
||||
}
|
||||
|
||||
fn get(&self, e: Entid) -> Option<&Vec<TypedValue>> {
|
||||
self.e_vs.get(&e)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct UniqueReverseAttributeCache {
|
||||
attr: Entid,
|
||||
v_e: CacheMap<TypedValue, Entid>,
|
||||
}
|
||||
|
||||
impl UniqueReverseAttributeCache {
|
||||
fn clear(&mut self) {
|
||||
self.v_e.clear();
|
||||
}
|
||||
|
||||
fn set(&mut self, e: Entid, v: TypedValue) {
|
||||
self.v_e.insert(v, e);
|
||||
}
|
||||
|
||||
fn get_e(&self, v: &TypedValue) -> Option<Entid> {
|
||||
self.v_e.get(v).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct NonUniqueReverseAttributeCache {
|
||||
attr: Entid,
|
||||
v_es: CacheMap<TypedValue, BTreeSet<Entid>>,
|
||||
}
|
||||
|
||||
impl NonUniqueReverseAttributeCache {
|
||||
fn clear(&mut self) {
|
||||
self.v_es.clear();
|
||||
}
|
||||
|
||||
fn acc(&mut self, e: Entid, v: TypedValue) {
|
||||
self.v_es.entry(v).or_insert(BTreeSet::new()).insert(e);
|
||||
}
|
||||
|
||||
fn get_es(&self, v: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
||||
self.v_es.get(v)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct AttributeCaches {
|
||||
reverse_cached_attributes: BTreeSet<Entid>,
|
||||
forward_cached_attributes: BTreeSet<Entid>,
|
||||
|
||||
single_vals: BTreeMap<Entid, SingleValAttributeCache>,
|
||||
multi_vals: BTreeMap<Entid, MultiValAttributeCache>,
|
||||
unique_reverse: BTreeMap<Entid, UniqueReverseAttributeCache>,
|
||||
non_unique_reverse: BTreeMap<Entid, NonUniqueReverseAttributeCache>,
|
||||
}
|
||||
|
||||
fn with_aev_iter<F, I>(a: Entid, iter: &mut Peekable<I>, mut f: F)
|
||||
where I: Iterator<Item=Aev>,
|
||||
F: FnMut(Entid, TypedValue) {
|
||||
let check = Some(a);
|
||||
while iter.peek().map(|&(a, _, _)| a) == check {
|
||||
let (_, e, v) = iter.next().unwrap();
|
||||
f(e, v);
|
||||
}
|
||||
}
|
||||
|
||||
fn accumulate_single_val_evs_forward<I, C>(a: Entid, f: &mut C, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityOneCache {
|
||||
with_aev_iter(a, iter, |e, v| f.set(e, v))
|
||||
}
|
||||
|
||||
fn accumulate_multi_val_evs_forward<I, C>(a: Entid, f: &mut C, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityManyCache {
|
||||
with_aev_iter(a, iter, |e, v| f.acc(e, v))
|
||||
}
|
||||
|
||||
fn accumulate_unique_evs_reverse<I>(a: Entid, r: &mut UniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev> {
|
||||
with_aev_iter(a, iter, |e, v| r.set(e, v))
|
||||
}
|
||||
|
||||
fn accumulate_non_unique_evs_reverse<I>(a: Entid, r: &mut NonUniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev> {
|
||||
with_aev_iter(a, iter, |e, v| r.acc(e, v))
|
||||
}
|
||||
|
||||
fn accumulate_single_val_unique_evs_both<I, C>(a: Entid, f: &mut C, r: &mut UniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityOneCache {
|
||||
with_aev_iter(a, iter, |e, v| {
|
||||
f.set(e, v.clone());
|
||||
r.set(e, v);
|
||||
})
|
||||
}
|
||||
|
||||
fn accumulate_multi_val_unique_evs_both<I, C>(a: Entid, f: &mut C, r: &mut UniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityManyCache {
|
||||
with_aev_iter(a, iter, |e, v| {
|
||||
f.acc(e, v.clone());
|
||||
r.set(e, v);
|
||||
})
|
||||
}
|
||||
|
||||
fn accumulate_single_val_non_unique_evs_both<I, C>(a: Entid, f: &mut C, r: &mut NonUniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityOneCache {
|
||||
with_aev_iter(a, iter, |e, v| {
|
||||
f.set(e, v.clone());
|
||||
r.acc(e, v);
|
||||
})
|
||||
}
|
||||
|
||||
fn accumulate_multi_val_non_unique_evs_both<I, C>(a: Entid, f: &mut C, r: &mut NonUniqueReverseAttributeCache, iter: &mut Peekable<I>) where I: Iterator<Item=Aev>, C: CardinalityManyCache {
|
||||
with_aev_iter(a, iter, |e, v| {
|
||||
f.acc(e, v.clone());
|
||||
r.acc(e, v);
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: if an entity or attribute is ever renumbered, the cache will need to be rebuilt.
|
||||
impl AttributeCaches {
|
||||
//
|
||||
// These function names are brief and local.
|
||||
// f = forward; r = reverse; both = both forward and reverse.
|
||||
// s = single-val; m = multi-val.
|
||||
// u = unique; nu = non-unique.
|
||||
// c = cache.
|
||||
#[inline]
|
||||
fn fsc(&mut self, a: Entid) -> &mut SingleValAttributeCache {
|
||||
self.single_vals
|
||||
.entry(a)
|
||||
.or_insert_with(Default::default)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn fmc(&mut self, a: Entid) -> &mut MultiValAttributeCache {
|
||||
self.multi_vals
|
||||
.entry(a)
|
||||
.or_insert_with(Default::default)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ruc(&mut self, a: Entid) -> &mut UniqueReverseAttributeCache {
|
||||
self.unique_reverse
|
||||
.entry(a)
|
||||
.or_insert_with(Default::default)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn rnuc(&mut self, a: Entid) -> &mut NonUniqueReverseAttributeCache {
|
||||
self.non_unique_reverse
|
||||
.entry(a)
|
||||
.or_insert_with(Default::default)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn both_s_u<'r>(&'r mut self, a: Entid) -> (&'r mut SingleValAttributeCache, &'r mut UniqueReverseAttributeCache) {
|
||||
(self.single_vals.entry(a).or_insert_with(Default::default),
|
||||
self.unique_reverse.entry(a).or_insert_with(Default::default))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn both_m_u<'r>(&'r mut self, a: Entid) -> (&'r mut MultiValAttributeCache, &'r mut UniqueReverseAttributeCache) {
|
||||
(self.multi_vals.entry(a).or_insert_with(Default::default),
|
||||
self.unique_reverse.entry(a).or_insert_with(Default::default))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn both_s_nu<'r>(&'r mut self, a: Entid) -> (&'r mut SingleValAttributeCache, &'r mut NonUniqueReverseAttributeCache) {
|
||||
(self.single_vals.entry(a).or_insert_with(Default::default),
|
||||
self.non_unique_reverse.entry(a).or_insert_with(Default::default))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn both_m_nu<'r>(&'r mut self, a: Entid) -> (&'r mut MultiValAttributeCache, &'r mut NonUniqueReverseAttributeCache) {
|
||||
(self.multi_vals.entry(a).or_insert_with(Default::default),
|
||||
self.non_unique_reverse.entry(a).or_insert_with(Default::default))
|
||||
}
|
||||
|
||||
// Process rows in `iter` that all share an attribute with the first. Leaves the iterator
|
||||
// advanced to the first non-matching row.
|
||||
fn accumulate_evs<I>(&mut self, schema: &Schema, iter: &mut Peekable<I>, replace_a: bool) where I: Iterator<Item=Aev> {
|
||||
if let Some(&(a, _, _)) = iter.peek() {
|
||||
if let Some(attribute) = schema.attribute_for_entid(a) {
|
||||
let forward = self.is_attribute_cached_forward(a);
|
||||
let reverse = self.is_attribute_cached_reverse(a);
|
||||
let multi = attribute.multival;
|
||||
let unique = attribute.unique.is_some();
|
||||
match (forward, reverse, multi, unique) {
|
||||
(true, true, true, true) => {
|
||||
let (f, r) = self.both_m_u(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
r.clear();
|
||||
}
|
||||
accumulate_multi_val_unique_evs_both(a, f, r, iter);
|
||||
},
|
||||
(true, true, true, false) => {
|
||||
let (f, r) = self.both_m_nu(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
r.clear();
|
||||
}
|
||||
accumulate_multi_val_non_unique_evs_both(a, f, r, iter);
|
||||
},
|
||||
(true, true, false, true) => {
|
||||
let (f, r) = self.both_s_u(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
r.clear();
|
||||
}
|
||||
accumulate_single_val_unique_evs_both(a, f, r, iter);
|
||||
},
|
||||
(true, true, false, false) => {
|
||||
let (f, r) = self.both_s_nu(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
r.clear();
|
||||
}
|
||||
accumulate_single_val_non_unique_evs_both(a, f, r, iter);
|
||||
},
|
||||
(true, false, true, _) => {
|
||||
let f = self.fmc(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
}
|
||||
accumulate_multi_val_evs_forward(a, f, iter)
|
||||
},
|
||||
(true, false, false, _) => {
|
||||
let f = self.fsc(a);
|
||||
if replace_a {
|
||||
f.clear();
|
||||
}
|
||||
accumulate_single_val_evs_forward(a, f, iter)
|
||||
},
|
||||
(false, true, _, true) => {
|
||||
let r = self.ruc(a);
|
||||
if replace_a {
|
||||
r.clear();
|
||||
}
|
||||
accumulate_unique_evs_reverse(a, r, iter);
|
||||
},
|
||||
(false, true, _, false) => {
|
||||
let r = self.rnuc(a);
|
||||
if replace_a {
|
||||
r.clear();
|
||||
}
|
||||
accumulate_non_unique_evs_reverse(a, r, iter);
|
||||
},
|
||||
(false, false, _, _) => {
|
||||
unreachable!(); // Must be cached in at least one direction!
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V, VP> Cacheable for EagerCache<K, V, VP>
|
||||
where K: Ord + Clone + Debug + ::std::hash::Hash,
|
||||
V: Clone,
|
||||
VP: ValueProvider<K, V> {
|
||||
type Key = K;
|
||||
type Value = V;
|
||||
|
||||
fn cache_values<'sqlite>(&mut self, sqlite: &'sqlite rusqlite::Connection) -> Result<()> {
|
||||
// fetch results and add to cache
|
||||
self.cache = self.value_provider.fetch_values(sqlite)?;
|
||||
fn add_to_cache<I>(&mut self, schema: &Schema, mut iter: Peekable<I>, replace_a: bool) -> Result<()> where I: Iterator<Item=Aev> {
|
||||
while iter.peek().is_some() {
|
||||
self.accumulate_evs(schema, &mut iter, replace_a);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, key: &Self::Key) -> Option<&Self::Value> {
|
||||
self.cache.get(&key)
|
||||
fn clear_cache(&mut self) {
|
||||
self.single_vals.clear();
|
||||
self.multi_vals.clear();
|
||||
self.unique_reverse.clear();
|
||||
self.non_unique_reverse.clear();
|
||||
}
|
||||
|
||||
fn unregister_all_attributes(&mut self) {
|
||||
self.reverse_cached_attributes.clear();
|
||||
self.forward_cached_attributes.clear();
|
||||
self.clear_cache();
|
||||
}
|
||||
|
||||
pub fn unregister_attribute<U>(&mut self, attribute: U)
|
||||
where U: Into<Entid> {
|
||||
let a = attribute.into();
|
||||
self.reverse_cached_attributes.remove(&a);
|
||||
self.forward_cached_attributes.remove(&a);
|
||||
self.single_vals.remove(&a);
|
||||
self.multi_vals.remove(&a);
|
||||
self.unique_reverse.remove(&a);
|
||||
self.non_unique_reverse.remove(&a);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AttributeValueProvider {
|
||||
pub attribute: Entid,
|
||||
impl CachedAttributes for AttributeCaches {
|
||||
fn get_values_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&Vec<TypedValue>> {
|
||||
self.values_pairs(schema, attribute)
|
||||
.and_then(|c| c.get(&entid))
|
||||
}
|
||||
|
||||
impl ValueProvider<Entid, Vec<TypedValue>> for AttributeValueProvider {
|
||||
fn fetch_values<'sqlite>(&mut self, sqlite: &'sqlite rusqlite::Connection) -> Result<CacheMap<Entid, Vec<TypedValue>>> {
|
||||
let sql = "SELECT e, v, value_type_tag FROM datoms WHERE a = ? ORDER BY e ASC";
|
||||
fn get_value_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&TypedValue> {
|
||||
self.value_pairs(schema, attribute)
|
||||
.and_then(|c| c.get(&entid))
|
||||
}
|
||||
|
||||
fn is_attribute_cached_reverse(&self, attribute: Entid) -> bool {
|
||||
self.reverse_cached_attributes.contains(&attribute)
|
||||
}
|
||||
|
||||
fn is_attribute_cached_forward(&self, attribute: Entid) -> bool {
|
||||
self.forward_cached_attributes.contains(&attribute)
|
||||
}
|
||||
|
||||
fn get_entid_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<Entid> {
|
||||
if self.is_attribute_cached_reverse(attribute) {
|
||||
self.unique_reverse.get(&attribute).and_then(|c| c.get_e(value))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_entids_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
||||
if self.is_attribute_cached_reverse(attribute) {
|
||||
self.non_unique_reverse.get(&attribute).and_then(|c| c.get_es(value))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AttributeCaches {
|
||||
fn values_pairs<U>(&self, schema: &Schema, attribute: U) -> Option<&BTreeMap<Entid, Vec<TypedValue>>>
|
||||
where U: Into<Entid> {
|
||||
let attribute = attribute.into();
|
||||
schema.attribute_for_entid(attribute)
|
||||
.and_then(|attr|
|
||||
if attr.multival {
|
||||
self.multi_vals
|
||||
.get(&attribute)
|
||||
.map(|c| &c.e_vs)
|
||||
} else {
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
fn value_pairs<U>(&self, schema: &Schema, attribute: U) -> Option<&CacheMap<Entid, TypedValue>>
|
||||
where U: Into<Entid> {
|
||||
let attribute = attribute.into();
|
||||
schema.attribute_for_entid(attribute)
|
||||
.and_then(|attr|
|
||||
if attr.multival {
|
||||
None
|
||||
} else {
|
||||
self.single_vals
|
||||
.get(&attribute)
|
||||
.map(|c| &c.e_v)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SQLiteAttributeCache {
|
||||
inner: AttributeCaches,
|
||||
}
|
||||
|
||||
impl SQLiteAttributeCache {
|
||||
pub fn register_forward<U>(&mut self, schema: &Schema, sqlite: &rusqlite::Connection, attribute: U) -> Result<()>
|
||||
where U: Into<Entid> {
|
||||
let a = attribute.into();
|
||||
|
||||
// The attribute must exist!
|
||||
let _ = schema.attribute_for_entid(a).ok_or_else(|| ErrorKind::UnknownAttribute(a))?;
|
||||
self.inner.forward_cached_attributes.insert(a);
|
||||
self.repopulate(schema, sqlite, a)
|
||||
}
|
||||
|
||||
pub fn register_reverse<U>(&mut self, schema: &Schema, sqlite: &rusqlite::Connection, attribute: U) -> Result<()>
|
||||
where U: Into<Entid> {
|
||||
let a = attribute.into();
|
||||
|
||||
// The attribute must exist!
|
||||
let _ = schema.attribute_for_entid(a).ok_or_else(|| ErrorKind::UnknownAttribute(a))?;
|
||||
|
||||
self.inner.reverse_cached_attributes.insert(a);
|
||||
self.repopulate(schema, sqlite, a)
|
||||
}
|
||||
|
||||
pub fn register<U>(&mut self, schema: &Schema, sqlite: &rusqlite::Connection, attribute: U) -> Result<()>
|
||||
where U: Into<Entid> {
|
||||
let a = attribute.into();
|
||||
|
||||
// TODO: reverse-index unique by default?
|
||||
|
||||
self.inner.forward_cached_attributes.insert(a);
|
||||
self.inner.reverse_cached_attributes.insert(a);
|
||||
self.repopulate(schema, sqlite, a)
|
||||
}
|
||||
|
||||
fn repopulate(&mut self, schema: &Schema, sqlite: &rusqlite::Connection, attribute: Entid) -> Result<()> {
|
||||
let sql = "SELECT a, e, v, value_type_tag FROM datoms WHERE a = ? ORDER BY a ASC, e ASC";
|
||||
let args: Vec<&rusqlite::types::ToSql> = vec![&attribute];
|
||||
let mut stmt = sqlite.prepare(sql)?;
|
||||
let value_iter = stmt.query_map(&[&self.attribute], |row| {
|
||||
let entid: Entid = row.get(0);
|
||||
let value_type_tag: i32 = row.get(2);
|
||||
let value = TypedValue::from_sql_value_pair(row.get(1), value_type_tag).map(|x| x).unwrap();
|
||||
(entid, value)
|
||||
}).map_err(|e| e.into());
|
||||
value_iter.map(|v| {
|
||||
v.fold(CacheMap::new(), |mut map, row| {
|
||||
let _ = row.map(|r| {
|
||||
map.entry(r.0).or_insert(vec![]).push(r.1);
|
||||
});
|
||||
map
|
||||
})
|
||||
})
|
||||
let rows = stmt.query_map(&args, row_to_aev as fn(&rusqlite::Row) -> Aev)?;
|
||||
let aevs = AevRows {
|
||||
rows: rows,
|
||||
};
|
||||
self.inner.add_to_cache(schema, aevs.peekable(), true)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn unregister<U>(&mut self, attribute: U)
|
||||
where U: Into<Entid> {
|
||||
self.inner.unregister_attribute(attribute);
|
||||
}
|
||||
|
||||
pub fn unregister_all(&mut self) {
|
||||
self.inner.unregister_all_attributes();
|
||||
}
|
||||
}
|
||||
|
||||
impl CachedAttributes for SQLiteAttributeCache {
|
||||
fn get_values_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&Vec<TypedValue>> {
|
||||
self.inner.get_values_for_entid(schema, attribute, entid)
|
||||
}
|
||||
|
||||
fn get_value_for_entid(&self, schema: &Schema, attribute: Entid, entid: Entid) -> Option<&TypedValue> {
|
||||
self.inner.get_value_for_entid(schema, attribute, entid)
|
||||
}
|
||||
|
||||
fn is_attribute_cached_reverse(&self, attribute: Entid) -> bool {
|
||||
self.inner.is_attribute_cached_reverse(attribute)
|
||||
}
|
||||
|
||||
fn is_attribute_cached_forward(&self, attribute: Entid) -> bool {
|
||||
self.inner.is_attribute_cached_forward(attribute)
|
||||
}
|
||||
|
||||
fn get_entids_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<&BTreeSet<Entid>> {
|
||||
self.inner.get_entids_for_value(attribute, value)
|
||||
}
|
||||
|
||||
fn get_entid_for_value(&self, attribute: Entid, value: &TypedValue) -> Option<Entid> {
|
||||
self.inner.get_entid_for_value(attribute, value)
|
||||
}
|
||||
}
|
||||
|
||||
impl SQLiteAttributeCache {
|
||||
/// Intended for use from tests.
|
||||
pub fn values_pairs<U>(&self, schema: &Schema, attribute: U) -> Option<&BTreeMap<Entid, Vec<TypedValue>>>
|
||||
where U: Into<Entid> {
|
||||
self.inner.values_pairs(schema, attribute)
|
||||
}
|
||||
|
||||
/// Intended for use from tests.
|
||||
pub fn value_pairs<U>(&self, schema: &Schema, attribute: U) -> Option<&BTreeMap<Entid, TypedValue>>
|
||||
where U: Into<Entid> {
|
||||
self.inner.value_pairs(schema, attribute)
|
||||
}
|
||||
}
|
||||
|
|
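Editorial sketch of driving the new cache end to end; the module path mentat_db::cache, the entid 65537, and the connection/schema setup are assumptions, but the method names match those added in this file.

use rusqlite::Connection;

use mentat_core::{CachedAttributes, Schema, TypedValue};
use mentat_db::cache::SQLiteAttributeCache;   // Assumed public path for this module.
use mentat_db::errors::Result;                // Assumed re-exported error type.

fn demo(conn: &Connection, schema: &Schema) -> Result<()> {
    let mut cache = SQLiteAttributeCache::default();

    // Register a (hypothetical) unique attribute, entid 65537, in both directions.
    // This triggers a single `SELECT ... ORDER BY a ASC, e ASC` walk that fills the
    // forward and reverse maps in one pass.
    cache.register(schema, conn, 65537)?;

    assert!(cache.is_attribute_cached_forward(65537));
    assert!(cache.is_attribute_cached_reverse(65537));

    // Forward lookup: entity -> value.
    let _value = cache.get_value_for_entid(schema, 65537, 100);
    // Reverse lookup: value -> entity, available because the attribute is unique.
    let _entity = cache.get_entid_for_value(65537, &TypedValue::typed_string("alice@example.com"));

    Ok(())
}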
|
@ -916,7 +916,8 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// First, insert all fulltext string values.
|
||||
// `fts_params` reference computed values in `block`.
|
||||
let fts_params: Vec<&ToSql> = block.iter()
|
||||
let fts_params: Vec<&ToSql> =
|
||||
block.iter()
|
||||
.filter(|&&(ref _e, ref _a, ref value, ref _value_type_tag, _added, ref _flags, ref _searchid)| {
|
||||
value.is_some()
|
||||
})
|
||||
|
|
|
@ -87,5 +87,15 @@ error_chain! {
|
|||
description("conflicting datoms in tx")
|
||||
display("conflicting datoms in tx")
|
||||
}
|
||||
|
||||
UnknownAttribute(attr: Entid) {
|
||||
description("unknown attribute")
|
||||
display("unknown attribute for entid: {}", attr)
|
||||
}
|
||||
|
||||
CannotCacheNonUniqueAttributeInReverse(attr: Entid) {
|
||||
description("cannot reverse-cache non-unique attribute")
|
||||
display("cannot reverse-cache non-unique attribute: {}", attr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,6 @@
|
|||
|
||||
use mentat_core::{
|
||||
HasSchema,
|
||||
Schema,
|
||||
TypedValue,
|
||||
ValueType,
|
||||
};
|
||||
|
@ -48,9 +47,11 @@ use types::{
|
|||
SourceAlias,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
#[allow(unused_variables)]
|
||||
pub fn apply_fulltext<'s>(&mut self, schema: &'s Schema, where_fn: WhereFn) -> Result<()> {
|
||||
pub fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 3 {
|
||||
bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 3));
|
||||
}
|
||||
|
@ -96,6 +97,8 @@ impl ConjoiningClauses {
|
|||
_ => bail!(ErrorKind::InvalidArgument(where_fn.operator.clone(), "source variable".into(), 0)),
|
||||
}
|
||||
|
||||
let schema = known.schema;
|
||||
|
||||
// TODO: accept placeholder and set of attributes. Alternately, consider putting the search
|
||||
// term before the attribute arguments and collect the (variadic) attributes into a set.
|
||||
// let a: Entid = self.resolve_attribute_argument(&where_fn.operator, 1, args.next().unwrap())?;
|
||||
|
@ -130,7 +133,7 @@ impl ConjoiningClauses {
|
|||
if !attribute.fulltext {
|
||||
// We can never get results from a non-fulltext attribute!
|
||||
println!("Can't run fulltext on non-fulltext attribute {}.", a);
|
||||
self.mark_known_empty(EmptyBecause::InvalidAttributeEntid(a));
|
||||
self.mark_known_empty(EmptyBecause::NonFulltextAttribute(a));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -258,6 +261,7 @@ mod testing {
|
|||
|
||||
use mentat_core::{
|
||||
Attribute,
|
||||
Schema,
|
||||
ValueType,
|
||||
};
|
||||
|
||||
|
@ -294,8 +298,10 @@ mod testing {
|
|||
..Default::default()
|
||||
});
|
||||
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
let op = PlainSymbol::new("fulltext");
|
||||
cc.apply_fulltext(&schema, WhereFn {
|
||||
cc.apply_fulltext(known, WhereFn {
|
||||
operator: op,
|
||||
args: vec![
|
||||
FnArg::SrcVar(SrcVar::DefaultSrc),
|
||||
|
@ -353,7 +359,7 @@ mod testing {
|
|||
|
||||
let mut cc = ConjoiningClauses::default();
|
||||
let op = PlainSymbol::new("fulltext");
|
||||
cc.apply_fulltext(&schema, WhereFn {
|
||||
cc.apply_fulltext(known, WhereFn {
|
||||
operator: op,
|
||||
args: vec![
|
||||
FnArg::SrcVar(SrcVar::DefaultSrc),
|
||||
|
|
|
@ -43,6 +43,8 @@ use types::{
|
|||
VariableColumn,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
/// Take a relation: a matrix of values which will successively bind to named variables of
|
||||
/// the provided types.
|
||||
|
@ -113,7 +115,7 @@ impl ConjoiningClauses {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn apply_ground<'s>(&mut self, schema: &'s Schema, where_fn: WhereFn) -> Result<()> {
|
||||
pub fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
if where_fn.args.len() != 1 {
|
||||
bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 1));
|
||||
}
|
||||
|
@ -130,6 +132,8 @@ impl ConjoiningClauses {
|
|||
bail!(ErrorKind::InvalidBinding(where_fn.operator.clone(), BindingError::RepeatedBoundVariable));
|
||||
}
|
||||
|
||||
let schema = known.schema;
|
||||
|
||||
// Scalar and tuple bindings are a little special: because there's only one value,
|
||||
// we can immediately substitute the value as a known value in the CC, additionally
|
||||
// generating a WHERE clause if columns have already been bound.
|
||||
|
@ -350,10 +354,12 @@ mod testing {
|
|||
..Default::default()
|
||||
});
|
||||
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
// It's awkward enough to write these expansions that we give the details for the simplest
|
||||
// case only. See the tests of the translator for more extensive (albeit looser) coverage.
|
||||
let op = PlainSymbol::new("ground");
|
||||
cc.apply_ground(&schema, WhereFn {
|
||||
cc.apply_ground(known, WhereFn {
|
||||
operator: op,
|
||||
args: vec![
|
||||
FnArg::EntidOrInteger(10),
|
||||
|
|
|
@ -13,9 +13,12 @@ use std::cmp;
|
|||
use std::collections::{
|
||||
BTreeMap,
|
||||
BTreeSet,
|
||||
VecDeque,
|
||||
};
|
||||
|
||||
use std::collections::btree_map::Entry;
|
||||
use std::collections::btree_map::{
|
||||
Entry,
|
||||
};
|
||||
|
||||
use std::fmt::{
|
||||
Debug,
|
||||
|
@ -37,14 +40,15 @@ use mentat_core::counter::RcCounter;
|
|||
|
||||
use mentat_query::{
|
||||
NamespacedKeyword,
|
||||
NonIntegerConstant,
|
||||
Pattern,
|
||||
PatternNonValuePlace,
|
||||
PatternValuePlace,
|
||||
Variable,
|
||||
WhereClause,
|
||||
};
|
||||
|
||||
#[cfg(test)]
|
||||
use mentat_query::{
|
||||
PatternNonValuePlace,
|
||||
};
|
||||
|
||||
use errors::{
|
||||
Error,
|
||||
ErrorKind,
|
||||
|
@ -59,7 +63,11 @@ use types::{
|
|||
DatomsColumn,
|
||||
DatomsTable,
|
||||
EmptyBecause,
|
||||
EvolvedNonValuePlace,
|
||||
EvolvedPattern,
|
||||
EvolvedValuePlace,
|
||||
FulltextColumn,
|
||||
PlaceOrEmpty,
|
||||
QualifiedAlias,
|
||||
QueryValue,
|
||||
SourceAlias,
|
||||
|
@ -85,6 +93,8 @@ use validate::{
|
|||
|
||||
pub use self::inputs::QueryInputs;
|
||||
|
||||
use Known;
|
||||
|
||||
// We do this a lot for errors.
|
||||
trait RcCloned<T> {
|
||||
fn cloned(&self) -> T;
|
||||
|
@ -146,6 +156,8 @@ impl<K: Clone + Ord, V: Clone> Intersection<K> for BTreeMap<K, V> {
|
|||
}
|
||||
}
|
||||
|
||||
type VariableBindings = BTreeMap<Variable, TypedValue>;
|
||||
|
||||
/// A `ConjoiningClauses` (CC) is a collection of clauses that are combined with `JOIN`.
|
||||
/// The topmost form in a query is a `ConjoiningClauses`.
|
||||
///
|
||||
|
@ -205,7 +217,7 @@ pub struct ConjoiningClauses {
|
|||
///
|
||||
/// and for `?val` provide `TypedValue::String("foo".to_string())`, the query will be known at
|
||||
/// algebrizing time to be empty.
|
||||
value_bindings: BTreeMap<Variable, TypedValue>,
|
||||
value_bindings: VariableBindings,
|
||||
|
||||
/// A map from var to type. Whenever a var maps unambiguously to two different types, it cannot
|
||||
/// yield results, so we don't represent that case here. If a var isn't present in the map, it
|
||||
|
@ -535,6 +547,23 @@ impl ConjoiningClauses {
|
|||
self.narrow_types_for_var(variable, ValueTypeSet::of_numeric_types());
|
||||
}
|
||||
|
||||
pub fn can_constrain_var_to_type(&self, var: &Variable, this_type: ValueType) -> Option<EmptyBecause> {
|
||||
self.can_constrain_var_to_types(var, ValueTypeSet::of_one(this_type))
|
||||
}
|
||||
|
||||
fn can_constrain_var_to_types(&self, var: &Variable, these_types: ValueTypeSet) -> Option<EmptyBecause> {
|
||||
if let Some(existing) = self.known_types.get(var) {
|
||||
if existing.intersection(&these_types).is_empty() {
|
||||
return Some(EmptyBecause::TypeMismatch {
|
||||
var: var.clone(),
|
||||
existing: existing.clone(),
|
||||
desired: these_types,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Constrains the var if there's no existing type.
|
||||
/// Marks as known-empty if it's impossible for this type to apply because there's a conflicting
|
||||
/// type already known.
|
||||
|
@ -673,17 +702,17 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
/// Ensure that the given place has the correct types to be a tx-id.
|
||||
fn constrain_to_tx(&mut self, tx: &PatternNonValuePlace) {
|
||||
fn constrain_to_tx(&mut self, tx: &EvolvedNonValuePlace) {
|
||||
self.constrain_to_ref(tx);
|
||||
}
|
||||
|
||||
/// Ensure that the given place can be an entity, and is congruent with existing types.
|
||||
/// This is used for `entity` and `attribute` places in a pattern.
|
||||
fn constrain_to_ref(&mut self, value: &PatternNonValuePlace) {
|
||||
fn constrain_to_ref(&mut self, value: &EvolvedNonValuePlace) {
|
||||
// If it's a variable, record that it has the right type.
|
||||
// Ident or attribute resolution errors (the only other check we need to do) will be done
|
||||
// by the caller.
|
||||
if let &PatternNonValuePlace::Variable(ref v) = value {
|
||||
if let &EvolvedNonValuePlace::Variable(ref v) = value {
|
||||
self.constrain_var_to_type(v.clone(), ValueType::Ref)
|
||||
}
|
||||
}
|
||||
|
@ -705,17 +734,17 @@ impl ConjoiningClauses {
|
|||
schema.get_entid(&ident)
|
||||
}
|
||||
|
||||
fn table_for_attribute_and_value<'s, 'a>(&self, attribute: &'s Attribute, value: &'a PatternValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
fn table_for_attribute_and_value<'s, 'a>(&self, attribute: &'s Attribute, value: &'a EvolvedValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
if attribute.fulltext {
|
||||
match value {
|
||||
&PatternValuePlace::Placeholder =>
|
||||
&EvolvedValuePlace::Placeholder =>
|
||||
Ok(DatomsTable::Datoms), // We don't need the value.
|
||||
|
||||
// TODO: an existing non-string binding can cause this pattern to fail.
|
||||
&PatternValuePlace::Variable(_) =>
|
||||
&EvolvedValuePlace::Variable(_) =>
|
||||
Ok(DatomsTable::AllDatoms),
|
||||
|
||||
&PatternValuePlace::Constant(NonIntegerConstant::Text(_)) =>
|
||||
&EvolvedValuePlace::Value(TypedValue::String(_)) =>
|
||||
Ok(DatomsTable::AllDatoms),
|
||||
|
||||
_ => {
|
||||
|
@ -729,7 +758,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
}
|
||||
|
||||
fn table_for_unknown_attribute<'s, 'a>(&self, value: &'a PatternValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
fn table_for_unknown_attribute<'s, 'a>(&self, value: &'a EvolvedValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
// If the value is known to be non-textual, we can simply use the regular datoms
|
||||
// table (TODO: and exclude on `index_fulltext`!).
|
||||
//
|
||||
|
@ -742,7 +771,7 @@ impl ConjoiningClauses {
|
|||
match value {
|
||||
// TODO: see if the variable is projected, aggregated, or compared elsewhere in
|
||||
// the query. If it's not, we don't need to use all_datoms here.
|
||||
&PatternValuePlace::Variable(ref v) => {
|
||||
&EvolvedValuePlace::Variable(ref v) => {
|
||||
// If `required_types` and `known_types` don't exclude strings,
|
||||
// we need to query `all_datoms`.
|
||||
if self.required_types.get(v).map_or(true, |s| s.contains(ValueType::String)) &&
|
||||
|
@ -752,7 +781,7 @@ impl ConjoiningClauses {
|
|||
DatomsTable::Datoms
|
||||
}
|
||||
}
|
||||
&PatternValuePlace::Constant(NonIntegerConstant::Text(_)) =>
|
||||
&EvolvedValuePlace::Value(TypedValue::String(_)) =>
|
||||
DatomsTable::AllDatoms,
|
||||
_ =>
|
||||
DatomsTable::Datoms,
|
||||
|
@ -763,21 +792,17 @@ impl ConjoiningClauses {
|
|||
/// If the attribute input or value binding doesn't name an attribute, or doesn't name an
|
||||
/// attribute that is congruent with the supplied value, we return an `EmptyBecause`.
|
||||
/// The caller is responsible for marking the CC as known-empty if this is a fatal failure.
|
||||
fn table_for_places<'s, 'a>(&self, schema: &'s Schema, attribute: &'a PatternNonValuePlace, value: &'a PatternValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
fn table_for_places<'s, 'a>(&self, schema: &'s Schema, attribute: &'a EvolvedNonValuePlace, value: &'a EvolvedValuePlace) -> ::std::result::Result<DatomsTable, EmptyBecause> {
|
||||
match attribute {
|
||||
&PatternNonValuePlace::Ident(ref kw) =>
|
||||
schema.attribute_for_ident(kw)
|
||||
.ok_or_else(|| EmptyBecause::InvalidAttributeIdent(kw.cloned()))
|
||||
.and_then(|(attribute, _entid)| self.table_for_attribute_and_value(attribute, value)),
|
||||
&PatternNonValuePlace::Entid(id) =>
|
||||
&EvolvedNonValuePlace::Entid(id) =>
|
||||
schema.attribute_for_entid(id)
|
||||
.ok_or_else(|| EmptyBecause::InvalidAttributeEntid(id))
|
||||
.and_then(|attribute| self.table_for_attribute_and_value(attribute, value)),
|
||||
// TODO: In a prepared context, defer this decision until a second algebrizing phase.
|
||||
// #278.
|
||||
&PatternNonValuePlace::Placeholder =>
|
||||
&EvolvedNonValuePlace::Placeholder =>
|
||||
self.table_for_unknown_attribute(value),
|
||||
&PatternNonValuePlace::Variable(ref v) => {
|
||||
&EvolvedNonValuePlace::Variable(ref v) => {
|
||||
// See if we have a binding for the variable.
|
||||
match self.bound_value(v) {
|
||||
// TODO: In a prepared context, defer this decision until a second algebrizing phase.
|
||||
|
@ -786,7 +811,7 @@ impl ConjoiningClauses {
|
|||
self.table_for_unknown_attribute(value),
|
||||
Some(TypedValue::Ref(id)) =>
|
||||
// Recurse: it's easy.
|
||||
self.table_for_places(schema, &PatternNonValuePlace::Entid(id), value),
|
||||
self.table_for_places(schema, &EvolvedNonValuePlace::Entid(id), value),
|
||||
Some(TypedValue::Keyword(ref kw)) =>
|
||||
// Don't recurse: avoid needing to clone the keyword.
|
||||
schema.attribute_for_ident(kw)
|
||||
|
@ -815,7 +840,7 @@ impl ConjoiningClauses {
|
|||
/// This is a mutating method because it mutates the aliaser function!
|
||||
/// Note that if this function decides that a pattern cannot match, it will flip
|
||||
/// `empty_because`.
|
||||
fn alias_table<'s, 'a>(&mut self, schema: &'s Schema, pattern: &'a Pattern) -> Option<SourceAlias> {
|
||||
fn alias_table<'s, 'a>(&mut self, schema: &'s Schema, pattern: &'a EvolvedPattern) -> Option<SourceAlias> {
|
||||
self.table_for_places(schema, &pattern.attribute, &pattern.value)
|
||||
.map_err(|reason| {
|
||||
self.mark_known_empty(reason);
|
||||
|
@ -833,25 +858,22 @@ impl ConjoiningClauses {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_attribute<'s, 'a>(&self, schema: &'s Schema, pattern: &'a Pattern) -> Option<&'s Attribute> {
|
||||
fn get_attribute<'s, 'a>(&self, schema: &'s Schema, pattern: &'a EvolvedPattern) -> Option<&'s Attribute> {
|
||||
match pattern.attribute {
|
||||
PatternNonValuePlace::Entid(id) =>
|
||||
EvolvedNonValuePlace::Entid(id) =>
|
||||
// We know this one is known if the attribute lookup succeeds…
|
||||
schema.attribute_for_entid(id),
|
||||
PatternNonValuePlace::Ident(ref kw) =>
|
||||
schema.attribute_for_ident(kw).map(|(a, _id)| a),
|
||||
PatternNonValuePlace::Variable(ref var) =>
|
||||
EvolvedNonValuePlace::Variable(ref var) =>
|
||||
// If the pattern has a variable, we've already determined that the binding -- if
|
||||
// any -- is acceptable and yields a table. Here, simply look to see if it names
|
||||
// an attribute so we can find out the type.
|
||||
self.value_bindings.get(var)
|
||||
.and_then(|val| self.get_attribute_for_value(schema, val)),
|
||||
_ =>
|
||||
None,
|
||||
EvolvedNonValuePlace::Placeholder => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_value_type<'s, 'a>(&self, schema: &'s Schema, pattern: &'a Pattern) -> Option<ValueType> {
|
||||
fn get_value_type<'s, 'a>(&self, schema: &'s Schema, pattern: &'a EvolvedPattern) -> Option<ValueType> {
|
||||
self.get_attribute(schema, pattern).map(|a| a.value_type)
|
||||
}
|
||||
}
|
||||
|
@ -984,43 +1006,83 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
impl ConjoiningClauses {
|
||||
pub fn apply_clauses(&mut self, schema: &Schema, where_clauses: Vec<WhereClause>) -> Result<()> {
|
||||
fn apply_evolved_patterns(&mut self, known: Known, mut patterns: VecDeque<EvolvedPattern>) -> Result<()> {
|
||||
while let Some(pattern) = patterns.pop_front() {
|
||||
match self.evolve_pattern(known, pattern) {
|
||||
PlaceOrEmpty::Place(re_evolved) => self.apply_pattern(known, re_evolved),
|
||||
PlaceOrEmpty::Empty(because) => {
|
||||
self.mark_known_empty(because);
|
||||
patterns.clear();
|
||||
},
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn apply_clauses(&mut self, known: Known, where_clauses: Vec<WhereClause>) -> Result<()> {
|
||||
// We apply (top level) type predicates first as an optimization.
|
||||
for clause in where_clauses.iter() {
|
||||
if let &WhereClause::TypeAnnotation(ref anno) = clause {
|
||||
self.apply_type_anno(anno)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Then we apply everything else.
|
||||
// Note that we collect contiguous runs of patterns so that we can evolve them
|
||||
// together to take advantage of mutual partial evaluation.
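// (Editorial illustration, not part of this commit.) A "contiguous run" is a maximal
// sequence of adjacent pattern clauses in the :where list. For example, in
//   [[?x :foo/knows ?y] [?y :foo/age 30] (not-join [?x] ...) [?x :foo/name ?n]]
// the first two patterns are collected and evolved together as one run; the final
// pattern starts a new run once the not-join has been applied.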
|
||||
let mut remaining = where_clauses.len();
|
||||
let mut patterns: VecDeque<EvolvedPattern> = VecDeque::with_capacity(remaining);
|
||||
for clause in where_clauses {
|
||||
remaining -= 1;
|
||||
if let &WhereClause::TypeAnnotation(_) = &clause {
|
||||
continue;
|
||||
}
|
||||
self.apply_clause(schema, clause)?;
|
||||
match clause {
|
||||
WhereClause::Pattern(p) => {
|
||||
match self.make_evolved_pattern(known, p) {
|
||||
PlaceOrEmpty::Place(evolved) => patterns.push_back(evolved),
|
||||
PlaceOrEmpty::Empty(because) => {
|
||||
self.mark_known_empty(because);
|
||||
return Ok(());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
if !patterns.is_empty() {
|
||||
self.apply_evolved_patterns(known, patterns)?;
|
||||
patterns = VecDeque::with_capacity(remaining);
|
||||
}
|
||||
self.apply_clause(known, clause)?;
|
||||
},
|
||||
}
|
||||
}
|
||||
self.apply_evolved_patterns(known, patterns)
|
||||
}
|
||||
|
||||
// This is here, rather than in `lib.rs`, because it's recursive: `or` can contain `or`,
|
||||
// and so on.
|
||||
pub fn apply_clause(&mut self, schema: &Schema, where_clause: WhereClause) -> Result<()> {
|
||||
pub fn apply_clause(&mut self, known: Known, where_clause: WhereClause) -> Result<()> {
|
||||
match where_clause {
|
||||
WhereClause::Pattern(p) => {
|
||||
self.apply_pattern(schema, p);
|
||||
match self.make_evolved_pattern(known, p) {
|
||||
PlaceOrEmpty::Place(evolved) => self.apply_pattern(known, evolved),
|
||||
PlaceOrEmpty::Empty(because) => self.mark_known_empty(because),
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
WhereClause::Pred(p) => {
|
||||
self.apply_predicate(schema, p)
|
||||
self.apply_predicate(known, p)
|
||||
},
|
||||
WhereClause::WhereFn(f) => {
|
||||
self.apply_where_fn(schema, f)
|
||||
self.apply_where_fn(known, f)
|
||||
},
|
||||
WhereClause::OrJoin(o) => {
|
||||
validate_or_join(&o)?;
|
||||
self.apply_or_join(schema, o)
|
||||
self.apply_or_join(known, o)
|
||||
},
|
||||
WhereClause::NotJoin(n) => {
|
||||
validate_not_join(&n)?;
|
||||
self.apply_not_join(schema, n)
|
||||
self.apply_not_join(known, n)
|
||||
},
|
||||
WhereClause::TypeAnnotation(anno) => {
|
||||
self.apply_type_anno(&anno)
|
||||
|
|
|
@ -8,8 +8,6 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
use mentat_core::Schema;
|
||||
|
||||
use mentat_query::{
|
||||
ContainsVariables,
|
||||
NotJoin,
|
||||
|
@ -28,8 +26,10 @@ use types::{
|
|||
ComputedTable,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
impl ConjoiningClauses {
|
||||
pub fn apply_not_join(&mut self, schema: &Schema, not_join: NotJoin) -> Result<()> {
|
||||
pub fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
|
||||
let unified = match not_join.unify_vars {
|
||||
UnifyVars::Implicit => not_join.collect_mentioned_variables(),
|
||||
UnifyVars::Explicit(vs) => vs,
|
||||
|
@ -49,7 +49,7 @@ impl ConjoiningClauses {
|
|||
}
|
||||
}
|
||||
|
||||
template.apply_clauses(&schema, not_join.clauses)?;
|
||||
template.apply_clauses(known, not_join.clauses)?;
|
||||
|
||||
if template.is_known_empty() {
|
||||
return Ok(());
|
||||
|
@ -70,6 +70,12 @@ impl ConjoiningClauses {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
// If we don't impose any constraints on the output, we might as well
|
||||
// not exist.
|
||||
if template.wheres.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let subquery = ComputedTable::Subquery(template);
|
||||
|
||||
self.wheres.add_intersection(ColumnConstraint::NotExists(subquery));
|
||||
|
@ -133,13 +139,15 @@ mod testing {
|
|||
};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let known = Known::for_schema(schema);
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
algebrize(schema.into(), parsed).expect("algebrize failed").cc
|
||||
algebrize(known, parsed).expect("algebrize failed").cc
|
||||
}
|
||||
|
||||
fn alg_with_inputs(schema: &Schema, input: &str, inputs: QueryInputs) -> ConjoiningClauses {
|
||||
let known = Known::for_schema(schema);
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
algebrize_with_inputs(schema.into(), parsed, 0, inputs).expect("algebrize failed").cc
|
||||
algebrize_with_inputs(known, parsed, 0, inputs).expect("algebrize failed").cc
|
||||
}
|
||||
|
||||
fn prepopulated_schema() -> Schema {
|
||||
|
@ -292,7 +300,7 @@ mod testing {
|
|||
let age = QueryValue::Entid(68);
|
||||
|
||||
let john = QueryValue::TypedValue(TypedValue::typed_string("John"));
|
||||
let eleven = QueryValue::PrimitiveLong(11);
|
||||
let eleven = QueryValue::TypedValue(TypedValue::Long(11));
|
||||
|
||||
let mut subquery = ConjoiningClauses::default();
|
||||
subquery.from = vec![SourceAlias(DatomsTable::Datoms, d3)];
|
||||
|
@ -541,12 +549,13 @@ mod testing {
|
|||
#[test]
|
||||
fn test_unbound_var_fails() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x
|
||||
:in ?y
|
||||
:where (not [?x :foo/knows ?y])]"#;
|
||||
let parsed = parse_find_string(query).expect("parse failed");
|
||||
let err = algebrize(&schema, parsed).err();
|
||||
let err = algebrize(known, parsed).err();
|
||||
assert!(err.is_some());
|
||||
match err.unwrap() {
|
||||
Error(ErrorKind::UnboundVariable(var), _) => { assert_eq!(var, PlainSymbol("?x".to_string())); },
|
||||
|
|
|
@ -15,7 +15,6 @@ use std::collections::{
|
|||
};
|
||||
|
||||
use mentat_core::{
|
||||
Schema,
|
||||
ValueTypeSet,
|
||||
};
|
||||
|
||||
|
@ -46,11 +45,15 @@ use types::{
|
|||
ComputedTable,
|
||||
DatomsTable,
|
||||
EmptyBecause,
|
||||
EvolvedPattern,
|
||||
PlaceOrEmpty,
|
||||
QualifiedAlias,
|
||||
SourceAlias,
|
||||
VariableColumn,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
/// Return true if both left and right are the same variable or both are non-variable.
|
||||
fn _simply_matches_place(left: &PatternNonValuePlace, right: &PatternNonValuePlace) -> bool {
|
||||
match (left, right) {
|
||||
|
@ -88,21 +91,21 @@ pub enum DeconstructedOrJoin {
|
|||
|
||||
/// Application of `or`. Note that this is recursive!
|
||||
impl ConjoiningClauses {
|
||||
fn apply_or_where_clause(&mut self, schema: &Schema, clause: OrWhereClause) -> Result<()> {
|
||||
fn apply_or_where_clause(&mut self, known: Known, clause: OrWhereClause) -> Result<()> {
|
||||
match clause {
|
||||
OrWhereClause::Clause(clause) => self.apply_clause(schema, clause),
|
||||
OrWhereClause::Clause(clause) => self.apply_clause(known, clause),
|
||||
|
||||
// A query might be:
|
||||
// [:find ?x :where (or (and [?x _ 5] [?x :foo/bar 7]))]
|
||||
// which is equivalent to dropping the `or` _and_ the `and`!
|
||||
OrWhereClause::And(clauses) => {
|
||||
self.apply_clauses(schema, clauses)?;
|
||||
self.apply_clauses(known, clauses)?;
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_or_join(&mut self, schema: &Schema, mut or_join: OrJoin) -> Result<()> {
|
||||
pub fn apply_or_join(&mut self, known: Known, mut or_join: OrJoin) -> Result<()> {
|
||||
// Simple optimization. Empty `or` clauses disappear. Unit `or` clauses
|
||||
// are equivalent to just the inner clause.
|
||||
|
||||
|
@ -113,7 +116,7 @@ impl ConjoiningClauses {
|
|||
0 => Ok(()),
|
||||
1 if or_join.is_fully_unified() => {
|
||||
let clause = or_join.clauses.pop().expect("there's a clause");
|
||||
self.apply_or_where_clause(schema, clause)
|
||||
self.apply_or_where_clause(known, clause)
|
||||
},
|
||||
// Either there's only one clause pattern, and it's not fully unified, or we
|
||||
// have multiple clauses.
|
||||
|
@ -122,7 +125,7 @@ impl ConjoiningClauses {
|
|||
// Notably, this clause might be an `and`, making this a complex pattern, so we can't
|
||||
// necessarily rewrite it in place.
|
||||
// In the latter case, we still need to do a bit more work.
|
||||
_ => self.apply_non_trivial_or_join(schema, or_join),
|
||||
_ => self.apply_non_trivial_or_join(known, or_join),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,7 +150,7 @@ impl ConjoiningClauses {
|
|||
/// - No patterns can match: the enclosing CC is known-empty.
|
||||
/// - Some patterns can't match: they are discarded.
|
||||
/// - Only one pattern can match: the `or` can be simplified away.
|
||||
fn deconstruct_or_join(&self, schema: &Schema, or_join: OrJoin) -> DeconstructedOrJoin {
|
||||
fn deconstruct_or_join(&self, known: Known, or_join: OrJoin) -> DeconstructedOrJoin {
|
||||
// If we have explicit non-maximal unify-vars, we *can't* simply run this as a
|
||||
// single pattern --
|
||||
// ```
|
||||
|
@ -172,7 +175,7 @@ impl ConjoiningClauses {
|
|||
// It's safe to simply 'leak' the entire clause, because we know every var in it is
|
||||
// supposed to unify with the enclosing form.
|
||||
1 => DeconstructedOrJoin::Unit(or_join.clauses.into_iter().next().unwrap()),
|
||||
_ => self._deconstruct_or_join(schema, or_join),
|
||||
_ => self._deconstruct_or_join(known, or_join),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -183,7 +186,7 @@ impl ConjoiningClauses {
|
|||
///
|
||||
/// See the description of `deconstruct_or_join` for more details. This method expects
|
||||
/// to be called _only_ by `deconstruct_or_join`.
|
||||
fn _deconstruct_or_join(&self, schema: &Schema, or_join: OrJoin) -> DeconstructedOrJoin {
|
||||
fn _deconstruct_or_join(&self, known: Known, or_join: OrJoin) -> DeconstructedOrJoin {
|
||||
// Preconditions enforced by `deconstruct_or_join`.
|
||||
// Note that a fully unified explicit `or-join` can arrive here, and might leave as
|
||||
// an implicit `or`.
|
||||
|
@ -215,8 +218,19 @@ impl ConjoiningClauses {
|
|||
// Compute the table for the pattern. If we can't figure one out, it means
|
||||
// the pattern cannot succeed; we drop it.
|
||||
// Inside an `or` it's not a failure for a pattern to be unable to match, which
|
||||
// manifests as a table being unable to be found.
|
||||
let table = self.table_for_places(schema, &p.attribute, &p.value);
|
||||
use self::PlaceOrEmpty::*;
|
||||
let table = match self.make_evolved_attribute(&known, p.attribute.clone()) {
|
||||
Place((aaa, value_type)) => {
|
||||
match self.make_evolved_value(&known, value_type, p.value.clone()) {
|
||||
Place(v) => {
|
||||
self.table_for_places(known.schema, &aaa, &v)
|
||||
},
|
||||
Empty(e) => Err(e),
|
||||
}
|
||||
},
|
||||
Empty(e) => Err(e),
|
||||
};
|
||||
|
||||
match table {
|
||||
Err(e) => {
|
||||
empty_because = Some(e);
|
||||
|
@ -290,8 +304,8 @@ impl ConjoiningClauses {
|
|||
}
|
||||
}
|
||||
|
||||
fn apply_non_trivial_or_join(&mut self, schema: &Schema, or_join: OrJoin) -> Result<()> {
|
||||
match self.deconstruct_or_join(schema, or_join) {
|
||||
fn apply_non_trivial_or_join(&mut self, known: Known, or_join: OrJoin) -> Result<()> {
|
||||
match self.deconstruct_or_join(known, or_join) {
|
||||
DeconstructedOrJoin::KnownSuccess => {
|
||||
// The pattern came to us empty -- `(or)`. Do nothing.
|
||||
Ok(())
|
||||
|
@ -304,22 +318,29 @@ impl ConjoiningClauses {
|
|||
},
|
||||
DeconstructedOrJoin::Unit(clause) => {
|
||||
// There was only one clause. We're unifying all variables, so we can just apply here.
|
||||
self.apply_or_where_clause(schema, clause)
|
||||
self.apply_or_where_clause(known, clause)
|
||||
},
|
||||
DeconstructedOrJoin::UnitPattern(pattern) => {
|
||||
// Same, but simpler.
|
||||
self.apply_pattern(schema, pattern);
|
||||
match self.make_evolved_pattern(known, pattern) {
|
||||
PlaceOrEmpty::Empty(e) => {
|
||||
self.mark_known_empty(e);
|
||||
},
|
||||
PlaceOrEmpty::Place(pattern) => {
|
||||
self.apply_pattern(known, pattern);
|
||||
},
|
||||
};
|
||||
Ok(())
|
||||
},
|
||||
DeconstructedOrJoin::Simple(patterns, mentioned_vars) => {
|
||||
// Hooray! Fully unified and plain ol' patterns that all use the same table.
|
||||
// Go right ahead and produce a set of constraint alternations that we can collect,
|
||||
// using a single table alias.
|
||||
self.apply_simple_or_join(schema, patterns, mentioned_vars)
|
||||
self.apply_simple_or_join(known, patterns, mentioned_vars)
|
||||
},
|
||||
DeconstructedOrJoin::Complex(or_join) => {
|
||||
// Do this the hard way.
|
||||
self.apply_complex_or_join(schema, or_join)
|
||||
self.apply_complex_or_join(known, or_join)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -353,7 +374,7 @@ impl ConjoiningClauses {
|
|||
/// ```
|
||||
///
|
||||
fn apply_simple_or_join(&mut self,
|
||||
schema: &Schema,
|
||||
known: Known,
|
||||
patterns: Vec<Pattern>,
|
||||
mentioned_vars: BTreeSet<Variable>)
|
||||
-> Result<()> {
|
||||
|
@ -363,6 +384,17 @@ impl ConjoiningClauses {
|
|||
|
||||
assert!(patterns.len() >= 2);
|
||||
|
||||
let patterns: Vec<EvolvedPattern> = patterns.into_iter().filter_map(|pattern| {
|
||||
match self.make_evolved_pattern(known, pattern) {
|
||||
PlaceOrEmpty::Empty(_e) => {
|
||||
// Never mind.
|
||||
None
|
||||
},
|
||||
PlaceOrEmpty::Place(p) => Some(p),
|
||||
}
|
||||
}).collect();
|
||||
|
||||
|
||||
// Begin by building a base CC that we'll use to produce constraints from each pattern.
|
||||
// Populate this base CC with whatever variables are already known from the CC to which
|
||||
// we're applying this `or`.
|
||||
|
@ -373,7 +405,7 @@ impl ConjoiningClauses {
|
|||
|
||||
// We expect this to always work: if it doesn't, it means we should never have got to this
|
||||
// point.
|
||||
let source_alias = self.alias_table(schema, &patterns[0]).expect("couldn't get table");
|
||||
let source_alias = self.alias_table(known.schema, &patterns[0]).expect("couldn't get table");
|
||||
|
||||
// This is where we'll collect everything we eventually add to the destination CC.
|
||||
let mut folded = ConjoiningClauses::default();
|
||||
|
@ -405,7 +437,7 @@ impl ConjoiningClauses {
|
|||
.map(|pattern| {
|
||||
let mut receptacle = template.make_receptacle();
|
||||
println!("Applying pattern with attribute {:?}", pattern.attribute);
|
||||
receptacle.apply_pattern_clause_for_alias(schema, &pattern, &source_alias);
|
||||
receptacle.apply_pattern_clause_for_alias(known, &pattern, &source_alias);
|
||||
receptacle
|
||||
})
|
||||
.peekable();
|
||||
|
@ -543,7 +575,7 @@ impl ConjoiningClauses {
|
|||
///
|
||||
/// Note that a top-level standalone `or` doesn't really need to be aliased, but
|
||||
/// it shouldn't do any harm.
|
||||
fn apply_complex_or_join(&mut self, schema: &Schema, or_join: OrJoin) -> Result<()> {
|
||||
fn apply_complex_or_join(&mut self, known: Known, or_join: OrJoin) -> Result<()> {
|
||||
// N.B., a solitary pattern here *cannot* be simply applied to the enclosing CC. We don't
|
||||
// want to join all the vars, and indeed if it were safe to do so, we wouldn't have ended up
|
||||
// in this function!
|
||||
|
@ -562,10 +594,10 @@ impl ConjoiningClauses {
|
|||
let mut receptacle = template.make_receptacle();
|
||||
match clause {
|
||||
OrWhereClause::And(clauses) => {
|
||||
receptacle.apply_clauses(&schema, clauses)?;
|
||||
receptacle.apply_clauses(known, clauses)?;
|
||||
},
|
||||
OrWhereClause::Clause(clause) => {
|
||||
receptacle.apply_clause(&schema, clause)?;
|
||||
receptacle.apply_clause(known, clause)?;
|
||||
},
|
||||
}
|
||||
if receptacle.is_known_empty() {
|
||||
|
@ -670,6 +702,7 @@ impl ConjoiningClauses {
|
|||
let alias = self.next_alias_for_table(table);
|
||||
|
||||
// Stitch the computed table into column_bindings, so we get cross-linking.
|
||||
let schema = known.schema;
|
||||
for var in var_associations.into_iter() {
|
||||
self.bind_column_to_var(schema, alias.clone(), VariableColumn::Variable(var.clone()), var);
|
||||
}
|
||||
|
@ -726,6 +759,7 @@ mod testing {
|
|||
|
||||
use mentat_core::{
|
||||
Attribute,
|
||||
Schema,
|
||||
TypedValue,
|
||||
ValueType,
|
||||
};
|
||||
|
@ -759,16 +793,16 @@ mod testing {
|
|||
algebrize_with_counter,
|
||||
};
|
||||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
fn alg(known: Known, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
algebrize(schema.into(), parsed).expect("algebrize failed").cc
|
||||
algebrize(known, parsed).expect("algebrize failed").cc
|
||||
}
|
||||
|
||||
/// Algebrize with a starting counter, so we can compare inner queries by algebrizing a
|
||||
/// simpler version.
|
||||
fn alg_c(schema: &Schema, counter: usize, input: &str) -> ConjoiningClauses {
|
||||
fn alg_c(known: Known, counter: usize, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
algebrize_with_counter(schema.into(), parsed, counter).expect("algebrize failed").cc
|
||||
algebrize_with_counter(known, parsed, counter).expect("algebrize failed").cc
|
||||
}
|
||||
|
||||
fn compare_ccs(left: ConjoiningClauses, right: ConjoiningClauses) {
|
||||
|
@ -815,40 +849,43 @@ mod testing {
|
|||
#[test]
|
||||
fn test_schema_based_failure() {
|
||||
let schema = Schema::default();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x
|
||||
:where (or [?x :foo/nope1 "John"]
|
||||
[?x :foo/nope2 "Ámbar"]
|
||||
[?x :foo/nope3 "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
assert!(cc.is_known_empty());
|
||||
assert_eq!(cc.empty_because, Some(EmptyBecause::InvalidAttributeIdent(NamespacedKeyword::new("foo", "nope3"))));
|
||||
assert_eq!(cc.empty_because, Some(EmptyBecause::UnresolvedIdent(NamespacedKeyword::new("foo", "nope3"))));
|
||||
}
|
||||
|
||||
/// Test that if only one of the attributes in an `or` resolves, it's equivalent to a simple query.
|
||||
#[test]
|
||||
fn test_only_one_arm_succeeds() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x
|
||||
:where (or [?x :foo/nope "John"]
|
||||
[?x :foo/parent "Ámbar"]
|
||||
[?x :foo/nope "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
assert!(!cc.is_known_empty());
|
||||
compare_ccs(cc, alg(&schema, r#"[:find ?x :where [?x :foo/parent "Ámbar"]]"#));
|
||||
compare_ccs(cc, alg(known, r#"[:find ?x :where [?x :foo/parent "Ámbar"]]"#));
|
||||
}
|
||||
|
||||
// Simple alternation.
|
||||
#[test]
|
||||
fn test_simple_alternation() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x
|
||||
:where (or [?x :foo/knows "John"]
|
||||
[?x :foo/parent "Ámbar"]
|
||||
[?x :foo/knows "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
let vx = Variable::from_valid_name("?x");
|
||||
let d0 = "datoms00".to_string();
|
||||
let d0e = QualifiedAlias::new(d0.clone(), DatomsColumn::Entity);
|
||||
|
@ -882,6 +919,7 @@ mod testing {
|
|||
#[test]
|
||||
fn test_alternation_with_pattern() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find [?x ?name]
|
||||
:where
|
||||
|
@ -889,7 +927,7 @@ mod testing {
|
|||
(or [?x :foo/knows "John"]
|
||||
[?x :foo/parent "Ámbar"]
|
||||
[?x :foo/knows "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
let vx = Variable::from_valid_name("?x");
|
||||
let d0 = "datoms00".to_string();
|
||||
let d1 = "datoms01".to_string();
|
||||
|
@ -932,6 +970,7 @@ mod testing {
|
|||
#[test]
|
||||
fn test_alternation_with_pattern_and_predicate() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x ?age
|
||||
:where
|
||||
|
@ -939,7 +978,7 @@ mod testing {
|
|||
[[< ?age 30]]
|
||||
(or [?x :foo/knows "John"]
|
||||
[?x :foo/knows "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
let vx = Variable::from_valid_name("?x");
|
||||
let d0 = "datoms00".to_string();
|
||||
let d1 = "datoms01".to_string();
|
||||
|
@ -985,10 +1024,11 @@ mod testing {
|
|||
#[test]
|
||||
fn test_unit_or_join_doesnt_flatten() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"[:find ?x
|
||||
:where [?x :foo/knows ?y]
|
||||
(or-join [?x] [?x :foo/parent ?y])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
let vx = Variable::from_valid_name("?x");
|
||||
let vy = Variable::from_valid_name("?y");
|
||||
let d0 = "datoms00".to_string();
|
||||
|
@ -1020,28 +1060,30 @@ mod testing {
|
|||
#[test]
|
||||
fn test_unit_or_does_flatten() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let or_query = r#"[:find ?x
|
||||
:where [?x :foo/knows ?y]
|
||||
(or [?x :foo/parent ?y])]"#;
|
||||
let flat_query = r#"[:find ?x
|
||||
:where [?x :foo/knows ?y]
|
||||
[?x :foo/parent ?y]]"#;
|
||||
compare_ccs(alg(&schema, or_query),
|
||||
alg(&schema, flat_query));
|
||||
compare_ccs(alg(known, or_query),
|
||||
alg(known, flat_query));
|
||||
}
|
||||
|
||||
// Elision of `and`.
|
||||
#[test]
|
||||
fn test_unit_or_and_does_flatten() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let or_query = r#"[:find ?x
|
||||
:where (or (and [?x :foo/parent ?y]
|
||||
[?x :foo/age 7]))]"#;
|
||||
let flat_query = r#"[:find ?x
|
||||
:where [?x :foo/parent ?y]
|
||||
[?x :foo/age 7]]"#;
|
||||
compare_ccs(alg(&schema, or_query),
|
||||
alg(&schema, flat_query));
|
||||
compare_ccs(alg(known, or_query),
|
||||
alg(known, flat_query));
|
||||
}
|
||||
|
||||
// Alternation with `and`.
|
||||
|
@ -1054,12 +1096,13 @@ mod testing {
|
|||
#[test]
|
||||
fn test_alternation_with_and() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let query = r#"
|
||||
[:find ?x
|
||||
:where (or (and [?x :foo/knows "John"]
|
||||
[?x :foo/parent "Ámbar"])
|
||||
[?x :foo/knows "Daphne"])]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
let mut tables = cc.computed_tables.into_iter();
|
||||
match (tables.next(), tables.next()) {
|
||||
(Some(ComputedTable::Union { projection, type_extraction, arms }), None) => {
|
||||
|
@ -1069,12 +1112,12 @@ mod testing {
|
|||
let mut arms = arms.into_iter();
|
||||
match (arms.next(), arms.next(), arms.next()) {
|
||||
(Some(and), Some(pattern), None) => {
|
||||
let expected_and = alg_c(&schema,
|
||||
let expected_and = alg_c(known,
|
||||
0, // The first pattern to be processed.
|
||||
r#"[:find ?x :where [?x :foo/knows "John"] [?x :foo/parent "Ámbar"]]"#);
|
||||
compare_ccs(and, expected_and);
|
||||
|
||||
let expected_pattern = alg_c(&schema,
|
||||
let expected_pattern = alg_c(known,
|
||||
2, // Two aliases taken by the other arm.
|
||||
r#"[:find ?x :where [?x :foo/knows "Daphne"]]"#);
|
||||
compare_ccs(pattern, expected_pattern);
|
||||
|
@ -1093,6 +1136,7 @@ mod testing {
|
|||
#[test]
|
||||
fn test_type_based_or_pruning() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
// This simplifies to:
|
||||
// [:find ?x
|
||||
// :where [?a :some/int ?x]
|
||||
|
@ -1106,6 +1150,6 @@ mod testing {
|
|||
[:find ?x
|
||||
:where [?a :foo/age ?x]
|
||||
[_ :foo/height ?x]]"#;
|
||||
compare_ccs(alg(&schema, query), alg(&schema, simple));
|
||||
compare_ccs(alg(known, query), alg(known, simple));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,10 +9,11 @@
|
|||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
use mentat_core::{
|
||||
Entid,
|
||||
HasSchema,
|
||||
Schema,
|
||||
TypedValue,
|
||||
ValueType,
|
||||
ValueTypeSet,
|
||||
};
|
||||
|
||||
use mentat_query::{
|
||||
|
@ -20,19 +21,28 @@ use mentat_query::{
|
|||
PatternValuePlace,
|
||||
PatternNonValuePlace,
|
||||
SrcVar,
|
||||
Variable,
|
||||
};
|
||||
|
||||
use super::RcCloned;
|
||||
|
||||
use clauses::ConjoiningClauses;
|
||||
use clauses::{
|
||||
ConjoiningClauses,
|
||||
};
|
||||
|
||||
use types::{
|
||||
ColumnConstraint,
|
||||
DatomsColumn,
|
||||
EmptyBecause,
|
||||
EvolvedNonValuePlace,
|
||||
EvolvedPattern,
|
||||
EvolvedValuePlace,
|
||||
PlaceOrEmpty,
|
||||
SourceAlias,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
/// Application of patterns.
|
||||
impl ConjoiningClauses {
|
||||
|
||||
|
@ -71,7 +81,7 @@ impl ConjoiningClauses {
|
|||
/// existence subquery instead of a join.
|
||||
///
|
||||
/// This method is only public for use from `or.rs`.
|
||||
pub fn apply_pattern_clause_for_alias<'s>(&mut self, schema: &'s Schema, pattern: &Pattern, alias: &SourceAlias) {
|
||||
pub fn apply_pattern_clause_for_alias(&mut self, known: Known, pattern: &EvolvedPattern, alias: &SourceAlias) {
|
||||
if self.is_known_empty() {
|
||||
return;
|
||||
}
|
||||
|
@ -88,33 +98,25 @@ impl ConjoiningClauses {
|
|||
|
||||
let ref col = alias.1;
|
||||
|
||||
let schema = known.schema;
|
||||
match pattern.entity {
|
||||
PatternNonValuePlace::Placeholder =>
|
||||
EvolvedNonValuePlace::Placeholder =>
|
||||
// Placeholders don't contribute any column bindings, nor do
|
||||
// they constrain the query -- there's no need to produce
|
||||
// IS NOT NULL, because we don't store nulls in our schema.
|
||||
(),
|
||||
PatternNonValuePlace::Variable(ref v) =>
|
||||
EvolvedNonValuePlace::Variable(ref v) =>
|
||||
self.bind_column_to_var(schema, col.clone(), DatomsColumn::Entity, v.clone()),
|
||||
PatternNonValuePlace::Entid(entid) =>
|
||||
EvolvedNonValuePlace::Entid(entid) =>
|
||||
self.constrain_column_to_entity(col.clone(), DatomsColumn::Entity, entid),
|
||||
PatternNonValuePlace::Ident(ref ident) => {
|
||||
if let Some(entid) = self.entid_for_ident(schema, ident.as_ref()) {
|
||||
self.constrain_column_to_entity(col.clone(), DatomsColumn::Entity, entid.into())
|
||||
} else {
|
||||
// A resolution failure means we're done here.
|
||||
self.mark_known_empty(EmptyBecause::UnresolvedIdent(ident.cloned()));
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match pattern.attribute {
|
||||
PatternNonValuePlace::Placeholder =>
|
||||
EvolvedNonValuePlace::Placeholder =>
|
||||
(),
|
||||
PatternNonValuePlace::Variable(ref v) =>
|
||||
EvolvedNonValuePlace::Variable(ref v) =>
|
||||
self.bind_column_to_var(schema, col.clone(), DatomsColumn::Attribute, v.clone()),
|
||||
PatternNonValuePlace::Entid(entid) => {
|
||||
EvolvedNonValuePlace::Entid(entid) => {
|
||||
if !schema.is_attribute(entid) {
|
||||
// Furthermore, that entid must resolve to an attribute. If it doesn't, this
|
||||
// query is meaningless.
|
||||
|
@ -123,20 +125,6 @@ impl ConjoiningClauses {
|
|||
}
|
||||
self.constrain_attribute(col.clone(), entid)
|
||||
},
|
||||
PatternNonValuePlace::Ident(ref ident) => {
|
||||
if let Some(entid) = self.entid_for_ident(schema, ident) {
|
||||
self.constrain_attribute(col.clone(), entid.into());
|
||||
|
||||
if !schema.is_attribute(entid) {
|
||||
self.mark_known_empty(EmptyBecause::InvalidAttributeIdent(ident.cloned()));
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
// A resolution failure means we're done here.
|
||||
self.mark_known_empty(EmptyBecause::UnresolvedIdent(ident.cloned()));
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine if the pattern's value type is known.
|
||||
|
@ -147,10 +135,10 @@ impl ConjoiningClauses {
|
|||
let value_type = self.get_value_type(schema, pattern);
|
||||
|
||||
match pattern.value {
|
||||
PatternValuePlace::Placeholder =>
|
||||
EvolvedValuePlace::Placeholder =>
|
||||
(),
|
||||
|
||||
PatternValuePlace::Variable(ref v) => {
|
||||
EvolvedValuePlace::Variable(ref v) => {
|
||||
if let Some(this_type) = value_type {
|
||||
// Wouldn't it be nice if we didn't need to clone in the found case?
|
||||
// It doesn't matter too much: collisions won't be too frequent.
|
||||
|
@ -162,7 +150,18 @@ impl ConjoiningClauses {
|
|||
|
||||
self.bind_column_to_var(schema, col.clone(), DatomsColumn::Value, v.clone());
|
||||
},
|
||||
PatternValuePlace::EntidOrInteger(i) =>
|
||||
EvolvedValuePlace::Entid(i) => {
|
||||
match value_type {
|
||||
Some(ValueType::Ref) | None => {
|
||||
self.constrain_column_to_entity(col.clone(), DatomsColumn::Value, i);
|
||||
},
|
||||
Some(value_type) => {
|
||||
self.mark_known_empty(EmptyBecause::ValueTypeMismatch(value_type, TypedValue::Ref(i)));
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
EvolvedValuePlace::EntidOrInteger(i) =>
|
||||
// If we know the valueType, then we can determine whether this is an entid or an
|
||||
// integer. If we don't, then we must generate a more general query with a
|
||||
// value_type_tag.
|
||||
|
@ -180,9 +179,11 @@ impl ConjoiningClauses {
|
|||
// - Constraining the value column to the plain numeric value '1'.
|
||||
// - Constraining its type column to one of a set of types.
|
||||
//
|
||||
// TODO: isn't there a bug here? We'll happily take a numeric value
|
||||
// for a non-numeric attribute!
|
||||
self.constrain_value_to_numeric(col.clone(), i);
|
||||
},
|
||||
PatternValuePlace::IdentOrKeyword(ref kw) => {
|
||||
EvolvedValuePlace::IdentOrKeyword(ref kw) => {
|
||||
// If we know the valueType, then we can determine whether this is an ident or a
|
||||
// keyword. If we don't, then we must generate a more general query with a
|
||||
// value_type_tag.
|
||||
|
@ -204,9 +205,9 @@ impl ConjoiningClauses {
|
|||
self.wheres.add_intersection(ColumnConstraint::has_unit_type(col.clone(), ValueType::Keyword));
|
||||
};
|
||||
},
|
||||
PatternValuePlace::Constant(ref c) => {
|
||||
EvolvedValuePlace::Value(ref c) => {
|
||||
// TODO: don't allocate.
|
||||
let typed_value = c.clone().into_typed_value();
|
||||
let typed_value = c.clone();
|
||||
if !typed_value.is_congruent_with(value_type) {
|
||||
// If the attribute and its value don't match, the pattern must fail.
|
||||
// We can never have a congruence failure if `value_type` is `None`, so we
|
||||
|
@ -244,34 +245,388 @@ impl ConjoiningClauses {
|
|||
}
|
||||
|
||||
match pattern.tx {
|
||||
PatternNonValuePlace::Placeholder => (),
|
||||
PatternNonValuePlace::Variable(ref v) => {
|
||||
EvolvedNonValuePlace::Placeholder => (),
|
||||
EvolvedNonValuePlace::Variable(ref v) => {
|
||||
self.bind_column_to_var(schema, col.clone(), DatomsColumn::Tx, v.clone());
|
||||
},
|
||||
PatternNonValuePlace::Entid(entid) => {
|
||||
EvolvedNonValuePlace::Entid(entid) => {
|
||||
self.constrain_column_to_entity(col.clone(), DatomsColumn::Tx, entid);
|
||||
},
|
||||
PatternNonValuePlace::Ident(ref ident) => {
|
||||
if let Some(entid) = self.entid_for_ident(schema, ident.as_ref()) {
|
||||
self.constrain_column_to_entity(col.clone(), DatomsColumn::Tx, entid.into())
|
||||
}
|
||||
}
|
||||
|
||||
fn reverse_lookup(&mut self, known: Known, var: &Variable, attr: Entid, val: &TypedValue) -> bool {
|
||||
if let Some(attribute) = known.schema.attribute_for_entid(attr) {
|
||||
let unique = attribute.unique.is_some();
|
||||
if unique {
|
||||
match known.get_entid_for_value(attr, val) {
|
||||
None => {
|
||||
self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
|
||||
value: val.clone(),
|
||||
attr: attr,
|
||||
});
|
||||
true
|
||||
},
|
||||
Some(item) => {
|
||||
self.bind_value(var, TypedValue::Ref(item));
|
||||
true
|
||||
},
|
||||
}
|
||||
} else {
|
||||
// A resolution failure means we're done here.
|
||||
self.mark_known_empty(EmptyBecause::UnresolvedIdent(ident.cloned()));
|
||||
match known.get_entids_for_value(attr, val) {
|
||||
None => {
|
||||
self.mark_known_empty(EmptyBecause::CachedAttributeHasNoEntity {
|
||||
value: val.clone(),
|
||||
attr: attr,
|
||||
});
|
||||
true
|
||||
},
|
||||
Some(items) => {
|
||||
if items.len() == 1 {
|
||||
let item = items.iter().next().cloned().unwrap();
|
||||
self.bind_value(var, TypedValue::Ref(item));
|
||||
true
|
||||
} else {
|
||||
// Oh well.
|
||||
// TODO: handle multiple values.
|
||||
false
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.mark_known_empty(EmptyBecause::InvalidAttributeEntid(attr));
|
||||
true
|
||||
}
|
||||
}
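The non-unique branch above only binds the variable when the cached set holds exactly one entity; otherwise the pattern falls through to ordinary SQL generation. A minimal sketch of that rule (not part of this commit; `Entid` as used elsewhere in this file):

use std::collections::BTreeSet;

// Bind only when exactly one entity carries the value; otherwise give up on the cache.
fn single_match(items: &BTreeSet<Entid>) -> Option<Entid> {
    if items.len() == 1 {
        items.iter().next().cloned()
    } else {
        None
    }
}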
|
||||
|
||||
// TODO: generalize.
|
||||
// TODO: use constant values -- extract transformation code from apply_pattern_clause_for_alias.
|
||||
// TODO: loop over all patterns until no more cache values apply?
|
||||
fn attempt_cache_lookup(&mut self, known: Known, pattern: &EvolvedPattern) -> bool {
|
||||
// Precondition: default source. If it's not default, don't call this.
|
||||
assert!(pattern.source == SrcVar::DefaultSrc);
|
||||
|
||||
let schema = known.schema;
|
||||
|
||||
if pattern.tx != EvolvedNonValuePlace::Placeholder {
|
||||
return false;
|
||||
}
|
||||
|
||||
// See if we can use the cache.
|
||||
match pattern.attribute {
|
||||
EvolvedNonValuePlace::Entid(attr) => {
|
||||
if !schema.is_attribute(attr) {
|
||||
// Furthermore, that entid must resolve to an attribute. If it doesn't, this
|
||||
// query is meaningless.
|
||||
self.mark_known_empty(EmptyBecause::InvalidAttributeEntid(attr));
|
||||
return true;
|
||||
}
|
||||
|
||||
let cached_forward = known.is_attribute_cached_forward(attr);
|
||||
let cached_reverse = known.is_attribute_cached_reverse(attr);
|
||||
|
||||
if (cached_forward || cached_reverse) &&
|
||||
pattern.tx == EvolvedNonValuePlace::Placeholder {
|
||||
|
||||
let attribute = schema.attribute_for_entid(attr).unwrap();
|
||||
|
||||
// There are two patterns we can handle:
|
||||
// [?e :some/unique 123 _ _] -- reverse lookup
|
||||
// [123 :some/attr ?v _ _] -- forward lookup
|
||||
match pattern.entity {
|
||||
// Reverse lookup.
|
||||
EvolvedNonValuePlace::Variable(ref var) => {
|
||||
match pattern.value {
|
||||
// TODO: EntidOrInteger etc.
|
||||
EvolvedValuePlace::IdentOrKeyword(ref kw) => {
|
||||
match attribute.value_type {
|
||||
ValueType::Ref => {
|
||||
// It's an ident.
|
||||
// TODO
|
||||
return false;
|
||||
},
|
||||
ValueType::Keyword => {
|
||||
let tv: TypedValue = TypedValue::Keyword(kw.clone());
|
||||
return self.reverse_lookup(known, var, attr, &tv);
|
||||
},
|
||||
t => {
|
||||
let tv: TypedValue = TypedValue::Keyword(kw.clone());
|
||||
// Anything else can't match an IdentOrKeyword.
|
||||
self.mark_known_empty(EmptyBecause::ValueTypeMismatch(t, tv));
|
||||
return true;
|
||||
},
|
||||
}
|
||||
},
|
||||
EvolvedValuePlace::Value(ref val) => {
|
||||
if cached_reverse {
|
||||
return self.reverse_lookup(known, var, attr, val);
|
||||
}
|
||||
}
|
||||
_ => {}, // TODO: check constant values against cache.
|
||||
}
|
||||
},
|
||||
|
||||
// Forward lookup.
|
||||
EvolvedNonValuePlace::Entid(entity) => {
|
||||
match pattern.value {
|
||||
EvolvedValuePlace::Variable(ref var) => {
|
||||
if cached_forward {
|
||||
match known.get_value_for_entid(known.schema, attr, entity) {
|
||||
None => {
|
||||
self.mark_known_empty(EmptyBecause::CachedAttributeHasNoValues {
|
||||
entity: entity,
|
||||
attr: attr,
|
||||
});
|
||||
return true;
|
||||
},
|
||||
Some(item) => {
|
||||
println!("{} is known to be {:?}", var, item);
|
||||
self.bind_value(var, item.clone());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}, // TODO: check constant values against cache.
|
||||
}
|
||||
},
|
||||
_ => {},
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => {},
|
||||
}
|
||||
false
|
||||
}
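For orientation, the two pattern shapes the cache lookup above can short-circuit, written as queries in the style of this diff's tests. The attribute and entid are hypothetical; the reverse case additionally assumes the attribute is unique and cached in reverse.

// Reverse lookup: variable entity, cached attribute, constant value.
let _reverse = r#"[:find ?x :where [?x :foo/bar "idgoeshere"]]"#;
// Forward lookup: known entity, cached attribute, variable value.
let _forward = r#"[:find ?v :where [100 :foo/bar ?v]]"#;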
|
||||
|
||||
/// Transform a pattern place into a narrower type.
|
||||
/// If that's impossible, returns Empty.
|
||||
fn make_evolved_non_value(&self, known: &Known, col: DatomsColumn, non_value: PatternNonValuePlace) -> PlaceOrEmpty<EvolvedNonValuePlace> {
|
||||
use self::PlaceOrEmpty::*;
|
||||
match non_value {
|
||||
PatternNonValuePlace::Placeholder => Place(EvolvedNonValuePlace::Placeholder),
|
||||
PatternNonValuePlace::Entid(e) => Place(EvolvedNonValuePlace::Entid(e)),
|
||||
PatternNonValuePlace::Ident(kw) => {
|
||||
// Resolve the ident.
|
||||
if let Some(entid) = known.schema.get_entid(&kw) {
|
||||
Place(EvolvedNonValuePlace::Entid(entid.into()))
|
||||
} else {
|
||||
Empty(EmptyBecause::UnresolvedIdent((&*kw).clone()))
|
||||
}
|
||||
},
|
||||
PatternNonValuePlace::Variable(var) => {
|
||||
// See if we have it!
|
||||
match self.bound_value(&var) {
|
||||
None => Place(EvolvedNonValuePlace::Variable(var)),
|
||||
Some(TypedValue::Ref(entid)) => Place(EvolvedNonValuePlace::Entid(entid)),
|
||||
Some(TypedValue::Keyword(kw)) => {
|
||||
// We'll allow this only if it's an ident.
|
||||
if let Some(entid) = known.schema.get_entid(&kw) {
|
||||
Place(EvolvedNonValuePlace::Entid(entid.into()))
|
||||
} else {
|
||||
Empty(EmptyBecause::UnresolvedIdent((&*kw).clone()))
|
||||
}
|
||||
},
|
||||
Some(v) => {
|
||||
Empty(EmptyBecause::InvalidBinding(col.into(), v))
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn make_evolved_entity(&self, known: &Known, entity: PatternNonValuePlace) -> PlaceOrEmpty<EvolvedNonValuePlace> {
|
||||
self.make_evolved_non_value(known, DatomsColumn::Entity, entity)
|
||||
}
|
||||
|
||||
fn make_evolved_tx(&self, known: &Known, tx: PatternNonValuePlace) -> PlaceOrEmpty<EvolvedNonValuePlace> {
|
||||
// TODO: make sure that, if it's an entid, it names a tx.
|
||||
self.make_evolved_non_value(known, DatomsColumn::Tx, tx)
|
||||
}
|
||||
|
||||
pub fn make_evolved_attribute(&self, known: &Known, attribute: PatternNonValuePlace) -> PlaceOrEmpty<(EvolvedNonValuePlace, Option<ValueType>)> {
|
||||
use self::PlaceOrEmpty::*;
|
||||
self.make_evolved_non_value(known, DatomsColumn::Attribute, attribute)
|
||||
.and_then(|a| {
|
||||
// Make sure that, if it's an entid, it names an attribute.
|
||||
if let EvolvedNonValuePlace::Entid(e) = a {
|
||||
if let Some(attr) = known.schema.attribute_for_entid(e) {
|
||||
Place((a, Some(attr.value_type)))
|
||||
} else {
|
||||
Empty(EmptyBecause::InvalidAttributeEntid(e))
|
||||
}
|
||||
} else {
|
||||
Place((a, None))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn make_evolved_value(&self,
|
||||
known: &Known,
|
||||
value_type: Option<ValueType>,
|
||||
value: PatternValuePlace) -> PlaceOrEmpty<EvolvedValuePlace> {
|
||||
use self::PlaceOrEmpty::*;
|
||||
match value {
|
||||
PatternValuePlace::Placeholder => Place(EvolvedValuePlace::Placeholder),
|
||||
PatternValuePlace::EntidOrInteger(e) => {
|
||||
match value_type {
|
||||
Some(ValueType::Ref) => Place(EvolvedValuePlace::Entid(e)),
|
||||
Some(ValueType::Long) => Place(EvolvedValuePlace::Value(TypedValue::Long(e))),
|
||||
Some(ValueType::Double) => Place(EvolvedValuePlace::Value((e as f64).into())),
|
||||
Some(t) => Empty(EmptyBecause::ValueTypeMismatch(t, TypedValue::Long(e))),
|
||||
None => Place(EvolvedValuePlace::EntidOrInteger(e)),
|
||||
}
|
||||
},
|
||||
PatternValuePlace::IdentOrKeyword(kw) => {
|
||||
match value_type {
|
||||
Some(ValueType::Ref) => {
|
||||
// Resolve the ident.
|
||||
if let Some(entid) = known.schema.get_entid(&kw) {
|
||||
Place(EvolvedValuePlace::Entid(entid.into()))
|
||||
} else {
|
||||
Empty(EmptyBecause::UnresolvedIdent((&*kw).clone()))
|
||||
}
|
||||
},
|
||||
Some(ValueType::Keyword) => {
|
||||
Place(EvolvedValuePlace::Value(TypedValue::Keyword(kw)))
|
||||
},
|
||||
Some(t) => {
|
||||
Empty(EmptyBecause::ValueTypeMismatch(t, TypedValue::Keyword(kw)))
|
||||
},
|
||||
None => {
|
||||
Place(EvolvedValuePlace::IdentOrKeyword(kw))
|
||||
},
|
||||
}
|
||||
},
|
||||
PatternValuePlace::Variable(var) => {
|
||||
// See if we have it!
|
||||
match self.bound_value(&var) {
|
||||
None => Place(EvolvedValuePlace::Variable(var)),
|
||||
Some(TypedValue::Ref(entid)) => {
|
||||
if let Some(empty) = self.can_constrain_var_to_type(&var, ValueType::Ref) {
|
||||
Empty(empty)
|
||||
} else {
|
||||
Place(EvolvedValuePlace::Entid(entid))
|
||||
}
|
||||
},
|
||||
Some(val) => {
|
||||
if let Some(empty) = self.can_constrain_var_to_type(&var, val.value_type()) {
|
||||
Empty(empty)
|
||||
} else {
|
||||
Place(EvolvedValuePlace::Value(val))
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
PatternValuePlace::Constant(nic) => {
|
||||
Place(EvolvedValuePlace::Value(nic.into_typed_value()))
|
||||
},
|
||||
}
|
||||
}
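A hedged sketch (imports elided) of how a bare integer in value position narrows once the attribute's value type is known, using only the public method defined above:

let cc = ConjoiningClauses::default();
let schema = Schema::default();
let known = Known::for_schema(&schema);

// With a Long-typed attribute the integer becomes a concrete TypedValue::Long;
// with ValueType::Ref it would become EvolvedValuePlace::Entid, and with no known
// type it stays EntidOrInteger.
match cc.make_evolved_value(&known, Some(ValueType::Long), PatternValuePlace::EntidOrInteger(10)) {
    PlaceOrEmpty::Place(EvolvedValuePlace::Value(TypedValue::Long(10))) => (),
    _ => panic!("expected a Long value place"),
}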
|
||||
|
||||
pub fn make_evolved_pattern(&self, known: Known, pattern: Pattern) -> PlaceOrEmpty<EvolvedPattern> {
|
||||
let (e, a, v, tx, source) = (pattern.entity, pattern.attribute, pattern.value, pattern.tx, pattern.source);
|
||||
use self::PlaceOrEmpty::*;
|
||||
match self.make_evolved_entity(&known, e) {
|
||||
Empty(because) => Empty(because),
|
||||
Place(e) => {
|
||||
match self.make_evolved_attribute(&known, a) {
|
||||
Empty(because) => Empty(because),
|
||||
Place((a, value_type)) => {
|
||||
match self.make_evolved_value(&known, value_type, v) {
|
||||
Empty(because) => Empty(because),
|
||||
Place(v) => {
|
||||
match self.make_evolved_tx(&known, tx) {
|
||||
Empty(because) => Empty(because),
|
||||
Place(tx) => {
|
||||
PlaceOrEmpty::Place(EvolvedPattern {
|
||||
source: source.unwrap_or(SrcVar::DefaultSrc),
|
||||
entity: e,
|
||||
attribute: a,
|
||||
value: v,
|
||||
tx: tx,
|
||||
})
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Re-examine the pattern to see if it can be specialized or is now known to fail.
|
||||
#[allow(unused_variables)]
|
||||
pub fn evolve_pattern(&mut self, known: Known, mut pattern: EvolvedPattern) -> PlaceOrEmpty<EvolvedPattern> {
|
||||
use self::PlaceOrEmpty::*;
|
||||
|
||||
let mut new_entity: Option<EvolvedNonValuePlace> = None;
|
||||
let mut new_value: Option<EvolvedValuePlace> = None;
|
||||
|
||||
match &pattern.entity {
|
||||
&EvolvedNonValuePlace::Variable(ref var) => {
|
||||
// See if we have it yet!
|
||||
match self.bound_value(&var) {
|
||||
None => (),
|
||||
Some(TypedValue::Ref(entid)) => {
|
||||
new_entity = Some(EvolvedNonValuePlace::Entid(entid));
|
||||
},
|
||||
Some(v) => {
|
||||
return Empty(EmptyBecause::TypeMismatch {
|
||||
var: var.clone(),
|
||||
existing: self.known_type_set(&var),
|
||||
desired: ValueTypeSet::of_one(ValueType::Ref),
|
||||
});
|
||||
},
|
||||
};
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
match &pattern.value {
|
||||
&EvolvedValuePlace::Variable(ref var) => {
|
||||
// See if we have it yet!
|
||||
match self.bound_value(&var) {
|
||||
None => (),
|
||||
Some(tv) => {
|
||||
new_value = Some(EvolvedValuePlace::Value(tv.clone()));
|
||||
},
|
||||
};
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
|
||||
|
||||
if let Some(e) = new_entity {
|
||||
pattern.entity = e;
|
||||
}
|
||||
if let Some(v) = new_value {
|
||||
pattern.value = v;
|
||||
}
|
||||
Place(pattern)
|
||||
}
|
||||
|
||||
pub fn apply_parsed_pattern(&mut self, known: Known, pattern: Pattern) {
|
||||
use self::PlaceOrEmpty::*;
|
||||
match self.make_evolved_pattern(known, pattern) {
|
||||
Empty(e) => self.mark_known_empty(e),
|
||||
Place(p) => self.apply_pattern(known, p),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn apply_pattern(&mut self, known: Known, pattern: EvolvedPattern) {
|
||||
// For now we only support the default source.
|
||||
if pattern.source != SrcVar::DefaultSrc {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
if self.attempt_cache_lookup(known, &pattern) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_pattern<'s, 'p>(&mut self, schema: &'s Schema, pattern: Pattern) {
|
||||
// For now we only support the default source.
|
||||
match pattern.source {
|
||||
Some(SrcVar::DefaultSrc) | None => (),
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
|
||||
if let Some(alias) = self.alias_table(schema, &pattern) {
|
||||
self.apply_pattern_clause_for_alias(schema, &pattern, &alias);
|
||||
if let Some(alias) = self.alias_table(known.schema, &pattern) {
|
||||
self.apply_pattern_clause_for_alias(known, &pattern, &alias);
|
||||
self.from.push(alias);
|
||||
} else {
|
||||
// We didn't determine a table, likely because there was a mismatch
|
||||
|
@ -296,6 +651,7 @@ mod testing {
|
|||
use mentat_core::attribute::Unique;
|
||||
use mentat_core::{
|
||||
Attribute,
|
||||
Schema,
|
||||
ValueTypeSet,
|
||||
};
|
||||
|
||||
|
@ -329,15 +685,17 @@ mod testing {
|
|||
|
||||
fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("parse failed");
|
||||
algebrize(schema.into(), parsed).expect("algebrize failed").cc
|
||||
let known = Known::for_schema(schema);
|
||||
algebrize(known, parsed).expect("algebrize failed").cc
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unknown_ident() {
|
||||
let mut cc = ConjoiningClauses::default();
|
||||
let schema = Schema::default();
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(Variable::from_valid_name("?x")),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -355,7 +713,8 @@ mod testing {
|
|||
|
||||
associate_ident(&mut schema, NamespacedKeyword::new("foo", "bar"), 99);
|
||||
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(Variable::from_valid_name("?x")),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -378,7 +737,8 @@ mod testing {
|
|||
});
|
||||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -418,7 +778,8 @@ mod testing {
|
|||
let schema = Schema::default();
|
||||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Placeholder,
|
||||
|
@ -467,7 +828,8 @@ mod testing {
|
|||
|
||||
cc.input_variables.insert(a.clone());
|
||||
cc.value_bindings.insert(a.clone(), TypedValue::Keyword(Rc::new(NamespacedKeyword::new("foo", "bar"))));
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
|
@ -510,7 +872,8 @@ mod testing {
|
|||
|
||||
cc.input_variables.insert(a.clone());
|
||||
cc.value_bindings.insert(a.clone(), hello.clone());
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
|
@ -532,7 +895,8 @@ mod testing {
|
|||
let x = Variable::from_valid_name("?x");
|
||||
let a = Variable::from_valid_name("?a");
|
||||
let v = Variable::from_valid_name("?v");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(a.clone()),
|
||||
|
@ -562,7 +926,8 @@ mod testing {
|
|||
let schema = Schema::default();
|
||||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Placeholder,
|
||||
|
@ -612,14 +977,15 @@ mod testing {
|
|||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
let y = Variable::from_valid_name("?y");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "roz"),
|
||||
value: PatternValuePlace::Constant(NonIntegerConstant::Text(Rc::new("idgoeshere".to_string()))),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
});
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -686,7 +1052,8 @@ mod testing {
|
|||
let variables: BTreeSet<Variable> = vec![Variable::from_valid_name("?y")].into_iter().collect();
|
||||
let mut cc = ConjoiningClauses::with_inputs(variables, inputs);
|
||||
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -733,7 +1100,8 @@ mod testing {
|
|||
let variables: BTreeSet<Variable> = vec![Variable::from_valid_name("?y")].into_iter().collect();
|
||||
let mut cc = ConjoiningClauses::with_inputs(variables, inputs);
|
||||
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -768,7 +1136,8 @@ mod testing {
|
|||
let variables: BTreeSet<Variable> = vec![Variable::from_valid_name("?y")].into_iter().collect();
|
||||
let mut cc = ConjoiningClauses::with_inputs(variables, inputs);
|
||||
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -802,14 +1171,15 @@ mod testing {
|
|||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
let y = Variable::from_valid_name("?y");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "roz"),
|
||||
value: PatternValuePlace::Variable(y.clone()),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
});
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "bar"),
|
||||
|
@ -844,14 +1214,15 @@ mod testing {
|
|||
let x = Variable::from_valid_name("?x");
|
||||
let y = Variable::from_valid_name("?y");
|
||||
let z = Variable::from_valid_name("?z");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(y.clone()),
|
||||
value: PatternValuePlace::Constant(NonIntegerConstant::Boolean(true)),
|
||||
tx: PatternNonValuePlace::Placeholder,
|
||||
});
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(z.clone()),
|
||||
attribute: PatternNonValuePlace::Variable(y.clone()),
|
||||
|
|
|
@ -35,6 +35,8 @@ use types::{
|
|||
Inequality,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
/// Application of predicates.
|
||||
impl ConjoiningClauses {
|
||||
/// There are several kinds of predicates in our Datalog:
|
||||
|
@ -43,11 +45,11 @@ impl ConjoiningClauses {
|
|||
/// - In the future, some predicates that are implemented via function calls in SQLite.
|
||||
///
|
||||
/// At present we have implemented only the five built-in comparison binary operators.
|
||||
pub fn apply_predicate<'s>(&mut self, schema: &'s Schema, predicate: Predicate) -> Result<()> {
|
||||
pub fn apply_predicate(&mut self, known: Known, predicate: Predicate) -> Result<()> {
|
||||
// Because we'll be growing the set of built-in predicates, handling each differently,
|
||||
// and ultimately allowing user-specified predicates, we match on the predicate name first.
|
||||
if let Some(op) = Inequality::from_datalog_operator(predicate.operator.0.as_str()) {
|
||||
self.apply_inequality(schema, op, predicate)
|
||||
self.apply_inequality(known, op, predicate)
|
||||
} else {
|
||||
bail!(ErrorKind::UnknownFunction(predicate.operator.clone()))
|
||||
}
|
||||
|
@ -71,7 +73,7 @@ impl ConjoiningClauses {
|
|||
/// - Resolves variables and converts types to those more amenable to SQL.
|
||||
/// - Ensures that the predicate functions name a known operator.
|
||||
/// - Accumulates an `Inequality` constraint into the `wheres` list.
|
||||
pub fn apply_inequality<'s>(&mut self, schema: &'s Schema, comparison: Inequality, predicate: Predicate) -> Result<()> {
|
||||
pub fn apply_inequality(&mut self, known: Known, comparison: Inequality, predicate: Predicate) -> Result<()> {
|
||||
if predicate.args.len() != 2 {
|
||||
bail!(ErrorKind::InvalidNumberOfArguments(predicate.operator.clone(), predicate.args.len(), 2));
|
||||
}
|
||||
|
@ -87,13 +89,13 @@ impl ConjoiningClauses {
|
|||
// The types we're handling here must be the intersection of the possible types of the arguments,
|
||||
// the known types of any variables, and the types supported by our inequality operators.
|
||||
let supported_types = comparison.supported_types();
|
||||
let mut left_types = self.potential_types(schema, &left)?
|
||||
let mut left_types = self.potential_types(known.schema, &left)?
|
||||
.intersection(&supported_types);
|
||||
if left_types.is_empty() {
|
||||
bail!(ErrorKind::InvalidArgument(predicate.operator.clone(), "numeric or instant", 0));
|
||||
}
|
||||
|
||||
let mut right_types = self.potential_types(schema, &right)?
|
||||
let mut right_types = self.potential_types(known.schema, &right)?
|
||||
.intersection(&supported_types);
|
||||
if right_types.is_empty() {
|
||||
bail!(ErrorKind::InvalidArgument(predicate.operator.clone(), "numeric or instant", 1));
|
||||
|
@ -203,7 +205,8 @@ mod testing {
|
|||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
let y = Variable::from_valid_name("?y");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Placeholder,
|
||||
|
@ -214,7 +217,7 @@ mod testing {
|
|||
|
||||
let op = PlainSymbol::new("<");
|
||||
let comp = Inequality::from_datalog_operator(op.plain_name()).unwrap();
|
||||
assert!(cc.apply_inequality(&schema, comp, Predicate {
|
||||
assert!(cc.apply_inequality(known, comp, Predicate {
|
||||
operator: op,
|
||||
args: vec![
|
||||
FnArg::Variable(Variable::from_valid_name("?y")), FnArg::EntidOrInteger(10),
|
||||
|
@ -263,7 +266,8 @@ mod testing {
|
|||
|
||||
let x = Variable::from_valid_name("?x");
|
||||
let y = Variable::from_valid_name("?y");
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
let known = Known::for_schema(&schema);
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: PatternNonValuePlace::Placeholder,
|
||||
|
@ -274,14 +278,14 @@ mod testing {
|
|||
|
||||
let op = PlainSymbol::new(">=");
|
||||
let comp = Inequality::from_datalog_operator(op.plain_name()).unwrap();
|
||||
assert!(cc.apply_inequality(&schema, comp, Predicate {
|
||||
assert!(cc.apply_inequality(known, comp, Predicate {
|
||||
operator: op,
|
||||
args: vec![
|
||||
FnArg::Variable(Variable::from_valid_name("?y")), FnArg::EntidOrInteger(10),
|
||||
]}).is_ok());
|
||||
|
||||
assert!(!cc.is_known_empty());
|
||||
cc.apply_pattern(&schema, Pattern {
|
||||
cc.apply_parsed_pattern(known, Pattern {
|
||||
source: None,
|
||||
entity: PatternNonValuePlace::Variable(x.clone()),
|
||||
attribute: ident("foo", "roz"),
|
||||
|
|
|
@ -8,10 +8,6 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
use mentat_core::{
|
||||
Schema,
|
||||
};
|
||||
|
||||
use mentat_query::{
|
||||
WhereFn,
|
||||
};
|
||||
|
@ -25,6 +21,8 @@ use errors::{
|
|||
Result,
|
||||
};
|
||||
|
||||
use Known;
|
||||
|
||||
/// Application of `where` functions.
|
||||
impl ConjoiningClauses {
|
||||
/// There are several kinds of functions binding variables in our Datalog:
|
||||
|
@ -33,12 +31,12 @@ impl ConjoiningClauses {
|
|||
/// - In the future, some functions that are implemented via function calls in SQLite.
|
||||
///
|
||||
/// At present we have implemented only a limited selection of functions.
|
||||
pub fn apply_where_fn<'s>(&mut self, schema: &'s Schema, where_fn: WhereFn) -> Result<()> {
|
||||
pub fn apply_where_fn(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
|
||||
// Because we'll be growing the set of built-in functions, handling each differently, and
|
||||
// ultimately allowing user-specified functions, we match on the function name first.
|
||||
match where_fn.operator.0.as_str() {
|
||||
"fulltext" => self.apply_fulltext(schema, where_fn),
|
||||
"ground" => self.apply_ground(schema, where_fn),
|
||||
"fulltext" => self.apply_fulltext(known, where_fn),
|
||||
"ground" => self.apply_ground(known, where_fn),
|
||||
_ => bail!(ErrorKind::UnknownFunction(where_fn.operator.clone())),
|
||||
}
|
||||
}
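The two where-functions dispatched above, as they appear in queries elsewhere in this diff (attribute idents are the test schema's):

let _fulltext = r#"[:find ?val
                    :where [(fulltext $ :foo/description "hello") [[?entity ?val ?tx ?score]]]]"#;
let _ground = r#"[:find ?x :where [?x :foo/knows ?p] [(ground 9.95) ?x]]"#;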
|
||||
|
|
|
@ -28,6 +28,8 @@ mod validate;
|
|||
mod clauses;
|
||||
|
||||
use mentat_core::{
|
||||
CachedAttributes,
|
||||
Entid,
|
||||
Schema,
|
||||
TypedValue,
|
||||
ValueType,
|
||||
|
@ -59,6 +61,68 @@ pub use types::{
|
|||
EmptyBecause,
|
||||
};
|
||||
|
||||
/// A convenience wrapper around things known in memory: the schema and caches.
/// We use a trait object here to avoid making dozens of functions generic over the type
/// of the cache. If performance becomes a concern, we should hard-code specific kinds of
/// cache right here, and/or eliminate the Option.
#[derive(Clone, Copy)]
pub struct Known<'s, 'c> {
    pub schema: &'s Schema,
    pub cache: Option<&'c CachedAttributes>,
}

impl<'s, 'c> Known<'s, 'c> {
    pub fn for_schema(s: &'s Schema) -> Known<'s, 'static> {
        Known {
            schema: s,
            cache: None,
        }
    }

    pub fn new(s: &'s Schema, c: Option<&'c CachedAttributes>) -> Known<'s, 'c> {
        Known {
            schema: s,
            cache: c,
        }
    }
}

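A minimal usage sketch (not part of this commit) of the two constructors above; `cache` stands in for any value implementing `CachedAttributes`:

let schema = Schema::default();

// Schema only: algebrizing proceeds without consulting a cache.
let known = Known::for_schema(&schema);

// Schema plus a cache, when one is available:
// let known = Known::new(&schema, Some(&cache));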
/// This is `CachedAttributes`, but with handy generic parameters.
/// Why not make the trait generic? Because then we can't use it as a trait object in `Known`.
impl<'s, 'c> Known<'s, 'c> {
    pub fn is_attribute_cached_reverse<U>(&self, entid: U) -> bool where U: Into<Entid> {
        self.cache
            .map(|cache| cache.is_attribute_cached_reverse(entid.into()))
            .unwrap_or(false)
    }

    pub fn is_attribute_cached_forward<U>(&self, entid: U) -> bool where U: Into<Entid> {
        self.cache
            .map(|cache| cache.is_attribute_cached_forward(entid.into()))
            .unwrap_or(false)
    }

    pub fn get_values_for_entid<U, V>(&self, schema: &Schema, attribute: U, entid: V) -> Option<&Vec<TypedValue>>
    where U: Into<Entid>, V: Into<Entid> {
        self.cache.and_then(|cache| cache.get_values_for_entid(schema, attribute.into(), entid.into()))
    }

    pub fn get_value_for_entid<U, V>(&self, schema: &Schema, attribute: U, entid: V) -> Option<&TypedValue>
    where U: Into<Entid>, V: Into<Entid> {
        self.cache.and_then(|cache| cache.get_value_for_entid(schema, attribute.into(), entid.into()))
    }

    pub fn get_entid_for_value<U>(&self, attribute: U, value: &TypedValue) -> Option<Entid>
    where U: Into<Entid> {
        self.cache.and_then(|cache| cache.get_entid_for_value(attribute.into(), value))
    }

    pub fn get_entids_for_value<U>(&self, attribute: U, value: &TypedValue) -> Option<&BTreeSet<Entid>>
    where U: Into<Entid> {
        self.cache.and_then(|cache| cache.get_entids_for_value(attribute.into(), value))
    }
}

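A hedged sketch of the graceful degradation these wrappers provide: with no cache attached, membership checks return false and lookups return None (the entid and value are hypothetical):

let schema = Schema::default();
let known = Known::for_schema(&schema);

assert!(!known.is_attribute_cached_forward(65536));
assert!(!known.is_attribute_cached_reverse(65536));
assert_eq!(known.get_entid_for_value(65536, &TypedValue::Long(1)), None);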
|
||||
#[derive(Debug)]
|
||||
pub struct AlgebraicQuery {
|
||||
default_source: SrcVar,
|
||||
|
@@ -83,12 +147,12 @@ impl AlgebraicQuery {
     }
 }

-pub fn algebrize_with_counter(schema: &Schema, parsed: FindQuery, counter: usize) -> Result<AlgebraicQuery> {
-    algebrize_with_inputs(schema, parsed, counter, QueryInputs::default())
+pub fn algebrize_with_counter(known: Known, parsed: FindQuery, counter: usize) -> Result<AlgebraicQuery> {
+    algebrize_with_inputs(known, parsed, counter, QueryInputs::default())
 }

-pub fn algebrize(schema: &Schema, parsed: FindQuery) -> Result<AlgebraicQuery> {
-    algebrize_with_inputs(schema, parsed, 0, QueryInputs::default())
+pub fn algebrize(known: Known, parsed: FindQuery) -> Result<AlgebraicQuery> {
+    algebrize_with_inputs(known, parsed, 0, QueryInputs::default())
 }
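A sketch of driving the algebrizer through this entry point, mirroring the `alg` test helpers elsewhere in this diff. The query is illustrative; against a bare schema the unresolved ident simply marks the CC known-empty rather than failing.

let schema = Schema::default();
let known = Known::for_schema(&schema);
let parsed = parse_find_string("[:find ?x :where [?x :foo/bar ?y]]").expect("parse failed");
let algebrized = algebrize(known, parsed).expect("algebrize failed");
assert!(algebrized.cc.is_known_empty());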
|
||||
/// Take an ordering list. Any variables that aren't fixed by the query are used to produce
|
||||
|
@ -166,7 +230,7 @@ fn simplify_limit(mut query: AlgebraicQuery) -> Result<AlgebraicQuery> {
|
|||
Ok(query)
|
||||
}
|
||||
|
||||
pub fn algebrize_with_inputs(schema: &Schema,
|
||||
pub fn algebrize_with_inputs(known: Known,
|
||||
parsed: FindQuery,
|
||||
counter: usize,
|
||||
inputs: QueryInputs) -> Result<AlgebraicQuery> {
|
||||
|
@ -180,7 +244,7 @@ pub fn algebrize_with_inputs(schema: &Schema,
|
|||
|
||||
// TODO: integrate default source into pattern processing.
|
||||
// TODO: flesh out the rest of find-into-context.
|
||||
cc.apply_clauses(schema, parsed.where_clauses)?;
|
||||
cc.apply_clauses(known, parsed.where_clauses)?;
|
||||
|
||||
cc.expand_column_bindings();
|
||||
cc.prune_extracted_types();
|
||||
|
|
|
@ -15,6 +15,8 @@ use std::fmt::{
|
|||
Result,
|
||||
};
|
||||
|
||||
use std::rc::Rc;
|
||||
|
||||
use mentat_core::{
|
||||
Entid,
|
||||
TypedValue,
|
||||
|
@ -26,6 +28,7 @@ use mentat_query::{
|
|||
Direction,
|
||||
NamespacedKeyword,
|
||||
Order,
|
||||
SrcVar,
|
||||
Variable,
|
||||
};
|
||||
|
||||
|
@ -489,6 +492,8 @@ impl Debug for ColumnConstraint {
|
|||
|
||||
#[derive(PartialEq, Clone)]
|
||||
pub enum EmptyBecause {
|
||||
CachedAttributeHasNoValues { entity: Entid, attr: Entid },
|
||||
CachedAttributeHasNoEntity { value: TypedValue, attr: Entid },
|
||||
ConflictingBindings { var: Variable, existing: TypedValue, desired: TypedValue },
|
||||
|
||||
// A variable is known to be of two conflicting sets of types.
|
||||
|
@ -501,6 +506,7 @@ pub enum EmptyBecause {
|
|||
NonInstantArgument,
|
||||
NonNumericArgument,
|
||||
NonStringFulltextValue,
|
||||
NonFulltextAttribute(Entid),
|
||||
UnresolvedIdent(NamespacedKeyword),
|
||||
InvalidAttributeIdent(NamespacedKeyword),
|
||||
InvalidAttributeEntid(Entid),
|
||||
|
@ -513,6 +519,12 @@ impl Debug for EmptyBecause {
|
|||
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
|
||||
use self::EmptyBecause::*;
|
||||
match self {
|
||||
&CachedAttributeHasNoEntity { ref value, ref attr } => {
|
||||
write!(f, "(?e, {}, {:?}, _) not present in store", attr, value)
|
||||
},
|
||||
&CachedAttributeHasNoValues { ref entity, ref attr } => {
|
||||
write!(f, "({}, {}, ?v, _) not present in store", entity, attr)
|
||||
},
|
||||
&ConflictingBindings { ref var, ref existing, ref desired } => {
|
||||
write!(f, "Var {:?} can't be {:?} because it's already bound to {:?}",
|
||||
var, desired, existing)
|
||||
|
@ -549,6 +561,9 @@ impl Debug for EmptyBecause {
|
|||
&InvalidAttributeEntid(entid) => {
|
||||
write!(f, "{} is not an attribute", entid)
|
||||
},
|
||||
&NonFulltextAttribute(entid) => {
|
||||
write!(f, "{} is not a fulltext attribute", entid)
|
||||
},
|
||||
&InvalidBinding(ref column, ref tv) => {
|
||||
write!(f, "{:?} cannot name column {:?}", tv, column)
|
||||
},
|
||||
|
@ -562,3 +577,52 @@ impl Debug for EmptyBecause {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Intermediate data structures for resolving patterns.

#[derive(Debug, Eq, PartialEq)]
pub enum EvolvedNonValuePlace {
    Placeholder,
    Variable(Variable),
    Entid(Entid),                 // Will always be +ve. See #190.
}

// TODO: some of these aren't necessary?
#[derive(Debug, Eq, PartialEq)]
pub enum EvolvedValuePlace {
    Placeholder,
    Variable(Variable),
    Entid(Entid),
    Value(TypedValue),
    EntidOrInteger(i64),
    IdentOrKeyword(Rc<NamespacedKeyword>),
}

pub enum PlaceOrEmpty<T> {
    Place(T),
    Empty(EmptyBecause),
}

impl<T> PlaceOrEmpty<T> {
    pub fn and_then<U, F: FnOnce(T) -> PlaceOrEmpty<U>>(self, f: F) -> PlaceOrEmpty<U> {
        match self {
            PlaceOrEmpty::Place(x) => f(x),
            PlaceOrEmpty::Empty(e) => PlaceOrEmpty::Empty(e),
        }
    }

    pub fn then<F: FnOnce(T)>(self, f: F) {
        match self {
            PlaceOrEmpty::Place(x) => f(x),
            PlaceOrEmpty::Empty(_e) => (),
        }
    }
}
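A small sketch of the combinator above: `and_then` threads `Empty` through untouched, which is how `make_evolved_attribute` chains its checks.

fn double_place(p: PlaceOrEmpty<i64>) -> PlaceOrEmpty<i64> {
    // Empty(e) passes straight through; Place(n) is transformed.
    p.and_then(|n| PlaceOrEmpty::Place(n * 2))
}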
|
||||
pub struct EvolvedPattern {
    pub source: SrcVar,
    pub entity: EvolvedNonValuePlace,
    pub attribute: EvolvedNonValuePlace,
    pub value: EvolvedValuePlace,
    pub tx: EvolvedNonValuePlace,
}
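For illustration (entids hypothetical), a fully evolved pattern for something like `[100 :foo/bar ?v]`, with entity and attribute already resolved to entids and the default source filled in:

let evolved = EvolvedPattern {
    source: SrcVar::DefaultSrc,
    entity: EvolvedNonValuePlace::Entid(100),
    attribute: EvolvedNonValuePlace::Entid(99),
    value: EvolvedValuePlace::Variable(Variable::from_valid_name("?v")),
    tx: EvolvedNonValuePlace::Placeholder,
};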
|
||||
|
|
|
@ -31,6 +31,8 @@ use utils::{
|
|||
associate_ident,
|
||||
};
|
||||
|
||||
use mentat_query_algebrizer::Known;
|
||||
|
||||
fn prepopulated_schema() -> Schema {
|
||||
let mut schema = Schema::default();
|
||||
associate_ident(&mut schema, NamespacedKeyword::new("foo", "name"), 65);
|
||||
|
@ -71,15 +73,16 @@ fn prepopulated_schema() -> Schema {
|
|||
#[test]
|
||||
fn test_apply_fulltext() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
// If you use a non-FTS attribute, we will short-circuit.
|
||||
let query = r#"[:find ?val
|
||||
:where [(fulltext $ :foo/name "hello") [[?entity ?val _ _]]]]"#;
|
||||
assert!(alg(&schema, query).is_known_empty());
|
||||
assert!(alg(known, query).is_known_empty());
|
||||
|
||||
// If you get a type mismatch, we will short-circuit.
|
||||
let query = r#"[:find ?val
|
||||
:where [(fulltext $ :foo/description "hello") [[?entity ?val ?tx ?score]]]
|
||||
[?score :foo/bar _]]"#;
|
||||
assert!(alg(&schema, query).is_known_empty());
|
||||
assert!(alg(known, query).is_known_empty());
|
||||
}
|
||||
|
|
|
@ -35,6 +35,7 @@ use mentat_query_algebrizer::{
|
|||
ComputedTable,
|
||||
Error,
|
||||
ErrorKind,
|
||||
Known,
|
||||
QueryInputs,
|
||||
};
|
||||
|
||||
|
@ -87,7 +88,8 @@ fn test_ground_doesnt_bail_for_type_conflicts() {
|
|||
// The query can return no results.
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground 9.95) ?x]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -95,7 +97,8 @@ fn test_ground_doesnt_bail_for_type_conflicts() {
|
|||
fn test_ground_tuple_fails_impossible() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [5 9.95]) [?x ?p]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -103,7 +106,8 @@ fn test_ground_tuple_fails_impossible() {
|
|||
fn test_ground_scalar_fails_impossible() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground true) ?p]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -113,7 +117,8 @@ fn test_ground_coll_skips_impossible() {
|
|||
// The query can return no results.
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [5 9.95 11]) [?x ...]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.computed_tables[0], ComputedTable::NamedValues {
|
||||
names: vec![Variable::from_valid_name("?x")],
|
||||
|
@ -125,7 +130,8 @@ fn test_ground_coll_skips_impossible() {
|
|||
fn test_ground_coll_fails_if_all_impossible() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [5.1 5.2]) [?p ...]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -133,7 +139,8 @@ fn test_ground_coll_fails_if_all_impossible() {
|
|||
fn test_ground_rel_skips_impossible() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [[8 "foo"] [5 7] [9.95 9] [11 12]]) [[?x ?p]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.computed_tables[0], ComputedTable::NamedValues {
|
||||
names: vec![Variable::from_valid_name("?x"), Variable::from_valid_name("?p")],
|
||||
|
@ -145,7 +152,8 @@ fn test_ground_rel_skips_impossible() {
|
|||
fn test_ground_rel_fails_if_all_impossible() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [[11 5.1] [12 5.2]]) [[?x ?p]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -153,21 +161,24 @@ fn test_ground_rel_fails_if_all_impossible() {
|
|||
fn test_ground_tuple_rejects_all_placeholders() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [8 "foo" 3]) [_ _ _]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
bails(known, &q);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ground_rel_rejects_all_placeholders() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [[8 "foo"]]) [[_ _]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
bails(known, &q);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ground_tuple_placeholders() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [8 "foo" 3]) [?x _ ?p]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.bound_value(&Variable::from_valid_name("?x")), Some(TypedValue::Ref(8)));
|
||||
assert_eq!(cc.bound_value(&Variable::from_valid_name("?p")), Some(TypedValue::Ref(3)));
|
||||
|
@ -177,7 +188,8 @@ fn test_ground_tuple_placeholders() {
|
|||
fn test_ground_rel_placeholders() {
|
||||
let q = r#"[:find ?x :where [?x :foo/knows ?p] [(ground [[8 "foo" 3] [5 false 7] [5 9.95 9]]) [[?x _ ?p]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.computed_tables[0], ComputedTable::NamedValues {
|
||||
names: vec![Variable::from_valid_name("?x"), Variable::from_valid_name("?p")],
|
||||
|
@ -197,7 +209,8 @@ fn test_ground_rel_placeholders() {
|
|||
fn test_multiple_reference_type_failure() {
|
||||
let q = r#"[:find ?x :where [?x :foo/age ?y] [?x :foo/knows ?y]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
|
@ -205,7 +218,8 @@ fn test_multiple_reference_type_failure() {
|
|||
fn test_ground_tuple_infers_types() {
|
||||
let q = r#"[:find ?x :where [?x :foo/age ?v] [(ground [8 10]) [?x ?v]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.bound_value(&Variable::from_valid_name("?x")), Some(TypedValue::Ref(8)));
|
||||
assert_eq!(cc.bound_value(&Variable::from_valid_name("?v")), Some(TypedValue::Long(10)));
|
||||
|
@ -215,7 +229,8 @@ fn test_ground_tuple_infers_types() {
|
|||
fn test_ground_rel_infers_types() {
|
||||
let q = r#"[:find ?x :where [?x :foo/age ?v] [(ground [[8 10]]) [[?x ?v]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let cc = alg(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_none());
|
||||
assert_eq!(cc.computed_tables[0], ComputedTable::NamedValues {
|
||||
names: vec![Variable::from_valid_name("?x"), Variable::from_valid_name("?v")],
|
||||
|
@ -227,7 +242,8 @@ fn test_ground_rel_infers_types() {
|
|||
fn test_ground_coll_heterogeneous_types() {
|
||||
let q = r#"[:find ?x :where [?x _ ?v] [(ground [false 8.5]) [?v ...]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let e = bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let e = bails(known, &q);
|
||||
match e {
|
||||
Error(ErrorKind::InvalidGroundConstant, _) => {
|
||||
},
|
||||
|
@ -241,7 +257,8 @@ fn test_ground_coll_heterogeneous_types() {
|
|||
fn test_ground_rel_heterogeneous_types() {
|
||||
let q = r#"[:find ?x :where [?x _ ?v] [(ground [[false] [5]]) [[?v]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let e = bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let e = bails(known, &q);
|
||||
match e {
|
||||
Error(ErrorKind::InvalidGroundConstant, _) => {
|
||||
},
|
||||
|
@ -255,7 +272,8 @@ fn test_ground_rel_heterogeneous_types() {
|
|||
fn test_ground_tuple_duplicate_vars() {
|
||||
let q = r#"[:find ?x :where [?x :foo/age ?v] [(ground [8 10]) [?x ?x]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let e = bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let e = bails(known, &q);
|
||||
match e {
|
||||
Error(ErrorKind::InvalidBinding(v, e), _) => {
|
||||
assert_eq!(v, PlainSymbol::new("ground"));
|
||||
|
@ -271,7 +289,8 @@ fn test_ground_tuple_duplicate_vars() {
|
|||
fn test_ground_rel_duplicate_vars() {
|
||||
let q = r#"[:find ?x :where [?x :foo/age ?v] [(ground [[8 10]]) [[?x ?x]]]]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let e = bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let e = bails(known, &q);
|
||||
match e {
|
||||
Error(ErrorKind::InvalidBinding(v, e), _) => {
|
||||
assert_eq!(v, PlainSymbol::new("ground"));
|
||||
|
@ -287,7 +306,8 @@ fn test_ground_rel_duplicate_vars() {
|
|||
fn test_ground_nonexistent_variable_invalid() {
|
||||
let q = r#"[:find ?x ?e :where [?e _ ?x] (not [(ground 17) ?v])]"#;
|
||||
let schema = prepopulated_schema();
|
||||
let e = bails(&schema, &q);
|
||||
let known = Known::for_schema(&schema);
|
||||
let e = bails(known, &q);
|
||||
match e {
|
||||
Error(ErrorKind::UnboundVariable(PlainSymbol(v)), _) => {
|
||||
assert_eq!(v, "?v".to_string());
|
||||
|
@ -301,6 +321,7 @@ fn test_ground_nonexistent_variable_invalid() {
|
|||
#[test]
|
||||
fn test_unbound_input_variable_invalid() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let q = r#"[:find ?y ?age :in ?x :where [(ground [?x]) [?y ...]] [?y :foo/age ?age]]"#;
|
||||
|
||||
// This fails even if we know the type: we don't support grounding bindings
|
||||
|
@ -310,7 +331,7 @@ fn test_unbound_input_variable_invalid() {
|
|||
|
||||
let i = QueryInputs::new(types, BTreeMap::default()).expect("valid QueryInputs");
|
||||
|
||||
let e = bails_with_inputs(&schema, &q, i);
|
||||
let e = bails_with_inputs(known, &q, i);
|
||||
match e {
|
||||
Error(ErrorKind::UnboundVariable(v), _) => {
|
||||
assert_eq!(v.0, "?x");
@ -31,6 +31,7 @@ use mentat_query::{
|
|||
use mentat_query_algebrizer::{
|
||||
EmptyBecause,
|
||||
ErrorKind,
|
||||
Known,
|
||||
};
|
||||
|
||||
use utils::{
|
||||
|
@ -60,13 +61,14 @@ fn prepopulated_schema() -> Schema {
|
|||
#[test]
|
||||
fn test_instant_predicates_require_instants() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
// You can't use a string for an inequality: this is a straight-up error.
|
||||
let query = r#"[:find ?e
|
||||
:where
|
||||
[?e :foo/date ?t]
|
||||
[(> ?t "2017-06-16T00:56:41.257Z")]]"#;
|
||||
match bails(&schema, query).0 {
|
||||
match bails(known, query).0 {
|
||||
ErrorKind::InvalidArgument(op, why, idx) => {
|
||||
assert_eq!(op, PlainSymbol::new(">"));
|
||||
assert_eq!(why, "numeric or instant");
|
||||
|
@ -79,7 +81,7 @@ fn test_instant_predicates_require_instants() {
|
|||
:where
|
||||
[?e :foo/date ?t]
|
||||
[(> "2017-06-16T00:56:41.257Z", ?t)]]"#;
|
||||
match bails(&schema, query).0 {
|
||||
match bails(known, query).0 {
|
||||
ErrorKind::InvalidArgument(op, why, idx) => {
|
||||
assert_eq!(op, PlainSymbol::new(">"));
|
||||
assert_eq!(why, "numeric or instant");
|
||||
|
@ -95,7 +97,7 @@ fn test_instant_predicates_require_instants() {
|
|||
:where
|
||||
[?e :foo/date ?t]
|
||||
[(> ?t 1234512345)]]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
assert!(cc.is_known_empty());
|
||||
assert_eq!(cc.empty_because.unwrap(),
|
||||
EmptyBecause::TypeMismatch {
|
||||
|
@ -109,7 +111,7 @@ fn test_instant_predicates_require_instants() {
|
|||
:where
|
||||
[?e :foo/double ?t]
|
||||
[(< ?t 1234512345)]]"#;
|
||||
let cc = alg(&schema, query);
|
||||
let cc = alg(known, query);
|
||||
assert!(!cc.is_known_empty());
|
||||
assert_eq!(cc.known_type(&Variable::from_valid_name("?t")).expect("?t is known"),
|
||||
ValueType::Double);
@ -26,6 +26,8 @@ use mentat_core::{
|
|||
ValueType,
|
||||
};
|
||||
|
||||
use mentat_query_algebrizer::Known;
|
||||
|
||||
fn prepopulated_schema() -> Schema {
|
||||
SchemaBuilder::new()
|
||||
.define_simple_attr("test", "boolean", ValueType::Boolean, false)
|
||||
|
@ -52,12 +54,13 @@ fn test_empty_known() {
|
|||
"ref",
|
||||
];
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
for known_type in type_names.iter() {
|
||||
for required in type_names.iter() {
|
||||
let q = format!("[:find ?e :where [?e :test/{} ?v] [({} ?v)]]",
|
||||
known_type, required);
|
||||
println!("Query: {}", q);
|
||||
let cc = alg(&schema, &q);
|
||||
let cc = alg(known, &q);
|
||||
// It should only be empty if the known type and our requirement differ.
|
||||
assert_eq!(cc.empty_because.is_some(), known_type != required,
|
||||
"known_type = {}; required = {}", known_type, required);
|
||||
|
@ -68,13 +71,15 @@ fn test_empty_known() {
|
|||
#[test]
|
||||
fn test_multiple() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
let q = "[:find ?e :where [?e _ ?v] [(long ?v)] [(double ?v)]]";
|
||||
let cc = alg(&schema, &q);
|
||||
let cc = alg(known, &q);
|
||||
assert!(cc.empty_because.is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unbound() {
|
||||
let schema = prepopulated_schema();
|
||||
bails(&schema, "[:find ?e :where [(string ?e)]]");
|
||||
let known = Known::for_schema(&schema);
|
||||
bails(known, "[:find ?e :where [(string ?e)]]");
|
||||
}
@ -29,11 +29,12 @@ use mentat_query::{
|
|||
};
|
||||
|
||||
use mentat_query_algebrizer::{
|
||||
algebrize,
|
||||
algebrize_with_inputs,
|
||||
ConjoiningClauses,
|
||||
Error,
|
||||
Known,
|
||||
QueryInputs,
|
||||
algebrize,
|
||||
algebrize_with_inputs,
|
||||
};
|
||||
|
||||
// Common utility functions used in multiple test files.
|
||||
|
@ -83,17 +84,17 @@ impl SchemaBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn bails(schema: &Schema, input: &str) -> Error {
|
||||
pub fn bails(known: Known, input: &str) -> Error {
|
||||
let parsed = parse_find_string(input).expect("query input to have parsed");
|
||||
algebrize(schema.into(), parsed).expect_err("algebrize to have failed")
|
||||
algebrize(known, parsed).expect_err("algebrize to have failed")
|
||||
}
|
||||
|
||||
pub fn bails_with_inputs(schema: &Schema, input: &str, inputs: QueryInputs) -> Error {
|
||||
pub fn bails_with_inputs(known: Known, input: &str, inputs: QueryInputs) -> Error {
|
||||
let parsed = parse_find_string(input).expect("query input to have parsed");
|
||||
algebrize_with_inputs(schema, parsed, 0, inputs).expect_err("algebrize to have failed")
|
||||
algebrize_with_inputs(known, parsed, 0, inputs).expect_err("algebrize to have failed")
|
||||
}
|
||||
|
||||
pub fn alg(schema: &Schema, input: &str) -> ConjoiningClauses {
|
||||
pub fn alg(known: Known, input: &str) -> ConjoiningClauses {
|
||||
let parsed = parse_find_string(input).expect("query input to have parsed");
|
||||
algebrize(schema.into(), parsed).expect("algebrizing to have succeeded").cc
|
||||
algebrize(known, parsed).expect("algebrizing to have succeeded").cc
|
||||
}
@ -34,6 +34,7 @@ use mentat_core::{
|
|||
|
||||
use mentat_query_parser::parse_find_string;
|
||||
use mentat_query_algebrizer::{
|
||||
Known,
|
||||
QueryInputs,
|
||||
algebrize,
|
||||
algebrize_with_inputs,
|
||||
|
@ -54,8 +55,9 @@ fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
|
|||
}
|
||||
|
||||
fn translate_with_inputs(schema: &Schema, query: &'static str, inputs: QueryInputs) -> SQLQuery {
|
||||
let known = Known::for_schema(schema);
|
||||
let parsed = parse_find_string(query).expect("parse to succeed");
|
||||
let algebrized = algebrize_with_inputs(schema, parsed, 0, inputs).expect("algebrize to succeed");
|
||||
let algebrized = algebrize_with_inputs(known, parsed, 0, inputs).expect("algebrize to succeed");
|
||||
let select = query_to_select(algebrized).expect("translate to succeed");
|
||||
select.query.to_sql_query().unwrap()
|
||||
}
|
||||
|
@ -182,10 +184,11 @@ fn test_bound_variable_limit_affects_distinct() {
|
|||
#[test]
|
||||
fn test_bound_variable_limit_affects_types() {
|
||||
let schema = prepopulated_schema();
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
let query = r#"[:find ?x ?limit :in ?limit :where [?x _ ?limit] :limit ?limit]"#;
|
||||
let parsed = parse_find_string(query).expect("parse failed");
|
||||
let algebrized = algebrize(&schema, parsed).expect("algebrize failed");
|
||||
let algebrized = algebrize(known, parsed).expect("algebrize failed");
|
||||
|
||||
// The type is known.
|
||||
assert_eq!(Some(ValueType::Long),
|
||||
|
@ -272,10 +275,11 @@ fn test_unknown_attribute_integer_value() {
|
|||
#[test]
|
||||
fn test_unknown_ident() {
|
||||
let schema = Schema::default();
|
||||
let known = Known::for_schema(&schema);
|
||||
|
||||
let impossible = r#"[:find ?x :where [?x :db/ident :no/exist]]"#;
|
||||
let parsed = parse_find_string(impossible).expect("parse failed");
|
||||
let algebrized = algebrize(&schema, parsed).expect("algebrize failed");
|
||||
let algebrized = algebrize(known, parsed).expect("algebrize failed");
|
||||
|
||||
// This query cannot return results: the ident doesn't resolve for a ref-typed attribute.
|
||||
assert!(algebrized.is_known_empty());
221
src/cache.rs
@ -1,221 +0,0 @@
|
|||
// Copyright 2016 Mozilla
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
// this file except in compliance with the License. You may obtain a copy of the
|
||||
// License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing, software distributed
|
||||
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use rusqlite;
|
||||
|
||||
use mentat_core::{
|
||||
Entid,
|
||||
TypedValue,
|
||||
};
|
||||
|
||||
use mentat_db::cache::{
|
||||
AttributeValueProvider,
|
||||
Cacheable,
|
||||
EagerCache,
|
||||
CacheMap,
|
||||
};
|
||||
|
||||
use errors::{
|
||||
Result,
|
||||
};
|
||||
|
||||
pub enum CacheAction {
|
||||
Register,
|
||||
Deregister,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AttributeCacher {
|
||||
a_e_vs_cache: BTreeMap<Entid, EagerCache<Entid, Vec<TypedValue>, AttributeValueProvider>>, // values keyed by attribute
|
||||
}
|
||||
|
||||
impl AttributeCacher {
|
||||
|
||||
pub fn new() -> Self {
|
||||
AttributeCacher {
|
||||
a_e_vs_cache: BTreeMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_attribute<'sqlite>(&mut self, sqlite: &'sqlite rusqlite::Connection, attribute: Entid) -> Result<()> {
|
||||
let value_provider = AttributeValueProvider{ attribute: attribute };
|
||||
let mut cacher = EagerCache::new(value_provider);
|
||||
cacher.cache_values(sqlite)?;
|
||||
self.a_e_vs_cache.insert(attribute, cacher);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn deregister_attribute(&mut self, attribute: &Entid) -> Option<CacheMap<Entid, Vec<TypedValue>>> {
|
||||
self.a_e_vs_cache.remove(&attribute).map(|m| m.cache)
|
||||
}
|
||||
|
||||
pub fn get(&self, attribute: &Entid) -> Option<&CacheMap<Entid, Vec<TypedValue>>> {
|
||||
self.a_e_vs_cache.get( &attribute ).map(|m| &m.cache)
|
||||
}
|
||||
|
||||
pub fn get_values_for_entid(&self, attribute: &Entid, entid: &Entid) -> Option<&Vec<TypedValue>> {
|
||||
self.a_e_vs_cache.get(&attribute).and_then(|c| c.get(&entid))
|
||||
}
|
||||
|
||||
pub fn get_value_for_entid(&self, attribute: &Entid, entid: &Entid) -> Option<&TypedValue> {
|
||||
self.get_values_for_entid(attribute, entid).and_then(|c| c.first())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::rc::Rc;
|
||||
use mentat_core::{
|
||||
HasSchema,
|
||||
KnownEntid,
|
||||
};
|
||||
use mentat_db::db;
|
||||
use mentat_db::types::TypedValue;
|
||||
|
||||
use conn::Conn;
|
||||
|
||||
fn populate_db() -> (Conn, rusqlite::Connection) {
|
||||
let mut sqlite = db::new_connection("").unwrap();
|
||||
let mut conn = Conn::connect(&mut sqlite).unwrap();
|
||||
let _report = conn.transact(&mut sqlite, r#"[
|
||||
{ :db/ident :foo/bar
|
||||
:db/valueType :db.type/long
|
||||
:db/cardinality :db.cardinality/one },
|
||||
{ :db/ident :foo/baz
|
||||
:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one },
|
||||
{ :db/ident :foo/bap
|
||||
:db/valueType :db.type/string
|
||||
:db/cardinality :db.cardinality/many}]"#).expect("transaction expected to succeed");
|
||||
let _report = conn.transact(&mut sqlite, r#"[
|
||||
{ :foo/bar 100
|
||||
:foo/baz false
|
||||
:foo/bap ["one","two","buckle my shoe"] },
|
||||
{ :foo/bar 200
|
||||
:foo/baz true
|
||||
:foo/bap ["three", "four", "knock at my door"] }]"#).expect("transaction expected to succeed");
|
||||
(conn, sqlite)
|
||||
}
|
||||
|
||||
fn assert_values_present_for_attribute(attribute_cache: &mut AttributeCacher, attribute: &KnownEntid, values: Vec<Vec<TypedValue>>) {
|
||||
let cached_values: Vec<Vec<TypedValue>> = attribute_cache.get(&attribute.0)
|
||||
.expect("Expected cached values")
|
||||
.values()
|
||||
.cloned()
|
||||
.collect();
|
||||
assert_eq!(cached_values, values);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_to_cache() {
|
||||
let (conn, sqlite) = populate_db();
|
||||
let schema = conn.current_schema();
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
let kw = kw!(:foo/bar);
|
||||
let entid = schema.get_entid(&kw).expect("Expected entid for attribute");
|
||||
attribute_cache.register_attribute(&sqlite, entid.0.clone() ).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&mut attribute_cache, &entid, vec![vec![TypedValue::Long(100)], vec![TypedValue::Long(200)]]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_attribute_already_in_cache() {
|
||||
let (conn, mut sqlite) = populate_db();
|
||||
let schema = conn.current_schema();
|
||||
|
||||
let kw = kw!(:foo/bar);
|
||||
let entid = schema.get_entid(&kw).expect("Expected entid for attribute");
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
|
||||
attribute_cache.register_attribute(&mut sqlite, entid.0.clone()).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&mut attribute_cache, &entid, vec![vec![TypedValue::Long(100)], vec![TypedValue::Long(200)]]);
|
||||
attribute_cache.register_attribute(&mut sqlite, entid.0.clone()).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&mut attribute_cache, &entid, vec![vec![TypedValue::Long(100)], vec![TypedValue::Long(200)]]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_from_cache() {
|
||||
let (conn, mut sqlite) = populate_db();
|
||||
let schema = conn.current_schema();
|
||||
|
||||
let kwr = kw!(:foo/bar);
|
||||
let entidr = schema.get_entid(&kwr).expect("Expected entid for attribute");
|
||||
let kwz = kw!(:foo/baz);
|
||||
let entidz = schema.get_entid(&kwz).expect("Expected entid for attribute");
|
||||
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
|
||||
attribute_cache.register_attribute(&mut sqlite, entidr.0.clone()).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&mut attribute_cache, &entidr, vec![vec![TypedValue::Long(100)], vec![TypedValue::Long(200)]]);
|
||||
attribute_cache.register_attribute(&mut sqlite, entidz.0.clone()).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&mut attribute_cache, &entidz, vec![vec![TypedValue::Boolean(false)], vec![TypedValue::Boolean(true)]]);
|
||||
|
||||
// test that we can remove an item from cache
|
||||
attribute_cache.deregister_attribute(&entidz.0).expect("No errors on remove from cache");
|
||||
assert_eq!(attribute_cache.get(&entidz.0), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_attribute_not_in_cache() {
|
||||
let (conn, _sqlite) = populate_db();
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
|
||||
let schema = conn.current_schema();
|
||||
let kw = kw!(:foo/baz);
|
||||
let entid = schema.get_entid(&kw).expect("Expected entid for attribute").0;
|
||||
assert_eq!(None, attribute_cache.deregister_attribute(&entid));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_attribute_value_for_entid() {
|
||||
let (conn, mut sqlite) = populate_db();
|
||||
let schema = conn.current_schema();
|
||||
|
||||
let entities = conn.q_once(&sqlite, r#"[:find ?e . :where [?e :foo/bar 100]]"#, None).expect("Expected query to work").into_scalar().expect("expected scalar results");
|
||||
let entid = match entities {
|
||||
Some(TypedValue::Ref(entid)) => entid,
|
||||
x => panic!("expected Some(Ref), got {:?}", x),
|
||||
};
|
||||
|
||||
let kwr = kw!(:foo/bar);
|
||||
let attr_entid = schema.get_entid(&kwr).expect("Expected entid for attribute").0;
|
||||
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
|
||||
attribute_cache.register_attribute(&mut sqlite, attr_entid.clone()).expect("No errors on add to cache");
|
||||
let val = attribute_cache.get_value_for_entid(&attr_entid, &entid).expect("Expected value");
|
||||
assert_eq!(*val, TypedValue::Long(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_attribute_values_for_entid() {
|
||||
let (conn, mut sqlite) = populate_db();
|
||||
let schema = conn.current_schema();
|
||||
|
||||
let entities = conn.q_once(&sqlite, r#"[:find ?e . :where [?e :foo/bar 100]]"#, None).expect("Expected query to work").into_scalar().expect("expected scalar results");
|
||||
let entid = match entities {
|
||||
Some(TypedValue::Ref(entid)) => entid,
|
||||
x => panic!("expected Some(Ref), got {:?}", x),
|
||||
};
|
||||
|
||||
let kwp = kw!(:foo/bap);
|
||||
let attr_entid = schema.get_entid(&kwp).expect("Expected entid for attribute").0;
|
||||
|
||||
let mut attribute_cache = AttributeCacher::new();
|
||||
|
||||
attribute_cache.register_attribute(&mut sqlite, attr_entid.clone()).expect("No errors on add to cache");
|
||||
let val = attribute_cache.get_values_for_entid(&attr_entid, &entid).expect("Expected value");
|
||||
assert_eq!(*val, vec![TypedValue::String(Rc::new("buckle my shoe".to_string())), TypedValue::String(Rc::new("one".to_string())), TypedValue::String(Rc::new("two".to_string()))]);
|
||||
}
|
||||
}
174
src/conn.rs
@ -42,6 +42,7 @@ use mentat_core::{
|
|||
|
||||
use mentat_core::intern_set::InternSet;
|
||||
|
||||
use mentat_db::cache::SQLiteAttributeCache;
|
||||
use mentat_db::db;
|
||||
use mentat_db::{
|
||||
transact,
|
||||
|
@ -61,13 +62,6 @@ use mentat_tx_parser;
|
|||
use mentat_tolstoy::Syncer;
|
||||
|
||||
use uuid::Uuid;
|
||||
use cache::{
|
||||
AttributeCacher,
|
||||
};
|
||||
|
||||
pub use cache::{
|
||||
CacheAction,
|
||||
};
|
||||
|
||||
use entity_builder::{
|
||||
InProgressBuilder,
|
||||
|
@ -76,15 +70,17 @@ use entity_builder::{
|
|||
use errors::*;
|
||||
|
||||
use query::{
|
||||
lookup_value_for_attribute,
|
||||
lookup_values_for_attribute,
|
||||
Known,
|
||||
PreparedResult,
|
||||
q_once,
|
||||
q_prepare,
|
||||
q_explain,
|
||||
QueryExplanation,
|
||||
QueryInputs,
|
||||
QueryOutput,
|
||||
lookup_value_for_attribute,
|
||||
lookup_values_for_attribute,
|
||||
q_explain,
|
||||
q_once,
|
||||
q_prepare,
|
||||
q_uncached,
|
||||
};
|
||||
|
||||
/// Connection metadata required to query from, or apply transactions to, a Mentat store.
|
||||
|
@ -127,7 +123,7 @@ pub struct Conn {
|
|||
// TODO: maintain cache of query plans that could be shared across threads and invalidated when
|
||||
// the schema changes. #315.
|
||||
|
||||
attribute_cache: RwLock<AttributeCacher>,
|
||||
attribute_cache: RwLock<SQLiteAttributeCache>,
|
||||
}
|
||||
|
||||
/// A convenience wrapper around a single SQLite connection and a Conn. This is suitable
|
||||
|
@ -190,7 +186,9 @@ pub struct InProgress<'a, 'c> {
|
|||
generation: u64,
|
||||
partition_map: PartitionMap,
|
||||
schema: Schema,
|
||||
cache: RwLockWriteGuard<'a, AttributeCacher>,
|
||||
cache: RwLockWriteGuard<'a, SQLiteAttributeCache>,
|
||||
|
||||
use_caching: bool,
|
||||
}
|
||||
|
||||
/// Represents an in-progress set of reads to the store. Just like `InProgress`,
|
||||
|
@ -228,39 +226,50 @@ impl<'a, 'c> Queryable for InProgress<'a, 'c> {
|
|||
fn q_once<T>(&self, query: &str, inputs: T) -> Result<QueryOutput>
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
if self.use_caching {
|
||||
let known = Known::new(&self.schema, Some(&*self.cache));
|
||||
q_once(&*(self.transaction),
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
} else {
|
||||
q_uncached(&*(self.transaction),
|
||||
&self.schema,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
}
|
||||
|
||||
fn q_prepare<T>(&self, query: &str, inputs: T) -> PreparedResult
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
let known = Known::new(&self.schema, Some(&*self.cache));
|
||||
q_prepare(&*(self.transaction),
|
||||
&self.schema,
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
|
||||
fn q_explain<T>(&self, query: &str, inputs: T) -> Result<QueryExplanation>
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
let known = Known::new(&self.schema, Some(&*self.cache));
|
||||
q_explain(&*(self.transaction),
|
||||
&self.schema,
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
|
||||
fn lookup_values_for_attribute<E>(&self, entity: E, attribute: &edn::NamespacedKeyword) -> Result<Vec<TypedValue>>
|
||||
where E: Into<Entid> {
|
||||
let cc = &*self.cache;
|
||||
lookup_values_for_attribute(&*(self.transaction), &self.schema, cc, entity, attribute)
|
||||
let known = Known::new(&self.schema, Some(&*self.cache));
|
||||
lookup_values_for_attribute(&*(self.transaction), known, entity, attribute)
|
||||
}
|
||||
|
||||
fn lookup_value_for_attribute<E>(&self, entity: E, attribute: &edn::NamespacedKeyword) -> Result<Option<TypedValue>>
|
||||
where E: Into<Entid> {
|
||||
let cc = &*self.cache;
|
||||
lookup_value_for_attribute(&*(self.transaction), &self.schema, cc, entity, attribute)
|
||||
let known = Known::new(&self.schema, Some(&*self.cache));
|
||||
lookup_value_for_attribute(&*(self.transaction), known, entity, attribute)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -334,6 +343,11 @@ impl<'a, 'c> InProgress<'a, 'c> {
|
|||
InProgressBuilder::new(self)
|
||||
}
|
||||
|
||||
/// Choose whether to use in-memory caches for running queries.
|
||||
pub fn use_caching(&mut self, yesno: bool) {
|
||||
self.use_caching = yesno;
|
||||
}
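The following is an illustrative sketch, not part of this diff: it assumes an open `Store` named `store` and shows how a caller could switch off the in-memory cache for the remainder of one transaction before querying and committing.

    // Illustrative sketch (assumed names): opt out of the attribute cache for this transaction.
    let mut in_progress = store.begin_transaction()?;
    in_progress.use_caching(false);
    let output = in_progress.q_once("[:find ?x :where [?x :db/ident _]]", None)?;
    in_progress.commit()?;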
|
||||
|
||||
pub fn transact_terms<I>(&mut self, terms: I, tempid_set: InternSet<TempId>) -> Result<TxReport> where I: IntoIterator<Item=TermWithTempIds> {
|
||||
let (report, next_partition_map, next_schema) = transact_terms(&self.transaction,
|
||||
self.partition_map.clone(),
|
||||
|
@ -405,6 +419,13 @@ impl<'a, 'c> InProgress<'a, 'c> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Store {
|
||||
/// Intended for use from tests.
|
||||
pub fn sqlite_mut(&mut self) -> &mut rusqlite::Connection {
|
||||
&mut self.sqlite
|
||||
}
|
||||
}
|
||||
|
||||
impl Store {
|
||||
pub fn dismantle(self) -> (rusqlite::Connection, Conn) {
|
||||
(self.sqlite, self.conn)
|
||||
|
@ -421,6 +442,15 @@ impl Store {
|
|||
pub fn begin_transaction<'m>(&'m mut self) -> Result<InProgress<'m, 'm>> {
|
||||
self.conn.begin_transaction(&mut self.sqlite)
|
||||
}
|
||||
|
||||
pub fn cache(&mut self, attr: &NamespacedKeyword, direction: CacheDirection) -> Result<()> {
|
||||
let schema = &self.conn.current_schema();
|
||||
self.conn.cache(&mut self.sqlite,
|
||||
schema,
|
||||
attr,
|
||||
direction,
|
||||
CacheAction::Register)
|
||||
}
|
||||
}
|
||||
|
||||
impl Queryable for Store {
|
||||
|
@ -450,6 +480,19 @@ impl Queryable for Store {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
pub enum CacheDirection {
|
||||
Forward,
|
||||
Reverse,
|
||||
Both,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
pub enum CacheAction {
|
||||
Register,
|
||||
Deregister,
|
||||
}
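As a usage sketch only (assuming a freshly opened `Store` whose schema already defines `:foo/bar`, as in the tests below), the direction-aware registration added here could be driven like this:

    // Illustrative sketch: register :foo/bar for forward (entity -> values) lookups.
    let mut store = Store::open("")?;
    store.cache(&kw!(:foo/bar), CacheDirection::Forward)?;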
|
||||
|
||||
impl Syncable for Store {
|
||||
fn sync(&mut self, server_uri: &String, user_uuid: &String) -> Result<()> {
|
||||
let uuid = Uuid::parse_str(&user_uuid)?;
|
||||
|
@ -462,7 +505,7 @@ impl Conn {
|
|||
fn new(partition_map: PartitionMap, schema: Schema) -> Conn {
|
||||
Conn {
|
||||
metadata: Mutex::new(Metadata::new(0, partition_map, Arc::new(schema))),
|
||||
attribute_cache: RwLock::new(AttributeCacher::new())
|
||||
attribute_cache: Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -500,10 +543,15 @@ impl Conn {
|
|||
self.metadata.lock().unwrap().schema.clone()
|
||||
}
|
||||
|
||||
pub fn attribute_cache<'s>(&'s self) -> RwLockReadGuard<'s, AttributeCacher> {
|
||||
pub fn attribute_cache<'s>(&'s self) -> RwLockReadGuard<'s, SQLiteAttributeCache> {
|
||||
self.attribute_cache.read().unwrap()
|
||||
}
|
||||
|
||||
pub fn attribute_cache_mut<'s>(&'s self) -> RwLockWriteGuard<'s, SQLiteAttributeCache> {
|
||||
self.attribute_cache.write().unwrap()
|
||||
}
|
||||
|
||||
|
||||
/// Query the Mentat store, using the given connection and the current metadata.
|
||||
pub fn q_once<T>(&self,
|
||||
sqlite: &rusqlite::Connection,
|
||||
|
@ -511,8 +559,26 @@ impl Conn {
|
|||
inputs: T) -> Result<QueryOutput>
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
// Doesn't clone, unlike `current_schema`.
|
||||
let metadata = self.metadata.lock().unwrap();
|
||||
let cache = &*self.attribute_cache.read().unwrap();
|
||||
let known = Known::new(&*metadata.schema, Some(cache));
|
||||
q_once(sqlite,
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
|
||||
/// Query the Mentat store, using the given connection and the current metadata,
|
||||
/// but without using the cache.
|
||||
pub fn q_uncached<T>(&self,
|
||||
sqlite: &rusqlite::Connection,
|
||||
query: &str,
|
||||
inputs: T) -> Result<QueryOutput>
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
let metadata = self.metadata.lock().unwrap();
|
||||
q_uncached(sqlite,
|
||||
&*metadata.schema, // Doesn't clone, unlike `current_schema`.
|
||||
query,
|
||||
inputs)
|
||||
|
@ -525,8 +591,10 @@ impl Conn {
|
|||
where T: Into<Option<QueryInputs>> {
|
||||
|
||||
let metadata = self.metadata.lock().unwrap();
|
||||
let cache = &*self.attribute_cache.read().unwrap();
|
||||
let known = Known::new(&*metadata.schema, Some(cache));
|
||||
q_prepare(sqlite,
|
||||
&*metadata.schema,
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
|
@ -537,23 +605,33 @@ impl Conn {
|
|||
inputs: T) -> Result<QueryExplanation>
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
q_explain(sqlite, &*self.current_schema(), query, inputs)
|
||||
let metadata = self.metadata.lock().unwrap();
|
||||
let cache = &*self.attribute_cache.read().unwrap();
|
||||
let known = Known::new(&*metadata.schema, Some(cache));
|
||||
q_explain(sqlite,
|
||||
known,
|
||||
query,
|
||||
inputs)
|
||||
}
|
||||
|
||||
pub fn lookup_values_for_attribute(&self,
|
||||
sqlite: &rusqlite::Connection,
|
||||
entity: Entid,
|
||||
attribute: &edn::NamespacedKeyword) -> Result<Vec<TypedValue>> {
|
||||
let cc: &AttributeCacher = &*self.attribute_cache();
|
||||
lookup_values_for_attribute(sqlite, &*self.current_schema(), cc, entity, attribute)
|
||||
let schema = &*self.current_schema();
|
||||
let cache = &*self.attribute_cache();
|
||||
let known = Known::new(schema, Some(cache));
|
||||
lookup_values_for_attribute(sqlite, known, entity, attribute)
|
||||
}
|
||||
|
||||
pub fn lookup_value_for_attribute(&self,
|
||||
sqlite: &rusqlite::Connection,
|
||||
entity: Entid,
|
||||
attribute: &edn::NamespacedKeyword) -> Result<Option<TypedValue>> {
|
||||
let cc: &AttributeCacher = &*self.attribute_cache();
|
||||
lookup_value_for_attribute(sqlite, &*self.current_schema(), cc, entity, attribute)
|
||||
let schema = &*self.current_schema();
|
||||
let cache = &*self.attribute_cache();
|
||||
let known = Known::new(schema, Some(cache));
|
||||
lookup_value_for_attribute(sqlite, known, entity, attribute)
|
||||
}
|
||||
|
||||
/// Take a SQLite transaction.
|
||||
|
@ -577,6 +655,7 @@ impl Conn {
|
|||
partition_map: current_partition_map,
|
||||
schema: (*current_schema).clone(),
|
||||
cache: self.attribute_cache.write().unwrap(),
|
||||
use_caching: true,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -587,6 +666,14 @@ impl Conn {
|
|||
.map(InProgressRead)
|
||||
}
|
||||
|
||||
pub fn begin_uncached_read<'m, 'conn>(&'m mut self, sqlite: &'conn mut rusqlite::Connection) -> Result<InProgressRead<'m, 'conn>> {
|
||||
self.begin_transaction_with_behavior(sqlite, TransactionBehavior::Deferred)
|
||||
.map(|mut ip| {
|
||||
ip.use_caching(false);
|
||||
InProgressRead(ip)
|
||||
})
|
||||
}
|
||||
|
||||
/// IMMEDIATE means 'start the transaction now, but don't exclude readers'. It prevents other
|
||||
/// connections from taking immediate or exclusive transactions. This is appropriate for our
|
||||
/// writes and `InProgress`: it means we are ready to write whenever we want to, and nobody else
|
||||
|
@ -628,18 +715,29 @@ impl Conn {
|
|||
/// CacheType::Lazy caches values only after they have first been fetched.
|
||||
pub fn cache(&mut self,
|
||||
sqlite: &mut rusqlite::Connection,
|
||||
schema: &Schema,
|
||||
attribute: &NamespacedKeyword,
|
||||
cache_direction: CacheDirection,
|
||||
cache_action: CacheAction) -> Result<()> {
|
||||
// fetch the attribute for the given name
|
||||
let schema = self.current_schema();
|
||||
|
||||
match self.current_schema().attribute_for_ident(&attribute) {
|
||||
None => bail!(ErrorKind::UnknownAttribute(attribute.to_string())),
|
||||
Some((_attribute, attribute_entid)) => {
|
||||
let mut cache = self.attribute_cache.write().unwrap();
|
||||
let attribute_entid = schema.get_entid(&attribute).ok_or_else(|| ErrorKind::UnknownAttribute(attribute.to_string()))?;
|
||||
match cache_action {
|
||||
CacheAction::Register => { cache.register_attribute(sqlite, attribute_entid.0)?; },
|
||||
CacheAction::Deregister => { cache.deregister_attribute(&attribute_entid.0); },
|
||||
}
|
||||
CacheAction::Register => {
|
||||
match cache_direction {
|
||||
CacheDirection::Both => cache.register(schema, sqlite, attribute_entid),
|
||||
CacheDirection::Forward => cache.register_forward(schema, sqlite, attribute_entid),
|
||||
CacheDirection::Reverse => cache.register_reverse(schema, sqlite, attribute_entid),
|
||||
}.map_err(|e| e.into())
|
||||
},
|
||||
CacheAction::Deregister => {
|
||||
cache.unregister(attribute_entid);
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -899,7 +997,8 @@ mod tests {
|
|||
:db/valueType :db.type/boolean }]"#).unwrap();
|
||||
|
||||
let kw = kw!(:foo/bat);
|
||||
let res = conn.cache(&mut sqlite,&kw, CacheAction::Register);
|
||||
let schema = conn.current_schema();
|
||||
let res = conn.cache(&mut sqlite, &schema, &kw, CacheDirection::Forward, CacheAction::Register);
|
||||
match res.unwrap_err() {
|
||||
Error(ErrorKind::UnknownAttribute(msg), _) => assert_eq!(msg, ":foo/bat"),
|
||||
x => panic!("expected UnknownAttribute error, got {:?}", x),
|
||||
|
@ -952,7 +1051,8 @@ mod tests {
|
|||
let uncached_elapsed_time = finish.duration_since(start);
|
||||
println!("Uncached time: {:?}", uncached_elapsed_time);
|
||||
|
||||
conn.cache(&mut sqlite, &kw, CacheAction::Register).expect("expected caching to work");
|
||||
let schema = conn.current_schema();
|
||||
conn.cache(&mut sqlite, &schema, &kw, CacheDirection::Forward, CacheAction::Register).expect("expected caching to work");
|
||||
|
||||
for _ in 1..5 {
|
||||
let start = Instant::now();
|
||||
|
|
|
@ -39,6 +39,7 @@ pub use mentat_core::{
|
|||
HasSchema,
|
||||
KnownEntid,
|
||||
NamespacedKeyword,
|
||||
Schema,
|
||||
TypedValue,
|
||||
Uuid,
|
||||
ValueType,
|
||||
|
@ -87,7 +88,6 @@ macro_rules! kw {
|
|||
};
|
||||
}
|
||||
|
||||
pub mod cache;
|
||||
pub mod errors;
|
||||
pub mod ident;
|
||||
pub mod vocabulary;
|
||||
|
@ -95,10 +95,6 @@ pub mod conn;
|
|||
pub mod query;
|
||||
pub mod entity_builder;
|
||||
|
||||
pub fn get_name() -> String {
|
||||
return String::from("mentat");
|
||||
}
|
||||
|
||||
pub use query::{
|
||||
IntoResult,
|
||||
PlainSymbol,
|
||||
|
@ -113,6 +109,8 @@ pub use query::{
|
|||
};
|
||||
|
||||
pub use conn::{
|
||||
CacheAction,
|
||||
CacheDirection,
|
||||
Conn,
|
||||
InProgress,
|
||||
Metadata,
109
src/query.rs
@ -23,8 +23,8 @@ use mentat_core::{
|
|||
|
||||
use mentat_query_algebrizer::{
|
||||
AlgebraicQuery,
|
||||
algebrize_with_inputs,
|
||||
EmptyBecause,
|
||||
algebrize_with_inputs,
|
||||
};
|
||||
|
||||
pub use mentat_query_algebrizer::{
|
||||
|
@ -63,6 +63,10 @@ use mentat_query_translator::{
|
|||
query_to_select,
|
||||
};
|
||||
|
||||
pub use mentat_query_algebrizer::{
|
||||
Known,
|
||||
};
|
||||
|
||||
pub use mentat_query_projector::{
|
||||
QueryOutput, // Includes the columns/find spec.
|
||||
QueryResults, // The results themselves.
|
||||
|
@ -73,10 +77,6 @@ use errors::{
|
|||
Result,
|
||||
};
|
||||
|
||||
use cache::{
|
||||
AttributeCacher,
|
||||
};
|
||||
|
||||
pub type QueryExecutionResult = Result<QueryOutput>;
|
||||
pub type PreparedResult<'sqlite> = Result<PreparedQuery<'sqlite>>;
|
||||
|
||||
|
@ -154,13 +154,13 @@ pub struct QueryPlanStep {
|
|||
pub detail: String,
|
||||
}
|
||||
|
||||
fn algebrize_query<'schema, T>
|
||||
(schema: &'schema Schema,
|
||||
fn algebrize_query<T>
|
||||
(known: Known,
|
||||
query: FindQuery,
|
||||
inputs: T) -> Result<AlgebraicQuery>
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
let algebrized = algebrize_with_inputs(schema, query, 0, inputs.into().unwrap_or(QueryInputs::default()))?;
|
||||
let algebrized = algebrize_with_inputs(known, query, 0, inputs.into().unwrap_or(QueryInputs::default()))?;
|
||||
let unbound = algebrized.unbound_variables();
|
||||
// Because we are running once, we can check that all of our `:in` variables are bound at this point.
|
||||
// If they aren't, the user has made an error -- perhaps writing the wrong variable in `:in`, or
|
||||
|
@ -171,9 +171,9 @@ fn algebrize_query<'schema, T>
|
|||
Ok(algebrized)
|
||||
}
|
||||
|
||||
fn fetch_values<'sqlite, 'schema>
|
||||
fn fetch_values<'sqlite>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
known: Known,
|
||||
entity: Entid,
|
||||
attribute: Entid,
|
||||
only_one: bool) -> QueryExecutionResult {
|
||||
|
@ -192,7 +192,7 @@ fn fetch_values<'sqlite, 'schema>
|
|||
let query = FindQuery::simple(spec,
|
||||
vec![WhereClause::Pattern(pattern)]);
|
||||
|
||||
let algebrized = algebrize_query(schema, query, None)?;
|
||||
let algebrized = algebrize_query(known, query, None)?;
|
||||
|
||||
run_algebrized_query(sqlite, algebrized)
|
||||
}
|
||||
|
@ -208,57 +208,62 @@ fn lookup_attribute(schema: &Schema, attribute: &NamespacedKeyword) -> Result<Kn
|
|||
/// If `attribute` isn't an attribute, `None` is returned.
|
||||
pub fn lookup_value<'sqlite, 'schema, 'cache, E, A>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
cache: &'cache AttributeCacher,
|
||||
known: Known,
|
||||
entity: E,
|
||||
attribute: A) -> Result<Option<TypedValue>>
|
||||
where E: Into<Entid>, A: Into<Entid> {
|
||||
where E: Into<Entid>,
|
||||
A: Into<Entid> {
|
||||
let entid = entity.into();
|
||||
let attrid = attribute.into();
|
||||
let cached = cache.get_value_for_entid(&attrid, &entid).cloned();
|
||||
if cached.is_some() {
|
||||
return Ok(cached);
|
||||
|
||||
if known.is_attribute_cached_forward(attrid) {
|
||||
Ok(known.get_value_for_entid(known.schema, attrid, entid).cloned())
|
||||
} else {
|
||||
fetch_values(sqlite, known, entid, attrid, true).into_scalar_result()
|
||||
}
|
||||
fetch_values(sqlite, schema, entid, attrid, true).into_scalar_result()
|
||||
}
|
||||
|
||||
pub fn lookup_values<'sqlite, 'schema, 'cache, E, A>
|
||||
pub fn lookup_values<'sqlite, E, A>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
cache: &'cache AttributeCacher,
|
||||
known: Known,
|
||||
entity: E,
|
||||
attribute: A) -> Result<Vec<TypedValue>>
|
||||
where E: Into<Entid>, A: Into<Entid> {
|
||||
where E: Into<Entid>,
|
||||
A: Into<Entid> {
|
||||
let entid = entity.into();
|
||||
let attrid = attribute.into();
|
||||
if let Some(cached) = cache.get_values_for_entid(&attrid, &entid).cloned() {
|
||||
return Ok(cached);
|
||||
|
||||
if known.is_attribute_cached_forward(attrid) {
|
||||
Ok(known.get_values_for_entid(known.schema, attrid, entid)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| vec![]))
|
||||
} else {
|
||||
fetch_values(sqlite, known, entid, attrid, false).into_coll_result()
|
||||
}
|
||||
fetch_values(sqlite, schema, entid, attrid, false).into_coll_result()
|
||||
}
|
||||
|
||||
/// Return a single value for the provided entity and attribute.
|
||||
/// If the attribute is multi-valued, an arbitrary value is returned.
|
||||
/// If no value is present for that entity, `None` is returned.
|
||||
/// If `attribute` doesn't name an attribute, an error is returned.
|
||||
pub fn lookup_value_for_attribute<'sqlite, 'schema, 'cache, 'attribute, E>
|
||||
pub fn lookup_value_for_attribute<'sqlite, 'attribute, E>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
cache: &'cache AttributeCacher,
|
||||
known: Known,
|
||||
entity: E,
|
||||
attribute: &'attribute NamespacedKeyword) -> Result<Option<TypedValue>>
|
||||
where E: Into<Entid> {
|
||||
lookup_value(sqlite, schema, cache, entity.into(), lookup_attribute(schema, attribute)?)
|
||||
let attribute = lookup_attribute(known.schema, attribute)?;
|
||||
lookup_value(sqlite, known, entity.into(), attribute)
|
||||
}
|
||||
|
||||
pub fn lookup_values_for_attribute<'sqlite, 'schema, 'cache, 'attribute, E>
|
||||
pub fn lookup_values_for_attribute<'sqlite, 'attribute, E>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
cache: &'cache AttributeCacher,
|
||||
known: Known,
|
||||
entity: E,
|
||||
attribute: &'attribute NamespacedKeyword) -> Result<Vec<TypedValue>>
|
||||
where E: Into<Entid> {
|
||||
lookup_values(sqlite, schema, cache, entity.into(), lookup_attribute(schema, attribute)?)
|
||||
let attribute = lookup_attribute(known.schema, attribute)?;
|
||||
lookup_values(sqlite, known, entity.into(), attribute)
|
||||
}
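A minimal caller-side sketch (illustrative names, not from this commit) of the `Known`-based lookups: a `Known` bundles a schema reference with an optional attribute cache, and the lookup helpers consult the cache before falling back to SQL.

    // Illustrative sketch: build a Known from a schema and cache, then do a lookup.
    let known = Known::new(&schema, Some(&attribute_cache));
    let value = lookup_value_for_attribute(&sqlite, known, entity, &kw!(:foo/bar))?;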
|
||||
|
||||
fn run_statement<'sqlite, 'stmt, 'bound>
|
||||
|
@ -293,14 +298,13 @@ fn run_sql_query<'sqlite, 'sql, 'bound, T, F>
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
fn algebrize_query_str<'schema, 'query, T>
|
||||
(schema: &'schema Schema,
|
||||
fn algebrize_query_str<'query, T>
|
||||
(known: Known,
|
||||
query: &'query str,
|
||||
inputs: T) -> Result<AlgebraicQuery>
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
where T: Into<Option<QueryInputs>> {
|
||||
let parsed = parse_find_string(query)?;
|
||||
algebrize_query(schema, parsed, inputs)
|
||||
algebrize_query(known, parsed, inputs)
|
||||
}
|
||||
|
||||
fn run_algebrized_query<'sqlite>(sqlite: &'sqlite rusqlite::Connection, algebrized: AlgebraicQuery) -> QueryExecutionResult {
|
||||
|
@ -329,26 +333,39 @@ fn run_algebrized_query<'sqlite>(sqlite: &'sqlite rusqlite::Connection, algebriz
|
|||
/// instances.
|
||||
/// The caller is responsible for ensuring that the SQLite connection has an open transaction if
|
||||
/// isolation is required.
|
||||
pub fn q_once<'sqlite, 'schema, 'query, T>
|
||||
pub fn q_once<'sqlite, 'query, T>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
known: Known,
|
||||
query: &'query str,
|
||||
inputs: T) -> QueryExecutionResult
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
let algebrized = algebrize_query_str(known, query, inputs)?;
|
||||
run_algebrized_query(sqlite, algebrized)
|
||||
}
|
||||
|
||||
/// Just like `q_once`, but doesn't use any cached values.
|
||||
pub fn q_uncached<'sqlite, 'schema, 'query, T>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
query: &'query str,
|
||||
inputs: T) -> QueryExecutionResult
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
let algebrized = algebrize_query_str(schema, query, inputs)?;
|
||||
let known = Known::for_schema(schema);
|
||||
let algebrized = algebrize_query_str(known, query, inputs)?;
|
||||
|
||||
run_algebrized_query(sqlite, algebrized)
|
||||
}
|
||||
|
||||
pub fn q_prepare<'sqlite, 'schema, 'query, T>
|
||||
pub fn q_prepare<'sqlite, 'query, T>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
known: Known,
|
||||
query: &'query str,
|
||||
inputs: T) -> PreparedResult<'sqlite>
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
let algebrized = algebrize_query_str(schema, query, inputs)?;
|
||||
let algebrized = algebrize_query_str(known, query, inputs)?;
|
||||
|
||||
let unbound = algebrized.unbound_variables();
|
||||
if !unbound.is_empty() {
|
||||
|
@ -375,14 +392,14 @@ pub fn q_prepare<'sqlite, 'schema, 'query, T>
|
|||
})
|
||||
}
|
||||
|
||||
pub fn q_explain<'sqlite, 'schema, 'query, T>
|
||||
pub fn q_explain<'sqlite, 'query, T>
|
||||
(sqlite: &'sqlite rusqlite::Connection,
|
||||
schema: &'schema Schema,
|
||||
known: Known,
|
||||
query: &'query str,
|
||||
inputs: T) -> Result<QueryExplanation>
|
||||
where T: Into<Option<QueryInputs>>
|
||||
{
|
||||
let algebrized = algebrize_query_str(schema, query, inputs)?;
|
||||
let algebrized = algebrize_query_str(known, query, inputs)?;
|
||||
if algebrized.is_known_empty() {
|
||||
return Ok(QueryExplanation::KnownEmpty(algebrized.cc.empty_because.unwrap()));
|
||||
}
225
tests/cache.rs
Normal file
@ -0,0 +1,225 @@
|
|||
// Copyright 2016 Mozilla
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
// this file except in compliance with the License. You may obtain a copy of the
|
||||
// License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing, software distributed
|
||||
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
extern crate rusqlite;
|
||||
|
||||
#[macro_use]
|
||||
extern crate mentat;
|
||||
extern crate mentat_core;
|
||||
extern crate mentat_db;
|
||||
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use mentat_core::{
|
||||
CachedAttributes,
|
||||
};
|
||||
|
||||
use mentat::{
|
||||
Entid,
|
||||
HasSchema,
|
||||
Queryable,
|
||||
Schema,
|
||||
Store,
|
||||
TypedValue,
|
||||
};
|
||||
|
||||
use mentat_db::cache::{
|
||||
SQLiteAttributeCache,
|
||||
};
|
||||
|
||||
fn populate_db() -> Store {
|
||||
let mut store = Store::open("").expect("opened");
|
||||
{
|
||||
let mut write = store.begin_transaction().expect("began transaction");
|
||||
let _report = write.transact(r#"[
|
||||
{:db/ident :foo/bar
|
||||
:db/valueType :db.type/long
|
||||
:db/cardinality :db.cardinality/one },
|
||||
{:db/ident :foo/baz
|
||||
:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one },
|
||||
{:db/ident :foo/bap
|
||||
:db/valueType :db.type/string
|
||||
:db/cardinality :db.cardinality/many}]"#).expect("transaction expected to succeed");
|
||||
let _report = write.transact(r#"[
|
||||
{:db/ident :item/one
|
||||
:foo/bar 100
|
||||
:foo/baz false
|
||||
:foo/bap ["one","two","buckle my shoe"] },
|
||||
{:db/ident :item/two
|
||||
:foo/bar 200
|
||||
:foo/baz true
|
||||
:foo/bap ["three", "four", "knock at my door"] }]"#).expect("transaction expected to succeed");
|
||||
write.commit().expect("committed");
|
||||
}
|
||||
store
|
||||
}
|
||||
|
||||
fn assert_value_present_for_attribute(schema: &Schema, attribute_cache: &mut SQLiteAttributeCache, attribute: Entid, entity: Entid, value: TypedValue) {
|
||||
let one = attribute_cache.get_value_for_entid(schema, attribute, entity);
|
||||
assert!(attribute_cache.get_values_for_entid(schema, attribute, entity).is_none());
|
||||
|
||||
assert_eq!(one, Some(&value));
|
||||
}
|
||||
|
||||
fn assert_values_present_for_attribute(schema: &Schema, attribute_cache: &mut SQLiteAttributeCache, attribute: Entid, entity: Entid, values: Vec<TypedValue>) {
|
||||
assert!(attribute_cache.get_value_for_entid(schema, attribute, entity).is_none());
|
||||
let actual: BTreeSet<TypedValue> = attribute_cache.get_values_for_entid(schema, attribute, entity)
|
||||
.expect("Non-None")
|
||||
.clone()
|
||||
.into_iter()
|
||||
.collect();
|
||||
let expected: BTreeSet<TypedValue> = values.into_iter().collect();
|
||||
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_to_cache() {
|
||||
let mut store = populate_db();
|
||||
let schema = &store.conn().current_schema();
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
let kw = kw!(:foo/bar);
|
||||
let attr: Entid = schema.get_entid(&kw).expect("Expected entid for attribute").into();
|
||||
|
||||
{
|
||||
assert!(attribute_cache.value_pairs(schema, attr).is_none());
|
||||
}
|
||||
|
||||
attribute_cache.register(&schema, &store.sqlite_mut(), attr).expect("No errors on add to cache");
|
||||
{
|
||||
let cached_values = attribute_cache.value_pairs(schema, attr).expect("non-None");
|
||||
assert!(!cached_values.is_empty());
|
||||
let flattened: BTreeSet<TypedValue> = cached_values.values().cloned().collect();
|
||||
let expected: BTreeSet<TypedValue> = vec![TypedValue::Long(100), TypedValue::Long(200)].into_iter().collect();
|
||||
assert_eq!(flattened, expected);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_attribute_already_in_cache() {
|
||||
let mut store = populate_db();
|
||||
let schema = store.conn().current_schema();
|
||||
|
||||
let kw = kw!(:foo/bar);
|
||||
let attr: Entid = schema.get_entid(&kw).expect("Expected entid for attribute").into();
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
|
||||
let one = schema.get_entid(&kw!(:item/one)).expect("one");
|
||||
let two = schema.get_entid(&kw!(:item/two)).expect("two");
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), attr).expect("No errors on add to cache");
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, attr.into(), one.into(), TypedValue::Long(100));
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), attr).expect("No errors on add to cache");
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, attr.into(), two.into(), TypedValue::Long(200));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_from_cache() {
|
||||
let mut store = populate_db();
|
||||
let schema = store.conn().current_schema();
|
||||
|
||||
let kwr = kw!(:foo/bar);
|
||||
let entidr: Entid = schema.get_entid(&kwr).expect("Expected entid for attribute").into();
|
||||
let kwz = kw!(:foo/baz);
|
||||
let entidz: Entid = schema.get_entid(&kwz).expect("Expected entid for attribute").into();
|
||||
let kwp = kw!(:foo/bap);
|
||||
let entidp: Entid = schema.get_entid(&kwp).expect("Expected entid for attribute").into();
|
||||
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
|
||||
let one = schema.get_entid(&kw!(:item/one)).expect("one");
|
||||
let two = schema.get_entid(&kw!(:item/two)).expect("two");
|
||||
assert!(attribute_cache.get_value_for_entid(&schema, entidz, one.into()).is_none());
|
||||
assert!(attribute_cache.get_values_for_entid(&schema, entidz, one.into()).is_none());
|
||||
assert!(attribute_cache.get_value_for_entid(&schema, entidz, two.into()).is_none());
|
||||
assert!(attribute_cache.get_values_for_entid(&schema, entidz, two.into()).is_none());
|
||||
assert!(attribute_cache.get_value_for_entid(&schema, entidp, one.into()).is_none());
|
||||
assert!(attribute_cache.get_values_for_entid(&schema, entidp, one.into()).is_none());
|
||||
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), entidr).expect("No errors on add to cache");
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, entidr, one.into(), TypedValue::Long(100));
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, entidr, two.into(), TypedValue::Long(200));
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), entidz).expect("No errors on add to cache");
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, entidz, one.into(), TypedValue::Boolean(false));
|
||||
assert_value_present_for_attribute(&schema, &mut attribute_cache, entidz, one.into(), TypedValue::Boolean(false));
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), entidp).expect("No errors on add to cache");
|
||||
assert_values_present_for_attribute(&schema, &mut attribute_cache, entidp, one.into(),
|
||||
vec![TypedValue::typed_string("buckle my shoe"),
|
||||
TypedValue::typed_string("one"),
|
||||
TypedValue::typed_string("two")]);
|
||||
assert_values_present_for_attribute(&schema, &mut attribute_cache, entidp, two.into(),
|
||||
vec![TypedValue::typed_string("knock at my door"),
|
||||
TypedValue::typed_string("three"),
|
||||
TypedValue::typed_string("four")]);
|
||||
|
||||
// test that we can remove an item from cache
|
||||
attribute_cache.unregister(entidz);
|
||||
assert!(!attribute_cache.is_attribute_cached_forward(entidz.into()));
|
||||
assert!(attribute_cache.get_value_for_entid(&schema, entidz, one.into()).is_none());
|
||||
assert!(attribute_cache.get_values_for_entid(&schema, entidz, one.into()).is_none());
|
||||
assert!(attribute_cache.get_value_for_entid(&schema, entidz, two.into()).is_none());
|
||||
assert!(attribute_cache.get_values_for_entid(&schema, entidz, two.into()).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_attribute_not_in_cache() {
|
||||
let store = populate_db();
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
|
||||
let schema = store.conn().current_schema();
|
||||
let kw = kw!(:foo/baz);
|
||||
let entid = schema.get_entid(&kw).expect("Expected entid for attribute").0;
|
||||
attribute_cache.unregister(entid);
|
||||
assert!(!attribute_cache.is_attribute_cached_forward(entid));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_attribute_value_for_entid() {
|
||||
let mut store = populate_db();
|
||||
let schema = store.conn().current_schema();
|
||||
|
||||
let entities = store.q_once(r#"[:find ?e . :where [?e :foo/bar 100]]"#, None).expect("Expected query to work").into_scalar().expect("expected scalar results");
|
||||
let entid = match entities {
|
||||
Some(TypedValue::Ref(entid)) => entid,
|
||||
x => panic!("expected Some(Ref), got {:?}", x),
|
||||
};
|
||||
|
||||
let kwr = kw!(:foo/bar);
|
||||
let attr_entid = schema.get_entid(&kwr).expect("Expected entid for attribute").0;
|
||||
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), attr_entid).expect("No errors on add to cache");
|
||||
let val = attribute_cache.get_value_for_entid(&schema, attr_entid, entid).expect("Expected value");
|
||||
assert_eq!(*val, TypedValue::Long(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_attribute_values_for_entid() {
|
||||
let mut store = populate_db();
|
||||
let schema = store.conn().current_schema();
|
||||
|
||||
let entities = store.q_once(r#"[:find ?e . :where [?e :foo/bar 100]]"#, None).expect("Expected query to work").into_scalar().expect("expected scalar results");
|
||||
let entid = match entities {
|
||||
Some(TypedValue::Ref(entid)) => entid,
|
||||
x => panic!("expected Some(Ref), got {:?}", x),
|
||||
};
|
||||
|
||||
let kwp = kw!(:foo/bap);
|
||||
let attr_entid = schema.get_entid(&kwp).expect("Expected entid for attribute").0;
|
||||
|
||||
let mut attribute_cache = SQLiteAttributeCache::default();
|
||||
|
||||
attribute_cache.register(&schema, &mut store.sqlite_mut(), attr_entid).expect("No errors on add to cache");
|
||||
let val = attribute_cache.get_values_for_entid(&schema, attr_entid, entid).expect("Expected value");
|
||||
assert_eq!(*val, vec![TypedValue::typed_string("buckle my shoe"),
|
||||
TypedValue::typed_string("one"),
|
||||
TypedValue::typed_string("two")]);
|
||||
}
@ -32,15 +32,17 @@ use mentat_core::{
|
|||
};
|
||||
|
||||
use mentat::{
|
||||
IntoResult,
|
||||
NamespacedKeyword,
|
||||
PlainSymbol,
|
||||
QueryInputs,
|
||||
QueryResults,
|
||||
Variable,
|
||||
new_connection,
|
||||
q_once,
|
||||
};
|
||||
|
||||
use mentat::query::q_uncached;
|
||||
|
||||
use mentat::conn::Conn;
|
||||
|
||||
use mentat::errors::{
|
||||
|
@ -55,7 +57,7 @@ fn test_rel() {
|
|||
|
||||
// Rel.
|
||||
let start = time::PreciseTime::now();
|
||||
let results = q_once(&c, &db.schema,
|
||||
let results = q_uncached(&c, &db.schema,
|
||||
"[:find ?x ?ident :where [?x :db/ident ?ident]]", None)
|
||||
.expect("Query failed")
|
||||
.results;
|
||||
|
@ -86,7 +88,7 @@ fn test_failing_scalar() {
|
|||
|
||||
// Scalar that fails.
|
||||
let start = time::PreciseTime::now();
|
||||
let results = q_once(&c, &db.schema,
|
||||
let results = q_uncached(&c, &db.schema,
|
||||
"[:find ?x . :where [?x :db/fulltext true]]", None)
|
||||
.expect("Query failed")
|
||||
.results;
|
||||
|
@ -109,7 +111,7 @@ fn test_scalar() {
|
|||
|
||||
// Scalar that succeeds.
|
||||
let start = time::PreciseTime::now();
|
||||
let results = q_once(&c, &db.schema,
|
||||
let results = q_uncached(&c, &db.schema,
|
||||
"[:find ?ident . :where [24 :db/ident ?ident]]", None)
|
||||
.expect("Query failed")
|
||||
.results;
|
||||
|
@ -137,7 +139,7 @@ fn test_tuple() {
|
|||
|
||||
// Tuple.
|
||||
let start = time::PreciseTime::now();
|
||||
let results = q_once(&c, &db.schema,
|
||||
let results = q_uncached(&c, &db.schema,
|
||||
"[:find [?index ?cardinality]
|
||||
:where [:db/txInstant :db/index ?index]
|
||||
[:db/txInstant :db/cardinality ?cardinality]]",
|
||||
|
@@ -168,7 +170,7 @@ fn test_coll() {

    // Coll.
    let start = time::PreciseTime::now();
-    let results = q_once(&c, &db.schema,
+    let results = q_uncached(&c, &db.schema,
                          "[:find [?e ...] :where [?e :db/ident _]]", None)
        .expect("Query failed")
        .results;
@@ -194,7 +196,7 @@ fn test_inputs() {
    // entids::DB_INSTALL_VALUE_TYPE = 5.
    let ee = (Variable::from_valid_name("?e"), TypedValue::Ref(5));
    let inputs = QueryInputs::with_value_sequence(vec![ee]);
-    let results = q_once(&c, &db.schema,
+    let results = q_uncached(&c, &db.schema,
                          "[:find ?i . :in ?e :where [?e :db/ident ?i]]", inputs)
        .expect("query to succeed")
        .results;
@@ -215,7 +217,7 @@ fn test_unbound_inputs() {
    // Bind the wrong var by 'mistake'.
    let xx = (Variable::from_valid_name("?x"), TypedValue::Ref(5));
    let inputs = QueryInputs::with_value_sequence(vec![xx]);
-    let results = q_once(&c, &db.schema,
+    let results = q_uncached(&c, &db.schema,
                          "[:find ?i . :in ?e :where [?e :db/ident ?i]]", inputs);

    match results {
@@ -620,3 +622,38 @@ fn test_type_reqs() {
        }
    };
}

#[test]
fn test_cache_usage() {
    let mut c = new_connection("").expect("opened connection");
    let conn = Conn::connect(&mut c).expect("connected");

    let db_ident = (*conn.current_schema()).get_entid(&kw!(:db/ident)).expect("db_ident");
    let db_type = (*conn.current_schema()).get_entid(&kw!(:db/valueType)).expect("db_valueType");
    println!("db/ident is {}", db_ident.0);
    println!("db/type is {}", db_type.0);
    let query = format!("[:find ?ident . :where [?e {} :db/doc][?e {} ?type][?type {} ?ident]]",
                        db_ident.0, db_type.0, db_ident.0);

    println!("Query is {}", query);

    let schema = conn.current_schema();
    (*conn.attribute_cache_mut()).register(&schema, &mut c, db_ident).expect("registered");
    (*conn.attribute_cache_mut()).register(&schema, &mut c, db_type).expect("registered");

    let ident = conn.q_once(&c, query.as_str(), None).into_scalar_result().expect("query");
    assert_eq!(ident, Some(TypedValue::typed_ns_keyword("db.type", "string")));

    let ident = conn.q_uncached(&c, query.as_str(), None).into_scalar_result().expect("query");
    assert_eq!(ident, Some(TypedValue::typed_ns_keyword("db.type", "string")));

    // Run the same query both ways and compare wall-clock time.
    let start = time::PreciseTime::now();
    conn.q_once(&c, query.as_str(), None).into_scalar_result().expect("query");
    let end = time::PreciseTime::now();
    println!("Cached took {}µs", start.to(end).num_microseconds().unwrap());

    let start = time::PreciseTime::now();
    conn.q_uncached(&c, query.as_str(), None).into_scalar_result().expect("query");
    let end = time::PreciseTime::now();
    println!("Uncached took {}µs", start.to(end).num_microseconds().unwrap());
}
@@ -32,9 +32,12 @@ use errors as cli;

use edn;

use mentat::CacheDirection;

pub static HELP_COMMAND: &'static str = &"help";
pub static OPEN_COMMAND: &'static str = &"open";
pub static OPEN_EMPTY_COMMAND: &'static str = &"empty";
pub static CACHE_COMMAND: &'static str = &"cache";
pub static CLOSE_COMMAND: &'static str = &"close";
pub static LONG_QUERY_COMMAND: &'static str = &"query";
pub static SHORT_QUERY_COMMAND: &'static str = &"q";
@@ -55,6 +58,7 @@ pub enum Command {
    Help(Vec<String>),
    Open(String),
    OpenEmpty(String),
    Cache(String, CacheDirection),
    Query(String),
    Schema,
    Sync(Vec<String>),
@@ -82,6 +86,7 @@ impl Command {
            &Command::Close |
            &Command::Exit |
            &Command::Sync(_) |
            &Command::Cache(_, _) |
            &Command::Schema => true
        }
    }
@@ -95,6 +100,7 @@ impl Command {
            &Command::Help(_) |
            &Command::Open(_) |
            &Command::OpenEmpty(_) |
            &Command::Cache(_, _) |
            &Command::Close |
            &Command::Exit |
            &Command::Sync(_) |
@@ -110,6 +116,9 @@ impl Command {
            &Command::Transact(ref args) => {
                format!(".{} {}", LONG_TRANSACT_COMMAND, args)
            },
            &Command::Cache(ref attr, ref direction) => {
                format!(".{} {} {:?}", CACHE_COMMAND, attr, direction)
            },
            &Command::Timer(on) => {
                format!(".{} {}", LONG_TIMER_COMMAND, on)
            },
@@ -142,6 +151,7 @@ impl Command {
}

pub fn command(s: &str) -> Result<Command, cli::Error> {
    let argument = || many1::<String, _>(satisfy(|c: char| !c.is_whitespace()));
    let arguments = || sep_end_by::<Vec<_>, _, _>(many1(satisfy(|c: char| !c.is_whitespace())), many1::<Vec<_>, _>(space())).expected("arguments");

    let help_parser = string(HELP_COMMAND)
@@ -158,6 +168,18 @@ pub fn command(s: &str) -> Result<Command, cli::Error> {
            Ok(Command::Timer(args))
        });

    let direction_parser = || string("forward")
        .map(|_| CacheDirection::Forward)
        .or(string("reverse").map(|_| CacheDirection::Reverse))
        .or(string("both").map(|_| CacheDirection::Both));

    let cache_parser = string(CACHE_COMMAND)
        .with(spaces())
        .with(argument().skip(spaces()).and(direction_parser())
            .map(|(arg, direction)| {
                Ok(Command::Cache(arg, direction))
            }));
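    // For illustration: with the grammar above, a REPL line such as
    // `.cache :foo/bar reverse` parses to
    // `Command::Cache(":foo/bar".to_string(), CacheDirection::Reverse)`; the attribute
    // text itself is only checked later, when the command is executed.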

    let open_parser = string(OPEN_COMMAND)
        .with(spaces())
        .with(arguments())
@@ -256,9 +278,10 @@ pub fn command(s: &str) -> Result<Command, cli::Error> {
        });
    spaces()
        .skip(token('.'))
-        .with(choice::<[&mut Parser<Input = _, Output = Result<Command, cli::Error>>; 11], _>
+        .with(choice::<[&mut Parser<Input = _, Output = Result<Command, cli::Error>>; 12], _>
            ([&mut try(help_parser),
              &mut try(timer_parser),
+             &mut try(cache_parser),
              &mut try(open_parser),
              &mut try(open_empty_parser),
              &mut try(close_parser),
@@ -25,6 +25,8 @@ use time::{
};

use mentat::{
    CacheDirection,
    NamespacedKeyword,
    Queryable,
    QueryExplanation,
    QueryOutput,
@@ -39,6 +41,7 @@ use command_parser::{
    Command,
    HELP_COMMAND,
    OPEN_COMMAND,
    CACHE_COMMAND,
    LONG_QUERY_COMMAND,
    SHORT_QUERY_COMMAND,
    SCHEMA_COMMAND,
@@ -66,6 +69,7 @@ lazy_static! {
        map.insert(SHORT_EXIT_COMMAND, "Shortcut for `.exit`. Close the current database and exit the REPL.");
        map.insert(HELP_COMMAND, "Show help for commands.");
        map.insert(OPEN_COMMAND, "Open a database at path.");
        map.insert(CACHE_COMMAND, "Cache an attribute. Usage: `.cache :foo/bar reverse`");
        map.insert(LONG_QUERY_COMMAND, "Execute a query against the current open database.");
        map.insert(SHORT_QUERY_COMMAND, "Shortcut for `.query`. Execute a query against the current open database.");
        map.insert(SCHEMA_COMMAND, "Output the schema for the current open database.");
@@ -83,6 +87,17 @@ fn eprint_out(s: &str) {
    eprint!("{green}{s}{reset}", green = color::Fg(::GREEN), s = s, reset = color::Fg(color::Reset));
}

fn parse_namespaced_keyword(input: &str) -> Option<NamespacedKeyword> {
    let splits = [':', '/'];
    let mut i = input.split(&splits[..]);
    match (i.next(), i.next(), i.next(), i.next()) {
        (Some(""), Some(namespace), Some(name), None) => {
            Some(NamespacedKeyword::new(namespace, name))
        },
        _ => None,
    }
}
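// For illustration: ":foo/bar" splits on [':', '/'] into ["", "foo", "bar"] and maps to
// Some(NamespacedKeyword::new("foo", "bar")); inputs missing the leading colon or the
// namespace, or carrying extra separators ("foo/bar", ":foo", ":foo/bar/baz"), yield None.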

fn format_time(duration: Duration) {
    let m_micros = duration.num_microseconds();
    if let Some(micros) = m_micros {
@@ -168,6 +183,17 @@ impl Repl {
        }
    }

    fn cache(&mut self, attr: String, direction: CacheDirection) {
        if let Some(kw) = parse_namespaced_keyword(attr.as_str()) {
            match self.store.cache(&kw, direction) {
                Result::Ok(_) => (),
                Result::Err(e) => eprintln!("Couldn't cache attribute: {}", e),
            };
        } else {
            eprintln!("Invalid attribute {}", attr);
        }
    }
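    // For illustration: `.cache :foo/bar both` reaches this method as attr = ":foo/bar"
    // and direction = CacheDirection::Both, and is forwarded as
    // `self.store.cache(&NamespacedKeyword::new("foo", "bar"), CacheDirection::Both)`.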

    /// Runs a single command input.
    fn handle_command(&mut self, cmd: Command) {
        let should_time = self.timer_on && cmd.is_timed();
@@ -177,6 +203,7 @@ impl Repl {
        match cmd {
            Command::Help(args) => self.help_command(args),
            Command::Timer(on) => self.toggle_timer(on),
            Command::Cache(attr, direction) => self.cache(attr, direction),
            Command::Open(db) => {
                match self.open(db) {
                    Ok(_) => println!("Database {:?} opened", self.db_name()),