Performance and cleanup. r=emily

* Use fixed-size arrays for bootstrap datoms, not vecs.
* Wide-ranging cleanup.

    This commit:
    - Deletes some dead code.
    - Marks some functions only used by tests as cfg(test).
    - Adds pub(crate) to a bunch of functions.
    - Cleans up a few other nits.
Richard Newman 2018-03-06 09:03:00 -08:00
parent a0c70a7cd9
commit 1817ce7c0b
24 changed files with 103 additions and 139 deletions
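To illustrate the first bullet of the commit message before the per-file hunks: swapping a `lazy_static!` `Vec` for a fixed-size array drops one heap allocation per static and makes the element count part of the type. A minimal standalone sketch with made-up idents, not the project's real bootstrap tables:

    #[macro_use]
    extern crate lazy_static;

    lazy_static! {
        // Before: heap-allocated; the length is only known at runtime.
        // static ref IDENTS: Vec<(&'static str, i64)> = vec![("db/ident", 1), ("db/txInstant", 3)];

        // After: a fixed-size array; no allocation, and the length is checked at compile time.
        static ref IDENTS: [(&'static str, i64); 2] = [("db/ident", 1), ("db/txInstant", 3)];
    }

    fn main() {
        for &(ident, entid) in IDENTS.iter() {
            println!("{} -> {}", ident, entid);
        }
    }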

View file

@@ -39,8 +39,8 @@ pub const USER0: i64 = 0x10000;
 pub const CORE_SCHEMA_VERSION: u32 = 1;
 lazy_static! {
-static ref V1_IDENTS: Vec<(symbols::NamespacedKeyword, i64)> = {
-vec![(ns_keyword!("db", "ident"), entids::DB_IDENT),
+static ref V1_IDENTS: [(symbols::NamespacedKeyword, i64); 40] = {
+[(ns_keyword!("db", "ident"), entids::DB_IDENT),
 (ns_keyword!("db.part", "db"), entids::DB_PART_DB),
 (ns_keyword!("db", "txInstant"), entids::DB_TX_INSTANT),
 (ns_keyword!("db.install", "partition"), entids::DB_INSTALL_PARTITION),
@@ -83,15 +83,15 @@ lazy_static! {
 ]
 };
-static ref V1_PARTS: Vec<(symbols::NamespacedKeyword, i64, i64)> = {
-vec![(ns_keyword!("db.part", "db"), 0, (1 + V1_IDENTS.len()) as i64),
+static ref V1_PARTS: [(symbols::NamespacedKeyword, i64, i64); 3] = {
+[(ns_keyword!("db.part", "db"), 0, (1 + V1_IDENTS.len()) as i64),
 (ns_keyword!("db.part", "user"), USER0, USER0),
 (ns_keyword!("db.part", "tx"), TX0, TX0),
 ]
 };
-static ref V1_CORE_SCHEMA: Vec<(symbols::NamespacedKeyword)> = {
-vec![(ns_keyword!("db", "ident")),
+static ref V1_CORE_SCHEMA: [(symbols::NamespacedKeyword); 16] = {
+[(ns_keyword!("db", "ident")),
 (ns_keyword!("db.install", "partition")),
 (ns_keyword!("db.install", "valueType")),
 (ns_keyword!("db.install", "attribute")),
@@ -273,29 +273,29 @@ fn symbolic_schema_to_assertions(symbolic_schema: &Value) -> Result<Vec<Value>>
 Ok(assertions)
 }
-pub fn bootstrap_partition_map() -> PartitionMap {
-V1_PARTS[..].iter()
+pub(crate) fn bootstrap_partition_map() -> PartitionMap {
+V1_PARTS.iter()
 .map(|&(ref part, start, index)| (part.to_string(), Partition::new(start, index)))
 .collect()
 }
-pub fn bootstrap_ident_map() -> IdentMap {
-V1_IDENTS[..].iter()
+pub(crate) fn bootstrap_ident_map() -> IdentMap {
+V1_IDENTS.iter()
 .map(|&(ref ident, entid)| (ident.clone(), entid))
 .collect()
 }
-pub fn bootstrap_schema() -> Schema {
+pub(crate) fn bootstrap_schema() -> Schema {
 let ident_map = bootstrap_ident_map();
 let bootstrap_triples = symbolic_schema_to_triples(&ident_map, &V1_SYMBOLIC_SCHEMA).unwrap();
 Schema::from_ident_map_and_triples(ident_map, bootstrap_triples).unwrap()
 }
-pub fn bootstrap_entities() -> Vec<Entity> {
+pub(crate) fn bootstrap_entities() -> Vec<Entity> {
 let bootstrap_assertions: Value = Value::Vector([
 symbolic_schema_to_assertions(&V1_SYMBOLIC_SCHEMA).unwrap(),
 idents_to_assertions(&V1_IDENTS[..]),
-schema_attrs_to_assertions(CORE_SCHEMA_VERSION, &V1_CORE_SCHEMA),
+schema_attrs_to_assertions(CORE_SCHEMA_VERSION, V1_CORE_SCHEMA.as_ref()),
 ].concat());
 // Failure here is a coding error (since the inputs are fixed), not a runtime error.
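A note on why the hunks above need so few call-site edits: a fixed-size array borrows as a slice, so iteration, length, and explicit slicing all keep working. Illustrative values below, not the store's real partitions:

    fn main() {
        let parts: [(&str, i64); 2] = [("db.part/user", 0x10000), ("db.part/tx", 0x10000000)];
        let as_slice: &[(&str, i64)] = parts.as_ref(); // the same shape a Vec would lend out
        assert_eq!(as_slice.len(), parts.iter().count());
    }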

View file

@@ -40,7 +40,7 @@ use types::Schema;
 /// Represents a *datom* (assertion) in the store.
 #[derive(Clone,Debug,Eq,Hash,Ord,PartialOrd,PartialEq)]
-pub struct Datom {
+pub(crate) struct Datom {
 // TODO: generalize this.
 e: Entid,
 a: Entid,
@@ -54,7 +54,7 @@ pub struct Datom {
 /// To make comparision easier, we deterministically order. The ordering is the ascending tuple
 /// ordering determined by `(e, a, (value_type_tag, v), tx)`, where `value_type_tag` is an internal
 /// value that is not exposed but is deterministic.
-pub struct Datoms(pub Vec<Datom>);
+pub(crate) struct Datoms(pub Vec<Datom>);
 /// Represents an ordered sequence of transactions in the store.
 ///
@@ -62,13 +62,13 @@ pub struct Datoms(pub Vec<Datom>);
 /// ordering determined by `(e, a, (value_type_tag, v), tx, added)`, where `value_type_tag` is an
 /// internal value that is not exposed but is deterministic, and `added` is ordered such that
 /// retracted assertions appear before added assertions.
-pub struct Transactions(pub Vec<Datoms>);
+pub(crate) struct Transactions(pub Vec<Datoms>);
 /// Represents the fulltext values in the store.
-pub struct FulltextValues(pub Vec<(i64, String)>);
+pub(crate) struct FulltextValues(pub Vec<(i64, String)>);
 impl Datom {
-pub fn into_edn(&self) -> edn::Value {
+pub(crate) fn into_edn(&self) -> edn::Value {
 let f = |entid: &Entid| -> edn::Value {
 match *entid {
 Entid::Entid(ref y) => edn::Value::Integer(y.clone()),
@@ -87,19 +87,19 @@ impl Datom {
 }
 impl Datoms {
-pub fn into_edn(&self) -> edn::Value {
+pub(crate) fn into_edn(&self) -> edn::Value {
 edn::Value::Vector((&self.0).into_iter().map(|x| x.into_edn()).collect())
 }
 }
 impl Transactions {
-pub fn into_edn(&self) -> edn::Value {
+pub(crate) fn into_edn(&self) -> edn::Value {
 edn::Value::Vector((&self.0).into_iter().map(|x| x.into_edn()).collect())
 }
 }
 impl FulltextValues {
-pub fn into_edn(&self) -> edn::Value {
+pub(crate) fn into_edn(&self) -> edn::Value {
 edn::Value::Vector((&self.0).into_iter().map(|&(x, ref y)| edn::Value::Vector(vec![edn::Value::Integer(x), edn::Value::Text(y.clone())])).collect())
 }
 }
@@ -126,7 +126,7 @@ fn to_entid(schema: &Schema, entid: i64) -> Entid {
 /// Return the set of datoms in the store, ordered by (e, a, v, tx), but not including any datoms of
 /// the form [... :db/txInstant ...].
-pub fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Result<Datoms> {
+pub(crate) fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Result<Datoms> {
 datoms_after(conn, schema, bootstrap::TX0 - 1)
 }
@@ -134,7 +134,7 @@ pub fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Res
 /// ordered by (e, a, v, tx).
 ///
 /// The datom set returned does not include any datoms of the form [... :db/txInstant ...].
-pub fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Datoms> {
+pub(crate) fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Datoms> {
 let borrowed_schema = schema.borrow();
 let mut stmt: rusqlite::Statement = conn.prepare("SELECT e, a, v, value_type_tag, tx FROM datoms WHERE tx > ? ORDER BY e ASC, a ASC, value_type_tag ASC, v ASC, tx ASC")?;
@@ -174,7 +174,7 @@ pub fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S,
 /// given `tx`, ordered by (tx, e, a, v).
 ///
 /// Each transaction returned includes the [:db/tx :db/txInstant ...] datom.
-pub fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Transactions> {
+pub(crate) fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Transactions> {
 let borrowed_schema = schema.borrow();
 let mut stmt: rusqlite::Statement = conn.prepare("SELECT e, a, v, value_type_tag, tx, added FROM transactions WHERE tx > ? ORDER BY tx ASC, e ASC, a ASC, value_type_tag ASC, v ASC, added ASC")?;
@@ -210,7 +210,7 @@ pub fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema
 }
 /// Return the set of fulltext values in the store, ordered by rowid.
-pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
+pub(crate) fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
 let mut stmt: rusqlite::Statement = conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
 let r: Result<Vec<_>> = stmt.query_and_then(&[], |row| {
@@ -227,7 +227,7 @@ pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
 ///
 /// The query is printed followed by a newline, then the returned columns followed by a newline, and
 /// then the data rows and columns. All columns are aligned.
-pub fn dump_sql_query(conn: &rusqlite::Connection, sql: &str, params: &[&ToSql]) -> Result<String> {
+pub(crate) fn dump_sql_query(conn: &rusqlite::Connection, sql: &str, params: &[&ToSql]) -> Result<String> {
 let mut stmt: rusqlite::Statement = conn.prepare(sql)?;
 let mut tw = TabWriter::new(Vec::new()).padding(2);
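The `pub` → `pub(crate)` edits above (and throughout the files that follow) all rely on the same visibility rule: the item stays callable anywhere inside the defining crate but disappears from the crate's public API. A tiny sketch with hypothetical module names:

    mod db {
        pub mod debug {
            // Callable from any module in this crate, invisible to downstream crates.
            pub(crate) fn dump_datoms() -> Vec<String> {
                vec!["[1 :db/ident :db/doc]".to_string()]
            }
        }
    }

    fn main() {
        // Same crate, different module: allowed.
        println!("{:?}", db::debug::dump_datoms());
    }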

View file

@@ -65,7 +65,7 @@ impl TermWithTempIds {
 // These have no tempids by definition, and just need to be unwrapped. This operation might
 // also be called "lowering" or "level lowering", but the concept of "unwrapping" is common in
 // Rust and seems appropriate here.
-pub fn unwrap(self) -> TermWithoutTempIds {
+pub(crate) fn unwrap(self) -> TermWithoutTempIds {
 match self {
 Term::AddOrRetract(op, Left(n), a, Left(v)) => Term::AddOrRetract(op, n, a, v),
 _ => unreachable!(),

View file

@@ -175,7 +175,7 @@ impl<'conn, 'a, W> Tx<'conn, 'a, W> where W: TransactWatcher {
 /// Given a collection of tempids and the [a v] pairs that they might upsert to, resolve exactly
 /// which [a v] pairs do upsert to entids, and map each tempid that upserts to the upserted
 /// entid. The keys of the resulting map are exactly those tempids that upserted.
-pub fn resolve_temp_id_avs<'b>(&self, temp_id_avs: &'b [(TempIdHandle, AVPair)]) -> Result<TempIdMap> {
+pub(crate) fn resolve_temp_id_avs<'b>(&self, temp_id_avs: &'b [(TempIdHandle, AVPair)]) -> Result<TempIdMap> {
 if temp_id_avs.is_empty() {
 return Ok(TempIdMap::default());
 }

View file

@@ -56,7 +56,7 @@ struct UpsertEV(TempIdHandle, Entid, TempIdHandle);
 /// entid allocations. That's why we separate into special simple and complex upsert types
 /// immediately, and then collect the more general term types for final resolution.
 #[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)]
-pub struct Generation {
+pub(crate) struct Generation {
 /// "Simple upserts" that look like [:db/add TEMPID a v], where a is :db.unique/identity.
 upserts_e: Vec<UpsertE>,
@@ -79,7 +79,7 @@ pub struct Generation {
 }
 #[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)]
-pub struct FinalPopulations {
+pub(crate) struct FinalPopulations {
 /// Upserts that upserted.
 pub upserted: Vec<TermWithoutTempIds>,
@@ -93,7 +93,7 @@ pub struct FinalPopulations {
 impl Generation {
 /// Split entities into a generation of populations that need to evolve to have their tempids
 /// resolved or allocated, and a population of inert entities that do not reference tempids.
-pub fn from<I>(terms: I, schema: &Schema) -> errors::Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
+pub(crate) fn from<I>(terms: I, schema: &Schema) -> errors::Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
 let mut generation = Generation::default();
 let mut inert = vec![];
@@ -135,7 +135,7 @@ impl Generation {
 /// There can be complex upserts but no simple upserts to help resolve them. We accept the
 /// overhead of having the database try to resolve an empty set of simple upserts, to avoid
 /// having to special case complex upserts at entid allocation time.
-pub fn can_evolve(&self) -> bool {
+pub(crate) fn can_evolve(&self) -> bool {
 !self.upserts_e.is_empty() || !self.upserts_ev.is_empty()
 }
@@ -143,7 +143,7 @@ impl Generation {
 /// given temporary IDs.
 ///
 /// TODO: Considering doing this in place; the function already consumes `self`.
-pub fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
+pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
 let mut next = Generation::default();
 for UpsertE(t, a, v) in self.upserts_e {
@@ -196,7 +196,7 @@ impl Generation {
 }
 // Collect id->[a v] pairs that might upsert at this evolutionary step.
-pub fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
+pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
 let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
 // TODO: map/collect.
 for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
@@ -210,7 +210,7 @@ impl Generation {
 /// After evolution is complete, yield the set of tempids that require entid allocation. These
 /// are the tempids that appeared in [:db/add ...] entities, but that didn't upsert to existing
 /// entids.
-pub fn temp_ids_in_allocations(&self) -> BTreeSet<TempIdHandle> {
+pub(crate) fn temp_ids_in_allocations(&self) -> BTreeSet<TempIdHandle> {
 assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
 assert!(self.upserts_ev.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
@@ -241,7 +241,7 @@ impl Generation {
 /// After evolution is complete, use the provided allocated entids to segment `self` into
 /// populations, each with no references to tempids.
-pub fn into_final_populations(self, temp_id_map: &TempIdMap) -> errors::Result<FinalPopulations> {
+pub(crate) fn into_final_populations(self, temp_id_map: &TempIdMap) -> errors::Result<FinalPopulations> {
 assert!(self.upserts_e.is_empty());
 assert!(self.upserts_ev.is_empty());

View file

@@ -66,7 +66,7 @@ impl<'a> Matcher<'a> {
 /// Performs pattern matching between two EDN `Value` instances (`value`
 /// and `pattern`) utilizing a specified pattern matching ruleset `T`.
 /// Returns true if matching succeeds.
-pub fn match_with_rules<T>(value: &'a Value, pattern: &'a Value) -> bool
+fn match_with_rules<T>(value: &'a Value, pattern: &'a Value) -> bool
 where T: PatternMatchingRules<'a, Value> {
 let matcher = Matcher::new();
 matcher.match_internal::<T>(value, pattern)

View file

@@ -25,7 +25,7 @@ impl Value {
 }
 /// Write a pretty representation of this `Value` to the given writer.
-pub fn write_pretty<W>(&self, width: usize, out: &mut W) -> Result<(), io::Error> where W: io::Write {
+fn write_pretty<W>(&self, width: usize, out: &mut W) -> Result<(), io::Error> where W: io::Write {
 self.as_doc(&pretty::BoxAllocator).1.render(width, out)
 }
@@ -51,7 +51,7 @@ impl Value {
 /// Recursively traverses this value and creates a pretty.rs document.
 /// This pretty printing implementation is optimized for edn queries
 /// readability and limited whitespace expansion.
-pub fn as_doc<'a, A>(&'a self, pp: &'a A) -> pretty::DocBuilder<'a, A>
+fn as_doc<'a, A>(&'a self, pp: &'a A) -> pretty::DocBuilder<'a, A>
 where A: pretty::DocAllocator<'a> {
 match *self {
 Value::Vector(ref vs) => self.bracket(pp, "[", vs, "]"),

View file

@@ -39,10 +39,6 @@ pub use value_and_span::{
 Stream,
 };
-pub use log::{
-LogParsing,
-};
 impl std::fmt::Debug for ValueParseError {
 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
 write!(f,

View file

@@ -16,7 +16,7 @@ use combine::{
 };
 #[derive(Clone)]
-pub struct Log<P, T>(P, T)
+pub(crate) struct Log<P, T>(P, T)
 where P: Parser,
 T: ::std::fmt::Debug;
@@ -46,7 +46,7 @@ impl<I, P, T> Parser for Log<P, T>
 }
 #[inline(always)]
-pub fn log<P, T>(p: P, msg: T) -> Log<P, T>
+pub(crate) fn log<P, T>(p: P, msg: T) -> Log<P, T>
 where P: Parser,
 T: ::std::fmt::Debug,
 {
@@ -54,7 +54,7 @@ pub fn log<P, T>(p: P, msg: T) -> Log<P, T>
 }
 /// We need a trait to define `Parser.log` and have it live outside of the `combine` crate.
-pub trait LogParsing: Parser + Sized {
+pub(crate) trait LogParsing: Parser + Sized {
 fn log<T>(self, msg: T) -> Log<Self, T>
 where Self: Sized,
 T: ::std::fmt::Debug;

View file

@@ -52,7 +52,7 @@ macro_rules! coerce_to_typed_value {
 } }
 }
-pub trait ValueTypes {
+pub(crate) trait ValueTypes {
 fn potential_types(&self, schema: &Schema) -> Result<ValueTypeSet>;
 }
@@ -99,7 +99,7 @@ impl ValueTypes for FnArg {
 }
 }
-pub enum ValueConversion {
+pub(crate) enum ValueConversion {
 Val(TypedValue),
 Impossible(EmptyBecause),
 }
@@ -110,7 +110,7 @@ impl ConjoiningClauses {
 /// The conversion depends on, and can fail because of:
 /// - Existing known types of a variable to which this arg will be bound.
 /// - Existing bindings of a variable `FnArg`.
-pub fn typed_value_from_arg<'s>(&self, schema: &'s Schema, var: &Variable, arg: FnArg, known_types: ValueTypeSet) -> Result<ValueConversion> {
+pub(crate) fn typed_value_from_arg<'s>(&self, schema: &'s Schema, var: &Variable, arg: FnArg, known_types: ValueTypeSet) -> Result<ValueConversion> {
 use self::ValueConversion::*;
 if known_types.is_empty() {
 // If this happens, it likely means the pattern has already failed!

View file

@@ -51,7 +51,7 @@ use Known;
 impl ConjoiningClauses {
 #[allow(unused_variables)]
-pub fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
+pub(crate) fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
 if where_fn.args.len() != 3 {
 bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 3));
 }

View file

@@ -115,7 +115,7 @@ impl ConjoiningClauses {
 Ok(())
 }
-pub fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
+pub(crate) fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
 if where_fn.args.len() != 1 {
 bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 1));
 }

View file

@@ -31,9 +31,8 @@ use errors::{
 /// When built correctly, `types` is guaranteed to contain the types of `values` -- use
 /// `QueryInputs::new` or `QueryInputs::with_values` to construct an instance.
 pub struct QueryInputs {
-// These should be crate-private.
-pub types: BTreeMap<Variable, ValueType>,
-pub values: BTreeMap<Variable, TypedValue>,
+pub(crate) types: BTreeMap<Variable, ValueType>,
+pub(crate) values: BTreeMap<Variable, TypedValue>,
 }
 impl Default for QueryInputs {

View file

@@ -290,19 +290,20 @@ impl ConjoiningClauses {
 /// Construct a new `ConjoiningClauses` with the provided alias counter. This allows a caller
 /// to share a counter with an enclosing scope, and to start counting at a particular offset
 /// for testing.
-pub fn with_alias_counter(counter: RcCounter) -> ConjoiningClauses {
+pub(crate) fn with_alias_counter(counter: RcCounter) -> ConjoiningClauses {
 ConjoiningClauses {
 alias_counter: counter,
 ..Default::default()
 }
 }
+#[cfg(test)]
 pub fn with_inputs<T>(in_variables: BTreeSet<Variable>, inputs: T) -> ConjoiningClauses
 where T: Into<Option<QueryInputs>> {
 ConjoiningClauses::with_inputs_and_alias_counter(in_variables, inputs, RcCounter::new())
 }
-pub fn with_inputs_and_alias_counter<T>(in_variables: BTreeSet<Variable>,
+pub(crate) fn with_inputs_and_alias_counter<T>(in_variables: BTreeSet<Variable>,
 inputs: T,
 alias_counter: RcCounter) -> ConjoiningClauses
 where T: Into<Option<QueryInputs>> {
@@ -389,7 +390,7 @@ impl ConjoiningClauses {
 self.value_bindings.get(var).cloned()
 }
-pub fn is_value_bound(&self, var: &Variable) -> bool {
+pub(crate) fn is_value_bound(&self, var: &Variable) -> bool {
 self.value_bindings.contains_key(var)
 }
@@ -397,13 +398,8 @@ impl ConjoiningClauses {
 self.value_bindings.with_intersected_keys(variables)
 }
-/// Return an interator over the variables externally bound to values.
-pub fn value_bound_variables(&self) -> ::std::collections::btree_map::Keys<Variable, TypedValue> {
-self.value_bindings.keys()
-}
 /// Return a set of the variables externally bound to values.
-pub fn value_bound_variable_set(&self) -> BTreeSet<Variable> {
+pub(crate) fn value_bound_variable_set(&self) -> BTreeSet<Variable> {
 self.value_bindings.keys().cloned().collect()
 }
@@ -418,11 +414,11 @@ impl ConjoiningClauses {
 }
 }
-pub fn known_type_set(&self, var: &Variable) -> ValueTypeSet {
+pub(crate) fn known_type_set(&self, var: &Variable) -> ValueTypeSet {
 self.known_types.get(var).cloned().unwrap_or(ValueTypeSet::any())
 }
-pub fn bind_column_to_var<C: Into<Column>>(&mut self, schema: &Schema, table: TableAlias, column: C, var: Variable) {
+pub(crate) fn bind_column_to_var<C: Into<Column>>(&mut self, schema: &Schema, table: TableAlias, column: C, var: Variable) {
 let column = column.into();
 // Do we have an external binding for this?
 if let Some(bound_val) = self.bound_value(&var) {
@@ -515,7 +511,7 @@ impl ConjoiningClauses {
 self.column_bindings.entry(var).or_insert(vec![]).push(alias);
 }
-pub fn constrain_column_to_constant<C: Into<Column>>(&mut self, table: TableAlias, column: C, constant: TypedValue) {
+pub(crate) fn constrain_column_to_constant<C: Into<Column>>(&mut self, table: TableAlias, column: C, constant: TypedValue) {
 match constant {
 // Be a little more explicit.
 TypedValue::Ref(entid) => self.constrain_column_to_entity(table, column, entid),
@@ -526,23 +522,23 @@ impl ConjoiningClauses {
 }
 }
-pub fn constrain_column_to_entity<C: Into<Column>>(&mut self, table: TableAlias, column: C, entity: Entid) {
+pub(crate) fn constrain_column_to_entity<C: Into<Column>>(&mut self, table: TableAlias, column: C, entity: Entid) {
 let column = column.into();
 self.wheres.add_intersection(ColumnConstraint::Equals(QualifiedAlias(table, column), QueryValue::Entid(entity)))
 }
-pub fn constrain_attribute(&mut self, table: TableAlias, attribute: Entid) {
+pub(crate) fn constrain_attribute(&mut self, table: TableAlias, attribute: Entid) {
 self.constrain_column_to_entity(table, DatomsColumn::Attribute, attribute)
 }
-pub fn constrain_value_to_numeric(&mut self, table: TableAlias, value: i64) {
+pub(crate) fn constrain_value_to_numeric(&mut self, table: TableAlias, value: i64) {
 self.wheres.add_intersection(ColumnConstraint::Equals(
 QualifiedAlias(table, Column::Fixed(DatomsColumn::Value)),
 QueryValue::PrimitiveLong(value)))
 }
 /// Mark the given value as a long.
-pub fn constrain_var_to_long(&mut self, variable: Variable) {
+pub(crate) fn constrain_var_to_long(&mut self, variable: Variable) {
 self.narrow_types_for_var(variable, ValueTypeSet::of_one(ValueType::Long));
 }
@@ -551,7 +547,7 @@ impl ConjoiningClauses {
 self.narrow_types_for_var(variable, ValueTypeSet::of_numeric_types());
 }
-pub fn can_constrain_var_to_type(&self, var: &Variable, this_type: ValueType) -> Option<EmptyBecause> {
+pub(crate) fn can_constrain_var_to_type(&self, var: &Variable, this_type: ValueType) -> Option<EmptyBecause> {
 self.can_constrain_var_to_types(var, ValueTypeSet::of_one(this_type))
 }
@@ -591,7 +587,7 @@ impl ConjoiningClauses {
 ///
 /// If the intersection will leave the variable so that it cannot be any
 /// type, we'll call `mark_known_empty`.
-pub fn add_type_requirement(&mut self, var: Variable, types: ValueTypeSet) {
+pub(crate) fn add_type_requirement(&mut self, var: Variable, types: ValueTypeSet) {
 if types.is_empty() {
 // This shouldn't happen, but if it does…
 self.mark_known_empty(EmptyBecause::NoValidTypes(var));
@@ -632,7 +628,7 @@ impl ConjoiningClauses {
 /// actually move from a state in which a variable can have no type to one that can
 /// yield results! We never do so at present -- we carefully set-union types before we
 /// set-intersect them -- but this is worth bearing in mind.
-pub fn broaden_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) {
+pub(crate) fn broaden_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) {
 for (var, new_types) in additional_types {
 match self.known_types.entry(var) {
 Entry::Vacant(e) => {
@@ -696,7 +692,7 @@ impl ConjoiningClauses {
 /// Restrict the sets of types for the provided vars to the provided types.
 /// See `narrow_types_for_var`.
-pub fn narrow_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) {
+pub(crate) fn narrow_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) {
 if additional_types.is_empty() {
 return;
 }
@@ -831,7 +827,7 @@ impl ConjoiningClauses {
 }
 }
-pub fn next_alias_for_table(&mut self, table: DatomsTable) -> TableAlias {
+pub(crate) fn next_alias_for_table(&mut self, table: DatomsTable) -> TableAlias {
 match table {
 DatomsTable::Computed(u) =>
 format!("{}{:02}", table.name(), u),
@@ -900,7 +896,7 @@ impl ConjoiningClauses {
 /// datoms12.e = datoms13.v
 /// datoms12.e = datoms14.e
 /// ```
-pub fn expand_column_bindings(&mut self) {
+pub(crate) fn expand_column_bindings(&mut self) {
 for cols in self.column_bindings.values() {
 if cols.len() > 1 {
 let ref primary = cols[0];
@@ -916,7 +912,7 @@ impl ConjoiningClauses {
 }
 /// Eliminate any type extractions for variables whose types are definitely known.
-pub fn prune_extracted_types(&mut self) {
+pub(crate) fn prune_extracted_types(&mut self) {
 if self.extracted_types.is_empty() || self.known_types.is_empty() {
 return;
 }
@@ -927,7 +923,7 @@ impl ConjoiningClauses {
 }
 }
-pub fn process_required_types(&mut self) -> Result<()> {
+pub(crate) fn process_required_types(&mut self) -> Result<()> {
 if self.empty_because.is_some() {
 return Ok(())
 }
@@ -986,6 +982,7 @@ impl ConjoiningClauses {
 }
 Ok(())
 }
+
 /// When a CC has accumulated all patterns, generate value_type_tag entries in `wheres`
 /// to refine value types for which two things are true:
 ///
@@ -1004,7 +1001,8 @@ impl ConjoiningClauses {
 ///
 /// where `?y` must simultaneously be a ref-typed attribute and a boolean-typed attribute. This
 /// function deduces that and calls `self.mark_known_empty`. #293.
-pub fn expand_type_tags(&mut self) {
+#[allow(dead_code)]
+pub(crate) fn expand_type_tags(&mut self) {
 // TODO.
 }
 }
@@ -1023,7 +1021,7 @@ impl ConjoiningClauses {
 Ok(())
 }
-pub fn apply_clauses(&mut self, known: Known, where_clauses: Vec<WhereClause>) -> Result<()> {
+pub(crate) fn apply_clauses(&mut self, known: Known, where_clauses: Vec<WhereClause>) -> Result<()> {
 // We apply (top level) type predicates first as an optimization.
 for clause in where_clauses.iter() {
 if let &WhereClause::TypeAnnotation(ref anno) = clause {
@@ -1065,7 +1063,7 @@ impl ConjoiningClauses {
 // This is here, rather than in `lib.rs`, because it's recursive: `or` can contain `or`,
 // and so on.
-pub fn apply_clause(&mut self, known: Known, where_clause: WhereClause) -> Result<()> {
+pub(crate) fn apply_clause(&mut self, known: Known, where_clause: WhereClause) -> Result<()> {
 match where_clause {
 WhereClause::Pattern(p) => {
 match self.make_evolved_pattern(known, p) {
@@ -1096,7 +1094,7 @@ impl ConjoiningClauses {
 }
 }
-pub trait PushComputed {
+pub(crate) trait PushComputed {
 fn push_computed(&mut self, item: ComputedTable) -> DatomsTable;
 }
@@ -1121,7 +1119,7 @@ fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) {
 }
 #[cfg(test)]
-pub fn ident(ns: &str, name: &str) -> PatternNonValuePlace {
+pub(crate) fn ident(ns: &str, name: &str) -> PatternNonValuePlace {
 PatternNonValuePlace::Ident(::std::rc::Rc::new(NamespacedKeyword::new(ns, name)))
 }
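The `#[cfg(test)]` gating introduced above (and again in a later file) compiles its target only for test builds, so helpers used solely by tests add nothing to dev or release binaries. A small self-contained sketch of the pattern, with a hypothetical type:

    pub struct Counter { n: u64 }

    impl Counter {
        pub(crate) fn new() -> Counter {
            Counter { n: 0 }
        }

        // Exists only when building tests; dev and release builds never compile it.
        #[cfg(test)]
        pub(crate) fn with_start(n: u64) -> Counter {
            Counter { n: n }
        }
    }

    fn main() {
        let _ = Counter::new();
    }

    #[cfg(test)]
    mod tests {
        use super::Counter;

        #[test]
        fn starts_where_asked() {
            assert_eq!(Counter::with_start(7).n, 7);
        }
    }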

View file

@@ -29,7 +29,7 @@ use types::{
 use Known;
 impl ConjoiningClauses {
-pub fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
+pub(crate) fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> {
 let unified = match not_join.unify_vars {
 UnifyVars::Implicit => not_join.collect_mentioned_variables(),
 UnifyVars::Explicit(vs) => vs,

View file

@@ -105,7 +105,7 @@ impl ConjoiningClauses {
 }
 }
-pub fn apply_or_join(&mut self, known: Known, mut or_join: OrJoin) -> Result<()> {
+pub(crate) fn apply_or_join(&mut self, known: Known, mut or_join: OrJoin) -> Result<()> {
 // Simple optimization. Empty `or` clauses disappear. Unit `or` clauses
 // are equivalent to just the inner clause.

View file

@@ -81,7 +81,7 @@ impl ConjoiningClauses {
 /// existence subquery instead of a join.
 ///
 /// This method is only public for use from `or.rs`.
-pub fn apply_pattern_clause_for_alias(&mut self, known: Known, pattern: &EvolvedPattern, alias: &SourceAlias) {
+pub(crate) fn apply_pattern_clause_for_alias(&mut self, known: Known, pattern: &EvolvedPattern, alias: &SourceAlias) {
 if self.is_known_empty() {
 return;
 }
@@ -444,7 +444,7 @@ impl ConjoiningClauses {
 self.make_evolved_non_value(known, DatomsColumn::Tx, tx)
 }
-pub fn make_evolved_attribute(&self, known: &Known, attribute: PatternNonValuePlace) -> PlaceOrEmpty<(EvolvedNonValuePlace, Option<ValueType>)> {
+pub(crate) fn make_evolved_attribute(&self, known: &Known, attribute: PatternNonValuePlace) -> PlaceOrEmpty<(EvolvedNonValuePlace, Option<ValueType>)> {
 use self::PlaceOrEmpty::*;
 self.make_evolved_non_value(known, DatomsColumn::Attribute, attribute)
 .and_then(|a| {
@@ -461,7 +461,7 @@ impl ConjoiningClauses {
 })
 }
-pub fn make_evolved_value(&self,
+pub(crate) fn make_evolved_value(&self,
 known: &Known,
 value_type: Option<ValueType>,
 value: PatternValuePlace) -> PlaceOrEmpty<EvolvedValuePlace> {
@@ -524,7 +524,7 @@ impl ConjoiningClauses {
 }
 }
-pub fn make_evolved_pattern(&self, known: Known, pattern: Pattern) -> PlaceOrEmpty<EvolvedPattern> {
+pub(crate) fn make_evolved_pattern(&self, known: Known, pattern: Pattern) -> PlaceOrEmpty<EvolvedPattern> {
 let (e, a, v, tx, source) = (pattern.entity, pattern.attribute, pattern.value, pattern.tx, pattern.source);
 use self::PlaceOrEmpty::*;
 match self.make_evolved_entity(&known, e) {
@@ -558,7 +558,7 @@ impl ConjoiningClauses {
 /// Re-examine the pattern to see if it can be specialized or is now known to fail.
 #[allow(unused_variables)]
-pub fn evolve_pattern(&mut self, known: Known, mut pattern: EvolvedPattern) -> PlaceOrEmpty<EvolvedPattern> {
+pub(crate) fn evolve_pattern(&mut self, known: Known, mut pattern: EvolvedPattern) -> PlaceOrEmpty<EvolvedPattern> {
 use self::PlaceOrEmpty::*;
 let mut new_entity: Option<EvolvedNonValuePlace> = None;
@@ -606,7 +606,8 @@ impl ConjoiningClauses {
 Place(pattern)
 }
-pub fn apply_parsed_pattern(&mut self, known: Known, pattern: Pattern) {
+#[cfg(test)]
+pub(crate) fn apply_parsed_pattern(&mut self, known: Known, pattern: Pattern) {
 use self::PlaceOrEmpty::*;
 match self.make_evolved_pattern(known, pattern) {
 Empty(e) => self.mark_known_empty(e),
@@ -614,7 +615,7 @@ impl ConjoiningClauses {
 };
 }
-pub fn apply_pattern(&mut self, known: Known, pattern: EvolvedPattern) {
+pub(crate) fn apply_pattern(&mut self, known: Known, pattern: EvolvedPattern) {
 // For now we only support the default source.
 if pattern.source != SrcVar::DefaultSrc {
 unimplemented!();

View file

@@ -45,7 +45,7 @@ impl ConjoiningClauses {
 /// - In the future, some predicates that are implemented via function calls in SQLite.
 ///
 /// At present we have implemented only the five built-in comparison binary operators.
-pub fn apply_predicate(&mut self, known: Known, predicate: Predicate) -> Result<()> {
+pub(crate) fn apply_predicate(&mut self, known: Known, predicate: Predicate) -> Result<()> {
 // Because we'll be growing the set of built-in predicates, handling each differently,
 // and ultimately allowing user-specified predicates, we match on the predicate name first.
 if let Some(op) = Inequality::from_datalog_operator(predicate.operator.0.as_str()) {
@@ -64,7 +64,7 @@ impl ConjoiningClauses {
 /// Apply a type annotation, which is a construct like a predicate that constrains the argument
 /// to be a specific ValueType.
-pub fn apply_type_anno(&mut self, anno: &TypeAnnotation) -> Result<()> {
+pub(crate) fn apply_type_anno(&mut self, anno: &TypeAnnotation) -> Result<()> {
 self.add_type_requirement(anno.variable.clone(), ValueTypeSet::of_one(anno.value_type));
 Ok(())
 }
@@ -73,7 +73,7 @@ impl ConjoiningClauses {
 /// - Resolves variables and converts types to those more amenable to SQL.
 /// - Ensures that the predicate functions name a known operator.
 /// - Accumulates an `Inequality` constraint into the `wheres` list.
-pub fn apply_inequality(&mut self, known: Known, comparison: Inequality, predicate: Predicate) -> Result<()> {
+pub(crate) fn apply_inequality(&mut self, known: Known, comparison: Inequality, predicate: Predicate) -> Result<()> {
 if predicate.args.len() != 2 {
 bail!(ErrorKind::InvalidNumberOfArguments(predicate.operator.clone(), predicate.args.len(), 2));
 }

View file

@@ -39,7 +39,7 @@ impl ConjoiningClauses {
 /// Additionally, do two things:
 /// - Mark the pattern as known-empty if any argument is known non-numeric.
 /// - Mark any variables encountered as numeric.
-pub fn resolve_numeric_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> {
+pub(crate) fn resolve_numeric_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> {
 use self::FnArg::*;
 match arg {
 FnArg::Variable(var) => {
@@ -67,7 +67,7 @@ impl ConjoiningClauses {
 }
 /// Just like `resolve_numeric_argument`, but for `ValueType::Instant`.
-pub fn resolve_instant_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> {
+pub(crate) fn resolve_instant_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> {
 use self::FnArg::*;
 match arg {
 FnArg::Variable(var) => {

View file

@@ -31,7 +31,7 @@ impl ConjoiningClauses {
 /// - In the future, some functions that are implemented via function calls in SQLite.
 ///
 /// At present we have implemented only a limited selection of functions.
-pub fn apply_where_fn(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
+pub(crate) fn apply_where_fn(&mut self, known: Known, where_fn: WhereFn) -> Result<()> {
 // Because we'll be growing the set of built-in functions, handling each differently, and
 // ultimately allowing user-specified functions, we match on the function name first.
 match where_fn.operator.0.as_str() {

View file

@@ -310,4 +310,3 @@ pub use types::{
 TableAlias,
 VariableColumn,
 };
-

View file

@@ -610,13 +610,6 @@ impl<T> PlaceOrEmpty<T> {
 PlaceOrEmpty::Empty(e) => PlaceOrEmpty::Empty(e),
 }
 }
-pub fn then<F: FnOnce(T)>(self, f: F) {
-match self {
-PlaceOrEmpty::Place(x) => f(x),
-PlaceOrEmpty::Empty(_e) => (),
-}
-}
 }
 pub struct EvolvedPattern {

View file

@@ -44,7 +44,7 @@ use errors::{
 ///
 /// "As with rules, src-vars are not currently supported within the clauses of or, but are supported
 /// on the or clause as a whole at top level."
-pub fn validate_or_join(or_join: &OrJoin) -> Result<()> {
+pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> {
 // Grab our mentioned variables and ensure that the rules are followed.
 match or_join.unify_vars {
 UnifyVars::Implicit => {
@@ -75,7 +75,7 @@ pub fn validate_or_join(or_join: &OrJoin) -> Result<()> {
 }
 }
-pub fn validate_not_join(not_join: &NotJoin) -> Result<()> {
+pub(crate) fn validate_not_join(not_join: &NotJoin) -> Result<()> {
 // Grab our mentioned variables and ensure that the rules are followed.
 match not_join.unify_vars {
 UnifyVars::Implicit => {

View file

@@ -34,14 +34,6 @@ impl TempId {
 TempId::Internal(_) => None,
 }
 }
-pub fn into_internal(self) -> Option<i64> {
-match self {
-TempId::Internal(x) => Some(x),
-TempId::Tx |
-TempId::External(_) => None,
-}
-}
 }
 impl fmt::Display for TempId {
@@ -61,20 +53,6 @@ pub enum Entid {
 }
 impl Entid {
-pub fn is_backward(&self) -> bool {
-match self {
-&Entid::Entid(_) => false,
-&Entid::Ident(ref a) => a.is_backward(),
-}
-}
-pub fn to_reversed(&self) -> Option<Entid> {
-match self {
-&Entid::Entid(_) => None,
-&Entid::Ident(ref a) => Some(Entid::Ident(a.to_reversed())),
-}
-}
 pub fn unreversed(&self) -> Option<Entid> {
 match self {
 &Entid::Entid(_) => None,