From 1817ce7c0b4a261c9dc5bd1942b81d6bcf2cf623 Mon Sep 17 00:00:00 2001 From: Richard Newman Date: Tue, 6 Mar 2018 09:03:00 -0800 Subject: [PATCH] Performance and cleanup. r=emily * Use fixed-size arrays for bootstrap datoms, not vecs. * Wide-ranging cleanup. This commit: - Deletes some dead code. - Marks some functions only used by tests as cfg(test). - Adds pub(crate) to a bunch of functions. - Cleans up a few other nits. --- db/src/bootstrap.rs | 34 ++++++------- db/src/debug.rs | 26 +++++----- db/src/internal_types.rs | 2 +- db/src/tx.rs | 2 +- db/src/upsert_resolution.rs | 16 +++--- edn/src/matcher.rs | 2 +- edn/src/pretty_print.rs | 4 +- parser-utils/src/lib.rs | 4 -- parser-utils/src/log.rs | 6 +-- query-algebrizer/src/clauses/convert.rs | 6 +-- query-algebrizer/src/clauses/fulltext.rs | 2 +- query-algebrizer/src/clauses/ground.rs | 2 +- query-algebrizer/src/clauses/inputs.rs | 5 +- query-algebrizer/src/clauses/mod.rs | 60 +++++++++++------------ query-algebrizer/src/clauses/not.rs | 2 +- query-algebrizer/src/clauses/or.rs | 2 +- query-algebrizer/src/clauses/pattern.rs | 21 ++++---- query-algebrizer/src/clauses/predicate.rs | 6 +-- query-algebrizer/src/clauses/resolve.rs | 4 +- query-algebrizer/src/clauses/where_fn.rs | 2 +- query-algebrizer/src/lib.rs | 1 - query-algebrizer/src/types.rs | 7 --- query-algebrizer/src/validate.rs | 4 +- tx/src/entities.rs | 22 --------- 24 files changed, 103 insertions(+), 139 deletions(-) diff --git a/db/src/bootstrap.rs b/db/src/bootstrap.rs index d814aa18..1d8f4418 100644 --- a/db/src/bootstrap.rs +++ b/db/src/bootstrap.rs @@ -39,8 +39,8 @@ pub const USER0: i64 = 0x10000; pub const CORE_SCHEMA_VERSION: u32 = 1; lazy_static! { - static ref V1_IDENTS: Vec<(symbols::NamespacedKeyword, i64)> = { - vec![(ns_keyword!("db", "ident"), entids::DB_IDENT), + static ref V1_IDENTS: [(symbols::NamespacedKeyword, i64); 40] = { + [(ns_keyword!("db", "ident"), entids::DB_IDENT), (ns_keyword!("db.part", "db"), entids::DB_PART_DB), (ns_keyword!("db", "txInstant"), entids::DB_TX_INSTANT), (ns_keyword!("db.install", "partition"), entids::DB_INSTALL_PARTITION), @@ -83,15 +83,15 @@ lazy_static! 
{ ] }; - static ref V1_PARTS: Vec<(symbols::NamespacedKeyword, i64, i64)> = { - vec![(ns_keyword!("db.part", "db"), 0, (1 + V1_IDENTS.len()) as i64), + static ref V1_PARTS: [(symbols::NamespacedKeyword, i64, i64); 3] = { + [(ns_keyword!("db.part", "db"), 0, (1 + V1_IDENTS.len()) as i64), (ns_keyword!("db.part", "user"), USER0, USER0), (ns_keyword!("db.part", "tx"), TX0, TX0), ] }; - static ref V1_CORE_SCHEMA: Vec<(symbols::NamespacedKeyword)> = { - vec![(ns_keyword!("db", "ident")), + static ref V1_CORE_SCHEMA: [(symbols::NamespacedKeyword); 16] = { + [(ns_keyword!("db", "ident")), (ns_keyword!("db.install", "partition")), (ns_keyword!("db.install", "valueType")), (ns_keyword!("db.install", "attribute")), @@ -273,29 +273,29 @@ fn symbolic_schema_to_assertions(symbolic_schema: &Value) -> Result<Vec<Value>> Ok(assertions) } -pub fn bootstrap_partition_map() -> PartitionMap { - V1_PARTS[..].iter() - .map(|&(ref part, start, index)| (part.to_string(), Partition::new(start, index))) - .collect() +pub(crate) fn bootstrap_partition_map() -> PartitionMap { + V1_PARTS.iter() + .map(|&(ref part, start, index)| (part.to_string(), Partition::new(start, index))) + .collect() } -pub fn bootstrap_ident_map() -> IdentMap { - V1_IDENTS[..].iter() - .map(|&(ref ident, entid)| (ident.clone(), entid)) - .collect() +pub(crate) fn bootstrap_ident_map() -> IdentMap { + V1_IDENTS.iter() + .map(|&(ref ident, entid)| (ident.clone(), entid)) + .collect() } -pub fn bootstrap_schema() -> Schema { +pub(crate) fn bootstrap_schema() -> Schema { let ident_map = bootstrap_ident_map(); let bootstrap_triples = symbolic_schema_to_triples(&ident_map, &V1_SYMBOLIC_SCHEMA).unwrap(); Schema::from_ident_map_and_triples(ident_map, bootstrap_triples).unwrap() } -pub fn bootstrap_entities() -> Vec<Entity> { +pub(crate) fn bootstrap_entities() -> Vec<Entity> { let bootstrap_assertions: Value = Value::Vector([ symbolic_schema_to_assertions(&V1_SYMBOLIC_SCHEMA).unwrap(), idents_to_assertions(&V1_IDENTS[..]), - schema_attrs_to_assertions(CORE_SCHEMA_VERSION, &V1_CORE_SCHEMA), + schema_attrs_to_assertions(CORE_SCHEMA_VERSION, V1_CORE_SCHEMA.as_ref()), ].concat()); // Failure here is a coding error (since the inputs are fixed), not a runtime error. diff --git a/db/src/debug.rs b/db/src/debug.rs index 64aa30b5..b231796a 100644 --- a/db/src/debug.rs +++ b/db/src/debug.rs @@ -40,7 +40,7 @@ use types::Schema; /// Represents a *datom* (assertion) in the store. #[derive(Clone,Debug,Eq,Hash,Ord,PartialOrd,PartialEq)] -pub struct Datom { +pub(crate) struct Datom { // TODO: generalize this. e: Entid, a: Entid, @@ -54,7 +54,7 @@ pub struct Datom { /// To make comparision easier, we deterministically order. The ordering is the ascending tuple /// ordering determined by `(e, a, (value_type_tag, v), tx)`, where `value_type_tag` is an internal /// value that is not exposed but is deterministic. -pub struct Datoms(pub Vec<Datom>); +pub(crate) struct Datoms(pub Vec<Datom>); /// Represents an ordered sequence of transactions in the store. /// @@ -62,13 +62,13 @@ pub struct Datoms(pub Vec<Datom>); /// ordering determined by `(e, a, (value_type_tag, v), tx, added)`, where `value_type_tag` is an /// internal value that is not exposed but is deterministic, and `added` is ordered such that /// retracted assertions appear before added assertions. -pub struct Transactions(pub Vec<Datoms>); +pub(crate) struct Transactions(pub Vec<Datoms>); /// Represents the fulltext values in the store.
-pub struct FulltextValues(pub Vec<(i64, String)>); +pub(crate) struct FulltextValues(pub Vec<(i64, String)>); impl Datom { - pub fn into_edn(&self) -> edn::Value { + pub(crate) fn into_edn(&self) -> edn::Value { let f = |entid: &Entid| -> edn::Value { match *entid { Entid::Entid(ref y) => edn::Value::Integer(y.clone()), @@ -87,19 +87,19 @@ impl Datom { } impl Datoms { - pub fn into_edn(&self) -> edn::Value { + pub(crate) fn into_edn(&self) -> edn::Value { edn::Value::Vector((&self.0).into_iter().map(|x| x.into_edn()).collect()) } } impl Transactions { - pub fn into_edn(&self) -> edn::Value { + pub(crate) fn into_edn(&self) -> edn::Value { edn::Value::Vector((&self.0).into_iter().map(|x| x.into_edn()).collect()) } } impl FulltextValues { - pub fn into_edn(&self) -> edn::Value { + pub(crate) fn into_edn(&self) -> edn::Value { edn::Value::Vector((&self.0).into_iter().map(|&(x, ref y)| edn::Value::Vector(vec![edn::Value::Integer(x), edn::Value::Text(y.clone())])).collect()) } } @@ -126,7 +126,7 @@ fn to_entid(schema: &Schema, entid: i64) -> Entid { /// Return the set of datoms in the store, ordered by (e, a, v, tx), but not including any datoms of /// the form [... :db/txInstant ...]. -pub fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Result<Datoms> { +pub(crate) fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Result<Datoms> { datoms_after(conn, schema, bootstrap::TX0 - 1) } @@ -134,7 +134,7 @@ pub fn datoms<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S) -> Res /// ordered by (e, a, v, tx). /// /// The datom set returned does not include any datoms of the form [... :db/txInstant ...]. -pub fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Datoms> { +pub(crate) fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Datoms> { let borrowed_schema = schema.borrow(); let mut stmt: rusqlite::Statement = conn.prepare("SELECT e, a, v, value_type_tag, tx FROM datoms WHERE tx > ? ORDER BY e ASC, a ASC, value_type_tag ASC, v ASC, tx ASC")?; @@ -174,7 +174,7 @@ pub fn datoms_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, /// given `tx`, ordered by (tx, e, a, v). /// /// Each transaction returned includes the [:db/tx :db/txInstant ...] datom. -pub fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Transactions> { +pub(crate) fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema: &S, tx: i64) -> Result<Transactions> { let borrowed_schema = schema.borrow(); let mut stmt: rusqlite::Statement = conn.prepare("SELECT e, a, v, value_type_tag, tx, added FROM transactions WHERE tx > ? ORDER BY tx ASC, e ASC, a ASC, value_type_tag ASC, v ASC, added ASC")?; @@ -210,7 +210,7 @@ pub fn transactions_after<S: Borrow<Schema>>(conn: &rusqlite::Connection, schema } /// Return the set of fulltext values in the store, ordered by rowid. -pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> { +pub(crate) fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> { let mut stmt: rusqlite::Statement = conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?; let r: Result<Vec<_>> = stmt.query_and_then(&[], |row| { @@ -227,7 +227,7 @@ pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> { /// /// The query is printed followed by a newline, then the returned columns followed by a newline, and /// then the data rows and columns. All columns are aligned.
-pub fn dump_sql_query(conn: &rusqlite::Connection, sql: &str, params: &[&ToSql]) -> Result<String> { +pub(crate) fn dump_sql_query(conn: &rusqlite::Connection, sql: &str, params: &[&ToSql]) -> Result<String> { let mut stmt: rusqlite::Statement = conn.prepare(sql)?; let mut tw = TabWriter::new(Vec::new()).padding(2); diff --git a/db/src/internal_types.rs b/db/src/internal_types.rs index 67b2f3c6..11104499 100644 --- a/db/src/internal_types.rs +++ b/db/src/internal_types.rs @@ -65,7 +65,7 @@ impl TermWithTempIds { // These have no tempids by definition, and just need to be unwrapped. This operation might // also be called "lowering" or "level lowering", but the concept of "unwrapping" is common in // Rust and seems appropriate here. - pub fn unwrap(self) -> TermWithoutTempIds { + pub(crate) fn unwrap(self) -> TermWithoutTempIds { match self { Term::AddOrRetract(op, Left(n), a, Left(v)) => Term::AddOrRetract(op, n, a, v), _ => unreachable!(), diff --git a/db/src/tx.rs b/db/src/tx.rs index d416dfc8..8264e174 100644 --- a/db/src/tx.rs +++ b/db/src/tx.rs @@ -175,7 +175,7 @@ impl<'conn, 'a, W> Tx<'conn, 'a, W> where W: TransactWatcher { /// Given a collection of tempids and the [a v] pairs that they might upsert to, resolve exactly /// which [a v] pairs do upsert to entids, and map each tempid that upserts to the upserted /// entid. The keys of the resulting map are exactly those tempids that upserted. - pub fn resolve_temp_id_avs<'b>(&self, temp_id_avs: &'b [(TempIdHandle, AVPair)]) -> Result<TempIdMap> { + pub(crate) fn resolve_temp_id_avs<'b>(&self, temp_id_avs: &'b [(TempIdHandle, AVPair)]) -> Result<TempIdMap> { if temp_id_avs.is_empty() { return Ok(TempIdMap::default()); } diff --git a/db/src/upsert_resolution.rs b/db/src/upsert_resolution.rs index 3c74562f..da44edc9 100644 --- a/db/src/upsert_resolution.rs +++ b/db/src/upsert_resolution.rs @@ -56,7 +56,7 @@ struct UpsertEV(TempIdHandle, Entid, TempIdHandle); /// entid allocations. That's why we separate into special simple and complex upsert types /// immediately, and then collect the more general term types for final resolution. #[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)] -pub struct Generation { +pub(crate) struct Generation { /// "Simple upserts" that look like [:db/add TEMPID a v], where a is :db.unique/identity. upserts_e: Vec<UpsertE>, @@ -79,7 +79,7 @@ pub struct Generation { } #[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)] -pub struct FinalPopulations { +pub(crate) struct FinalPopulations { /// Upserts that upserted. pub upserted: Vec<TermWithoutTempIds>, @@ -93,7 +93,7 @@ impl Generation { /// Split entities into a generation of populations that need to evolve to have their tempids /// resolved or allocated, and a population of inert entities that do not reference tempids. - pub fn from<I>(terms: I, schema: &Schema) -> errors::Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> { + pub(crate) fn from<I>(terms: I, schema: &Schema) -> errors::Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> { let mut generation = Generation::default(); let mut inert = vec![]; @@ -135,7 +135,7 @@ impl Generation { /// There can be complex upserts but no simple upserts to help resolve them. We accept the /// overhead of having the database try to resolve an empty set of simple upserts, to avoid /// having to special case complex upserts at entid allocation time.
- pub fn can_evolve(&self) -> bool { + pub(crate) fn can_evolve(&self) -> bool { !self.upserts_e.is_empty() || !self.upserts_ev.is_empty() } @@ -143,7 +143,7 @@ impl Generation { /// given temporary IDs. /// /// TODO: Considering doing this in place; the function already consumes `self`. - pub fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation { + pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation { let mut next = Generation::default(); for UpsertE(t, a, v) in self.upserts_e { @@ -196,7 +196,7 @@ impl Generation { } // Collect id->[a v] pairs that might upsert at this evolutionary step. - pub fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> { + pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> { let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![]; // TODO: map/collect. for &UpsertE(ref t, ref a, ref v) in &self.upserts_e { @@ -210,7 +210,7 @@ impl Generation { /// After evolution is complete, yield the set of tempids that require entid allocation. These /// are the tempids that appeared in [:db/add ...] entities, but that didn't upsert to existing /// entids. - pub fn temp_ids_in_allocations(&self) -> BTreeSet<TempIdHandle> { + pub(crate) fn temp_ids_in_allocations(&self) -> BTreeSet<TempIdHandle> { assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!"); assert!(self.upserts_ev.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!"); @@ -241,7 +241,7 @@ impl Generation { /// After evolution is complete, use the provided allocated entids to segment `self` into /// populations, each with no references to tempids. - pub fn into_final_populations(self, temp_id_map: &TempIdMap) -> errors::Result<FinalPopulations> { + pub(crate) fn into_final_populations(self, temp_id_map: &TempIdMap) -> errors::Result<FinalPopulations> { assert!(self.upserts_e.is_empty()); assert!(self.upserts_ev.is_empty()); diff --git a/edn/src/matcher.rs b/edn/src/matcher.rs index a548ab5f..8d462045 100644 --- a/edn/src/matcher.rs +++ b/edn/src/matcher.rs @@ -66,7 +66,7 @@ impl<'a> Matcher<'a> { /// Performs pattern matching between two EDN `Value` instances (`value` /// and `pattern`) utilizing a specified pattern matching ruleset `T`. /// Returns true if matching succeeds. - pub fn match_with_rules<T>(value: &'a Value, pattern: &'a Value) -> bool + fn match_with_rules<T>(value: &'a Value, pattern: &'a Value) -> bool where T: PatternMatchingRules<'a, Value> { let matcher = Matcher::new(); matcher.match_internal::<T>(value, pattern) diff --git a/edn/src/pretty_print.rs b/edn/src/pretty_print.rs index e52cdb5f..de0525dd 100644 --- a/edn/src/pretty_print.rs +++ b/edn/src/pretty_print.rs @@ -25,7 +25,7 @@ impl Value { } /// Write a pretty representation of this `Value` to the given writer. - pub fn write_pretty<W>(&self, width: usize, out: &mut W) -> Result<(), io::Error> where W: io::Write { + fn write_pretty<W>(&self, width: usize, out: &mut W) -> Result<(), io::Error> where W: io::Write { self.as_doc(&pretty::BoxAllocator).1.render(width, out) } @@ -51,7 +51,7 @@ impl Value { /// Recursively traverses this value and creates a pretty.rs document. /// This pretty printing implementation is optimized for edn queries /// readability and limited whitespace expansion.
- pub fn as_doc<'a, A>(&'a self, pp: &'a A) -> pretty::DocBuilder<'a, A> + fn as_doc<'a, A>(&'a self, pp: &'a A) -> pretty::DocBuilder<'a, A> where A: pretty::DocAllocator<'a> { match *self { Value::Vector(ref vs) => self.bracket(pp, "[", vs, "]"), diff --git a/parser-utils/src/lib.rs b/parser-utils/src/lib.rs index 5f033e89..ca85bc64 100644 --- a/parser-utils/src/lib.rs +++ b/parser-utils/src/lib.rs @@ -39,10 +39,6 @@ pub use value_and_span::{ Stream, }; -pub use log::{ - LogParsing, -}; - impl std::fmt::Debug for ValueParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, diff --git a/parser-utils/src/log.rs b/parser-utils/src/log.rs index 0c030e25..e1cac87f 100644 --- a/parser-utils/src/log.rs +++ b/parser-utils/src/log.rs @@ -16,7 +16,7 @@ use combine::{ }; #[derive(Clone)] -pub struct Log<P, T>(P, T) +pub(crate) struct Log<P, T>(P, T) where P: Parser, T: ::std::fmt::Debug; @@ -46,7 +46,7 @@ impl<P, T> Parser for Log<P, T> } #[inline(always)] -pub fn log<P, T>(p: P, msg: T) -> Log<P, T> +pub(crate) fn log<P, T>(p: P, msg: T) -> Log<P, T> where P: Parser, T: ::std::fmt::Debug, { @@ -54,7 +54,7 @@ pub fn log<P, T>(p: P, msg: T) -> Log<P, T> } /// We need a trait to define `Parser.log` and have it live outside of the `combine` crate. -pub trait LogParsing: Parser + Sized { +pub(crate) trait LogParsing: Parser + Sized { fn log<T>(self, msg: T) -> Log<Self, T> where Self: Sized, T: ::std::fmt::Debug; diff --git a/query-algebrizer/src/clauses/convert.rs b/query-algebrizer/src/clauses/convert.rs index ed528751..fd72a36e 100644 --- a/query-algebrizer/src/clauses/convert.rs +++ b/query-algebrizer/src/clauses/convert.rs @@ -52,7 +52,7 @@ macro_rules! coerce_to_typed_value { } } } -pub trait ValueTypes { +pub(crate) trait ValueTypes { fn potential_types(&self, schema: &Schema) -> Result<ValueTypeSet>; } @@ -99,7 +99,7 @@ impl ValueTypes for FnArg { } } -pub enum ValueConversion { +pub(crate) enum ValueConversion { Val(TypedValue), Impossible(EmptyBecause), } @@ -110,7 +110,7 @@ impl ConjoiningClauses { /// The conversion depends on, and can fail because of: /// - Existing known types of a variable to which this arg will be bound. /// - Existing bindings of a variable `FnArg`. - pub fn typed_value_from_arg<'s>(&self, schema: &'s Schema, var: &Variable, arg: FnArg, known_types: ValueTypeSet) -> Result<ValueConversion> { + pub(crate) fn typed_value_from_arg<'s>(&self, schema: &'s Schema, var: &Variable, arg: FnArg, known_types: ValueTypeSet) -> Result<ValueConversion> { use self::ValueConversion::*; if known_types.is_empty() { // If this happens, it likely means the pattern has already failed!
diff --git a/query-algebrizer/src/clauses/fulltext.rs b/query-algebrizer/src/clauses/fulltext.rs index 642e8d54..642d1a71 100644 --- a/query-algebrizer/src/clauses/fulltext.rs +++ b/query-algebrizer/src/clauses/fulltext.rs @@ -51,7 +51,7 @@ use Known; impl ConjoiningClauses { #[allow(unused_variables)] - pub fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { + pub(crate) fn apply_fulltext(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { if where_fn.args.len() != 3 { bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 3)); } diff --git a/query-algebrizer/src/clauses/ground.rs b/query-algebrizer/src/clauses/ground.rs index f6c93c74..5d638485 100644 --- a/query-algebrizer/src/clauses/ground.rs +++ b/query-algebrizer/src/clauses/ground.rs @@ -115,7 +115,7 @@ impl ConjoiningClauses { Ok(()) } - pub fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { + pub(crate) fn apply_ground(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { if where_fn.args.len() != 1 { bail!(ErrorKind::InvalidNumberOfArguments(where_fn.operator.clone(), where_fn.args.len(), 1)); } diff --git a/query-algebrizer/src/clauses/inputs.rs b/query-algebrizer/src/clauses/inputs.rs index 904c536c..6a4c73af 100644 --- a/query-algebrizer/src/clauses/inputs.rs +++ b/query-algebrizer/src/clauses/inputs.rs @@ -31,9 +31,8 @@ use errors::{ /// When built correctly, `types` is guaranteed to contain the types of `values` -- use /// `QueryInputs::new` or `QueryInputs::with_values` to construct an instance. pub struct QueryInputs { - // These should be crate-private. - pub types: BTreeMap<Variable, ValueType>, - pub values: BTreeMap<Variable, TypedValue>, + pub(crate) types: BTreeMap<Variable, ValueType>, + pub(crate) values: BTreeMap<Variable, TypedValue>, } impl Default for QueryInputs { diff --git a/query-algebrizer/src/clauses/mod.rs b/query-algebrizer/src/clauses/mod.rs index 1acc09c3..f43aa5fb 100644 --- a/query-algebrizer/src/clauses/mod.rs +++ b/query-algebrizer/src/clauses/mod.rs @@ -290,21 +290,22 @@ impl ConjoiningClauses { /// Construct a new `ConjoiningClauses` with the provided alias counter. This allows a caller /// to share a counter with an enclosing scope, and to start counting at a particular offset /// for testing. - pub fn with_alias_counter(counter: RcCounter) -> ConjoiningClauses { + pub(crate) fn with_alias_counter(counter: RcCounter) -> ConjoiningClauses { ConjoiningClauses { alias_counter: counter, ..Default::default() } } + #[cfg(test)] pub fn with_inputs<T>(in_variables: BTreeSet<Variable>, inputs: T) -> ConjoiningClauses where T: Into<Option<QueryInputs>> { ConjoiningClauses::with_inputs_and_alias_counter(in_variables, inputs, RcCounter::new()) } - pub fn with_inputs_and_alias_counter<T>(in_variables: BTreeSet<Variable>, - inputs: T, - alias_counter: RcCounter) -> ConjoiningClauses + pub(crate) fn with_inputs_and_alias_counter<T>(in_variables: BTreeSet<Variable>, + inputs: T, + alias_counter: RcCounter) -> ConjoiningClauses where T: Into<Option<QueryInputs>> { match inputs.into() { None => ConjoiningClauses::with_alias_counter(alias_counter), @@ -389,7 +390,7 @@ impl ConjoiningClauses { self.value_bindings.get(var).cloned() } - pub fn is_value_bound(&self, var: &Variable) -> bool { + pub(crate) fn is_value_bound(&self, var: &Variable) -> bool { self.value_bindings.contains_key(var) } @@ -397,13 +398,8 @@ impl ConjoiningClauses { self.value_bindings.with_intersected_keys(variables) } - /// Return an interator over the variables externally bound to values.
- pub fn value_bound_variables(&self) -> ::std::collections::btree_map::Keys<Variable, TypedValue> { - self.value_bindings.keys() - } - /// Return a set of the variables externally bound to values. - pub fn value_bound_variable_set(&self) -> BTreeSet<Variable> { + pub(crate) fn value_bound_variable_set(&self) -> BTreeSet<Variable> { self.value_bindings.keys().cloned().collect() } @@ -418,11 +414,11 @@ impl ConjoiningClauses { } } - pub fn known_type_set(&self, var: &Variable) -> ValueTypeSet { + pub(crate) fn known_type_set(&self, var: &Variable) -> ValueTypeSet { self.known_types.get(var).cloned().unwrap_or(ValueTypeSet::any()) } - pub fn bind_column_to_var<C: Into<Column>>(&mut self, schema: &Schema, table: TableAlias, column: C, var: Variable) { + pub(crate) fn bind_column_to_var<C: Into<Column>>(&mut self, schema: &Schema, table: TableAlias, column: C, var: Variable) { let column = column.into(); // Do we have an external binding for this? if let Some(bound_val) = self.bound_value(&var) { @@ -515,7 +511,7 @@ impl ConjoiningClauses { self.column_bindings.entry(var).or_insert(vec![]).push(alias); } - pub fn constrain_column_to_constant<C: Into<Column>>(&mut self, table: TableAlias, column: C, constant: TypedValue) { + pub(crate) fn constrain_column_to_constant<C: Into<Column>>(&mut self, table: TableAlias, column: C, constant: TypedValue) { match constant { // Be a little more explicit. TypedValue::Ref(entid) => self.constrain_column_to_entity(table, column, entid), @@ -526,23 +522,23 @@ impl ConjoiningClauses { } } - pub fn constrain_column_to_entity<C: Into<Column>>(&mut self, table: TableAlias, column: C, entity: Entid) { + pub(crate) fn constrain_column_to_entity<C: Into<Column>>(&mut self, table: TableAlias, column: C, entity: Entid) { let column = column.into(); self.wheres.add_intersection(ColumnConstraint::Equals(QualifiedAlias(table, column), QueryValue::Entid(entity))) } - pub fn constrain_attribute(&mut self, table: TableAlias, attribute: Entid) { + pub(crate) fn constrain_attribute(&mut self, table: TableAlias, attribute: Entid) { self.constrain_column_to_entity(table, DatomsColumn::Attribute, attribute) } - pub fn constrain_value_to_numeric(&mut self, table: TableAlias, value: i64) { + pub(crate) fn constrain_value_to_numeric(&mut self, table: TableAlias, value: i64) { self.wheres.add_intersection(ColumnConstraint::Equals( QualifiedAlias(table, Column::Fixed(DatomsColumn::Value)), QueryValue::PrimitiveLong(value))) } /// Mark the given value as a long. - pub fn constrain_var_to_long(&mut self, variable: Variable) { + pub(crate) fn constrain_var_to_long(&mut self, variable: Variable) { self.narrow_types_for_var(variable, ValueTypeSet::of_one(ValueType::Long)); } @@ -551,7 +547,7 @@ impl ConjoiningClauses { self.narrow_types_for_var(variable, ValueTypeSet::of_numeric_types()); } - pub fn can_constrain_var_to_type(&self, var: &Variable, this_type: ValueType) -> Option<EmptyBecause> { + pub(crate) fn can_constrain_var_to_type(&self, var: &Variable, this_type: ValueType) -> Option<EmptyBecause> { self.can_constrain_var_to_types(var, ValueTypeSet::of_one(this_type)) } @@ -591,7 +587,7 @@ impl ConjoiningClauses { /// /// If the intersection will leave the variable so that it cannot be any /// type, we'll call `mark_known_empty`.
- pub fn add_type_requirement(&mut self, var: Variable, types: ValueTypeSet) { + pub(crate) fn add_type_requirement(&mut self, var: Variable, types: ValueTypeSet) { if types.is_empty() { // This shouldn't happen, but if it does… self.mark_known_empty(EmptyBecause::NoValidTypes(var)); @@ -632,7 +628,7 @@ impl ConjoiningClauses { /// actually move from a state in which a variable can have no type to one that can /// yield results! We never do so at present -- we carefully set-union types before we /// set-intersect them -- but this is worth bearing in mind. - pub fn broaden_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) { + pub(crate) fn broaden_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) { for (var, new_types) in additional_types { match self.known_types.entry(var) { Entry::Vacant(e) => { @@ -696,7 +692,7 @@ impl ConjoiningClauses { /// Restrict the sets of types for the provided vars to the provided types. /// See `narrow_types_for_var`. - pub fn narrow_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) { + pub(crate) fn narrow_types(&mut self, additional_types: BTreeMap<Variable, ValueTypeSet>) { if additional_types.is_empty() { return; } @@ -831,7 +827,7 @@ impl ConjoiningClauses { } } - pub fn next_alias_for_table(&mut self, table: DatomsTable) -> TableAlias { + pub(crate) fn next_alias_for_table(&mut self, table: DatomsTable) -> TableAlias { match table { DatomsTable::Computed(u) => format!("{}{:02}", table.name(), u), @@ -900,7 +896,7 @@ impl ConjoiningClauses { /// datoms12.e = datoms13.v /// datoms12.e = datoms14.e /// ``` - pub fn expand_column_bindings(&mut self) { + pub(crate) fn expand_column_bindings(&mut self) { for cols in self.column_bindings.values() { if cols.len() > 1 { let ref primary = cols[0]; @@ -916,7 +912,7 @@ impl ConjoiningClauses { } /// Eliminate any type extractions for variables whose types are definitely known. - pub fn prune_extracted_types(&mut self) { + pub(crate) fn prune_extracted_types(&mut self) { if self.extracted_types.is_empty() || self.known_types.is_empty() { return; } @@ -927,7 +923,7 @@ impl ConjoiningClauses { } } - pub fn process_required_types(&mut self) -> Result<()> { + pub(crate) fn process_required_types(&mut self) -> Result<()> { if self.empty_because.is_some() { return Ok(()) } @@ -986,6 +982,7 @@ impl ConjoiningClauses { } Ok(()) } + /// When a CC has accumulated all patterns, generate value_type_tag entries in `wheres` /// to refine value types for which two things are true: /// @@ -1004,7 +1001,8 @@ impl ConjoiningClauses { /// /// where `?y` must simultaneously be a ref-typed attribute and a boolean-typed attribute. This /// function deduces that and calls `self.mark_known_empty`. #293. - pub fn expand_type_tags(&mut self) { + #[allow(dead_code)] + pub(crate) fn expand_type_tags(&mut self) { // TODO. } } @@ -1023,7 +1021,7 @@ impl ConjoiningClauses { Ok(()) } - pub fn apply_clauses(&mut self, known: Known, where_clauses: Vec<WhereClause>) -> Result<()> { + pub(crate) fn apply_clauses(&mut self, known: Known, where_clauses: Vec<WhereClause>) -> Result<()> { // We apply (top level) type predicates first as an optimization. for clause in where_clauses.iter() { if let &WhereClause::TypeAnnotation(ref anno) = clause { @@ -1065,7 +1063,7 @@ impl ConjoiningClauses { // This is here, rather than in `lib.rs`, because it's recursive: `or` can contain `or`, // and so on.
- pub fn apply_clause(&mut self, known: Known, where_clause: WhereClause) -> Result<()> { + pub(crate) fn apply_clause(&mut self, known: Known, where_clause: WhereClause) -> Result<()> { match where_clause { WhereClause::Pattern(p) => { match self.make_evolved_pattern(known, p) { @@ -1096,7 +1094,7 @@ impl ConjoiningClauses { } } -pub trait PushComputed { +pub(crate) trait PushComputed { fn push_computed(&mut self, item: ComputedTable) -> DatomsTable; } @@ -1121,7 +1119,7 @@ fn add_attribute(schema: &mut Schema, e: Entid, a: Attribute) { } #[cfg(test)] -pub fn ident(ns: &str, name: &str) -> PatternNonValuePlace { +pub(crate) fn ident(ns: &str, name: &str) -> PatternNonValuePlace { PatternNonValuePlace::Ident(::std::rc::Rc::new(NamespacedKeyword::new(ns, name))) } diff --git a/query-algebrizer/src/clauses/not.rs b/query-algebrizer/src/clauses/not.rs index 4051948a..c5aa52e0 100644 --- a/query-algebrizer/src/clauses/not.rs +++ b/query-algebrizer/src/clauses/not.rs @@ -29,7 +29,7 @@ use types::{ use Known; impl ConjoiningClauses { - pub fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> { + pub(crate) fn apply_not_join(&mut self, known: Known, not_join: NotJoin) -> Result<()> { let unified = match not_join.unify_vars { UnifyVars::Implicit => not_join.collect_mentioned_variables(), UnifyVars::Explicit(vs) => vs, diff --git a/query-algebrizer/src/clauses/or.rs b/query-algebrizer/src/clauses/or.rs index 03853b38..e54be351 100644 --- a/query-algebrizer/src/clauses/or.rs +++ b/query-algebrizer/src/clauses/or.rs @@ -105,7 +105,7 @@ impl ConjoiningClauses { } } - pub fn apply_or_join(&mut self, known: Known, mut or_join: OrJoin) -> Result<()> { + pub(crate) fn apply_or_join(&mut self, known: Known, mut or_join: OrJoin) -> Result<()> { // Simple optimization. Empty `or` clauses disappear. Unit `or` clauses // are equivalent to just the inner clause. diff --git a/query-algebrizer/src/clauses/pattern.rs b/query-algebrizer/src/clauses/pattern.rs index d5104ebf..28ea8a79 100644 --- a/query-algebrizer/src/clauses/pattern.rs +++ b/query-algebrizer/src/clauses/pattern.rs @@ -81,7 +81,7 @@ impl ConjoiningClauses { /// existence subquery instead of a join. /// /// This method is only public for use from `or.rs`. 
- pub fn apply_pattern_clause_for_alias(&mut self, known: Known, pattern: &EvolvedPattern, alias: &SourceAlias) { + pub(crate) fn apply_pattern_clause_for_alias(&mut self, known: Known, pattern: &EvolvedPattern, alias: &SourceAlias) { if self.is_known_empty() { return; } @@ -444,7 +444,7 @@ impl ConjoiningClauses { self.make_evolved_non_value(known, DatomsColumn::Tx, tx) } - pub fn make_evolved_attribute(&self, known: &Known, attribute: PatternNonValuePlace) -> PlaceOrEmpty<(EvolvedNonValuePlace, Option<ValueType>)> { + pub(crate) fn make_evolved_attribute(&self, known: &Known, attribute: PatternNonValuePlace) -> PlaceOrEmpty<(EvolvedNonValuePlace, Option<ValueType>)> { use self::PlaceOrEmpty::*; self.make_evolved_non_value(known, DatomsColumn::Attribute, attribute) .and_then(|a| { @@ -461,10 +461,10 @@ impl ConjoiningClauses { }) } - pub fn make_evolved_value(&self, - known: &Known, - value_type: Option<ValueType>, - value: PatternValuePlace) -> PlaceOrEmpty<EvolvedValuePlace> { + pub(crate) fn make_evolved_value(&self, + known: &Known, + value_type: Option<ValueType>, + value: PatternValuePlace) -> PlaceOrEmpty<EvolvedValuePlace> { use self::PlaceOrEmpty::*; match value { PatternValuePlace::Placeholder => Place(EvolvedValuePlace::Placeholder), @@ -524,7 +524,7 @@ impl ConjoiningClauses { } } - pub fn make_evolved_pattern(&self, known: Known, pattern: Pattern) -> PlaceOrEmpty<EvolvedPattern> { + pub(crate) fn make_evolved_pattern(&self, known: Known, pattern: Pattern) -> PlaceOrEmpty<EvolvedPattern> { let (e, a, v, tx, source) = (pattern.entity, pattern.attribute, pattern.value, pattern.tx, pattern.source); use self::PlaceOrEmpty::*; match self.make_evolved_entity(&known, e) { @@ -558,7 +558,7 @@ impl ConjoiningClauses { /// Re-examine the pattern to see if it can be specialized or is now known to fail. #[allow(unused_variables)] - pub fn evolve_pattern(&mut self, known: Known, mut pattern: EvolvedPattern) -> PlaceOrEmpty<EvolvedPattern> { + pub(crate) fn evolve_pattern(&mut self, known: Known, mut pattern: EvolvedPattern) -> PlaceOrEmpty<EvolvedPattern> { use self::PlaceOrEmpty::*; let mut new_entity: Option<EvolvedNonValuePlace> = None; @@ -606,7 +606,8 @@ impl ConjoiningClauses { Place(pattern) } - pub fn apply_parsed_pattern(&mut self, known: Known, pattern: Pattern) { + #[cfg(test)] + pub(crate) fn apply_parsed_pattern(&mut self, known: Known, pattern: Pattern) { use self::PlaceOrEmpty::*; match self.make_evolved_pattern(known, pattern) { Empty(e) => self.mark_known_empty(e), @@ -614,7 +615,7 @@ impl ConjoiningClauses { }; } - pub fn apply_pattern(&mut self, known: Known, pattern: EvolvedPattern) { + pub(crate) fn apply_pattern(&mut self, known: Known, pattern: EvolvedPattern) { // For now we only support the default source. if pattern.source != SrcVar::DefaultSrc { unimplemented!(); diff --git a/query-algebrizer/src/clauses/predicate.rs b/query-algebrizer/src/clauses/predicate.rs index ba058a3d..f2b6a509 100644 --- a/query-algebrizer/src/clauses/predicate.rs +++ b/query-algebrizer/src/clauses/predicate.rs @@ -45,7 +45,7 @@ impl ConjoiningClauses { /// - In the future, some predicates that are implemented via function calls in SQLite. /// /// At present we have implemented only the five built-in comparison binary operators. - pub fn apply_predicate(&mut self, known: Known, predicate: Predicate) -> Result<()> { + pub(crate) fn apply_predicate(&mut self, known: Known, predicate: Predicate) -> Result<()> { // Because we'll be growing the set of built-in predicates, handling each differently, // and ultimately allowing user-specified predicates, we match on the predicate name first.
if let Some(op) = Inequality::from_datalog_operator(predicate.operator.0.as_str()) { @@ -64,7 +64,7 @@ impl ConjoiningClauses { /// Apply a type annotation, which is a construct like a predicate that constrains the argument /// to be a specific ValueType. - pub fn apply_type_anno(&mut self, anno: &TypeAnnotation) -> Result<()> { + pub(crate) fn apply_type_anno(&mut self, anno: &TypeAnnotation) -> Result<()> { self.add_type_requirement(anno.variable.clone(), ValueTypeSet::of_one(anno.value_type)); Ok(()) } @@ -73,7 +73,7 @@ impl ConjoiningClauses { /// - Resolves variables and converts types to those more amenable to SQL. /// - Ensures that the predicate functions name a known operator. /// - Accumulates an `Inequality` constraint into the `wheres` list. - pub fn apply_inequality(&mut self, known: Known, comparison: Inequality, predicate: Predicate) -> Result<()> { + pub(crate) fn apply_inequality(&mut self, known: Known, comparison: Inequality, predicate: Predicate) -> Result<()> { if predicate.args.len() != 2 { bail!(ErrorKind::InvalidNumberOfArguments(predicate.operator.clone(), predicate.args.len(), 2)); } diff --git a/query-algebrizer/src/clauses/resolve.rs b/query-algebrizer/src/clauses/resolve.rs index 4abeb5ca..9b86a10c 100644 --- a/query-algebrizer/src/clauses/resolve.rs +++ b/query-algebrizer/src/clauses/resolve.rs @@ -39,7 +39,7 @@ impl ConjoiningClauses { /// Additionally, do two things: /// - Mark the pattern as known-empty if any argument is known non-numeric. /// - Mark any variables encountered as numeric. - pub fn resolve_numeric_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> { + pub(crate) fn resolve_numeric_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> { use self::FnArg::*; match arg { FnArg::Variable(var) => { @@ -67,7 +67,7 @@ impl ConjoiningClauses { } /// Just like `resolve_numeric_argument`, but for `ValueType::Instant`. - pub fn resolve_instant_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> { + pub(crate) fn resolve_instant_argument(&mut self, function: &PlainSymbol, position: usize, arg: FnArg) -> Result<QueryValue> { use self::FnArg::*; match arg { FnArg::Variable(var) => { diff --git a/query-algebrizer/src/clauses/where_fn.rs b/query-algebrizer/src/clauses/where_fn.rs index 01691881..73087b3c 100644 --- a/query-algebrizer/src/clauses/where_fn.rs +++ b/query-algebrizer/src/clauses/where_fn.rs @@ -31,7 +31,7 @@ impl ConjoiningClauses { /// - In the future, some functions that are implemented via function calls in SQLite. /// /// At present we have implemented only a limited selection of functions. - pub fn apply_where_fn(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { + pub(crate) fn apply_where_fn(&mut self, known: Known, where_fn: WhereFn) -> Result<()> { // Because we'll be growing the set of built-in functions, handling each differently, and // ultimately allowing user-specified functions, we match on the function name first.
match where_fn.operator.0.as_str() { diff --git a/query-algebrizer/src/lib.rs b/query-algebrizer/src/lib.rs index ed13ba75..32781d92 100644 --- a/query-algebrizer/src/lib.rs +++ b/query-algebrizer/src/lib.rs @@ -310,4 +310,3 @@ pub use types::{ TableAlias, VariableColumn, }; - diff --git a/query-algebrizer/src/types.rs b/query-algebrizer/src/types.rs index beff197b..1755d568 100644 --- a/query-algebrizer/src/types.rs +++ b/query-algebrizer/src/types.rs @@ -610,13 +610,6 @@ impl<T> PlaceOrEmpty<T> { PlaceOrEmpty::Empty(e) => PlaceOrEmpty::Empty(e), } } - - pub fn then<F: FnOnce(T)>(self, f: F) { - match self { - PlaceOrEmpty::Place(x) => f(x), - PlaceOrEmpty::Empty(_e) => (), - } - } } pub struct EvolvedPattern { diff --git a/query-algebrizer/src/validate.rs b/query-algebrizer/src/validate.rs index 6ad9483b..25a23f9e 100644 --- a/query-algebrizer/src/validate.rs +++ b/query-algebrizer/src/validate.rs @@ -44,7 +44,7 @@ use errors::{ /// /// "As with rules, src-vars are not currently supported within the clauses of or, but are supported /// on the or clause as a whole at top level." -pub fn validate_or_join(or_join: &OrJoin) -> Result<()> { +pub(crate) fn validate_or_join(or_join: &OrJoin) -> Result<()> { // Grab our mentioned variables and ensure that the rules are followed. match or_join.unify_vars { UnifyVars::Implicit => { @@ -75,7 +75,7 @@ pub fn validate_or_join(or_join: &OrJoin) -> Result<()> { } } -pub fn validate_not_join(not_join: &NotJoin) -> Result<()> { +pub(crate) fn validate_not_join(not_join: &NotJoin) -> Result<()> { // Grab our mentioned variables and ensure that the rules are followed. match not_join.unify_vars { UnifyVars::Implicit => { diff --git a/tx/src/entities.rs b/tx/src/entities.rs index 0da757e9..e37814ff 100644 --- a/tx/src/entities.rs +++ b/tx/src/entities.rs @@ -34,14 +34,6 @@ impl TempId { TempId::Internal(_) => None, } } - - pub fn into_internal(self) -> Option<i64> { - match self { - TempId::Internal(x) => Some(x), - TempId::Tx | - TempId::External(_) => None, - } - } } impl fmt::Display for TempId { @@ -61,20 +53,6 @@ pub enum Entid { } impl Entid { - pub fn is_backward(&self) -> bool { - match self { - &Entid::Entid(_) => false, - &Entid::Ident(ref a) => a.is_backward(), - } - } - - pub fn to_reversed(&self) -> Option<Entid> { - match self { - &Entid::Entid(_) => None, - &Entid::Ident(ref a) => Some(Entid::Ident(a.to_reversed())), - } - } - pub fn unreversed(&self) -> Option<Entid> { match self { &Entid::Entid(_) => None,
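
Note (not part of the patch): the two patterns this commit leans on are a `lazy_static!` static typed as a fixed-size array rather than a `Vec`, and `pub(crate)` visibility for helpers that only need to be seen inside their own crate. The sketch below is a minimal, self-contained illustration of both under hypothetical names (`SEED_IDENTS`, `seed_ident_map`); it only assumes the `lazy_static` crate and is not taken from the Mentat sources.

// Illustrative sketch only -- hypothetical names, assuming the `lazy_static` crate.
#[macro_use]
extern crate lazy_static;

use std::collections::BTreeMap;

lazy_static! {
    // A fixed-size array: the element count is part of the type and checked at
    // compile time, and no separate heap allocation is made for the backing
    // storage, unlike a `vec![...]` literal.
    static ref SEED_IDENTS: [(&'static str, i64); 3] = {
        [("db/ident", 1),
         ("db/txInstant", 3),
         ("db/valueType", 7)]
    };
}

// `pub(crate)`: callable from anywhere inside this crate, but not exported,
// which is the visibility most of the functions touched by this patch need.
pub(crate) fn seed_ident_map() -> BTreeMap<String, i64> {
    // Arrays iterate directly by reference, so no `[..]` slicing is required.
    SEED_IDENTS.iter()
               .map(|&(ident, entid)| (ident.to_string(), entid))
               .collect()
}

fn main() {
    assert_eq!(seed_ident_map().len(), 3);
}

The trade-off is the same one the commit message points at: the array form costs a little flexibility (the length must be written out), but it removes an allocation, documents the expected size, and lets callers iterate without the `&X[..]` dance the old code used.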