Automatically remove trailing whitespace from text files. (#527) r=rnewman

This was done using the following shell script:

```sh
find . -type f -not -path "*target*" \
       '(' -name '*.rs' -o -name '*.md' -o -name '*.toml' ')' -print0 | \
    xargs -0 sed -i '' -E 's/[[:space:]]*$//'
```

This is admittedly imperfect, but it hits everything that was a problem in this repo.
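
One note on portability: the `-i ''` form of in-place editing is BSD/macOS `sed` syntax; GNU `sed` takes `-i` with no separate argument. A rough equivalent for a Linux machine, plus a follow-up check that nothing slipped through — a sketch only, assuming GNU-style `find`, `sed`, and `grep` — might look like this:

```sh
# Hypothetical GNU variant of the same cleanup (note: plain -i, no '').
find . -type f -not -path "*target*" \
       '(' -name '*.rs' -o -name '*.md' -o -name '*.toml' ')' -print0 | \
    xargs -0 sed -i -E 's/[[:space:]]*$//'

# Sanity check: list any files that still contain trailing whitespace.
grep -rlE --include='*.rs' --include='*.md' --include='*.toml' \
     --exclude-dir=target '[[:space:]]+$' . || echo "clean"
```
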
Authored by Thom on 2018-01-19 22:21:04 -05:00; committed by Richard Newman
parent 023fd9b70b
commit 9740cafdbd
19 changed files with 88 additions and 88 deletions
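
As a sanity check that the change really is whitespace-only, re-rendering the commit with whitespace differences ignored should produce an empty diff body. This assumes a local checkout of the repository:

```sh
# Show the commit while ignoring all whitespace changes; a whitespace-only
# commit yields only the header and message, with no hunks.
git show -w 9740cafdbd

# Equivalently, diff against the parent with whitespace ignored.
git diff -w 023fd9b70b 9740cafdbd
```
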


@@ -25,19 +25,19 @@ We've observed that data storage is a particular area of difficulty for software
- Model their domain entities and relationships.
- Encode that model _efficiently_ and _correctly_ using the features available in the database.
- Plan for future extensions and performance tuning.
In a SQL database, the same schema definition defines everything from high-level domain relationships through to numeric field sizes in the same smear of keywords. It's difficult for someone unfamiliar with the domain to determine from such a schema what's a domain fact and what's an implementation concession — are all part numbers always 16 characters long, or are we trying to save space? — or, indeed, whether a missing constraint is deliberate or a bug.
The developer must think about foreign key constraints, compound uniqueness, and nullability. They must consider indexing, synchronizing, and stable identifiers. Most developers simply don't do enough work in SQL to get all of these things right. Storage thus becomes the specialty of a few individuals.
Which one of these is correct?
```edn
{:db/id :person/email
 :db/valueType :db.type/string
 :db/cardinality :db.cardinality/many   ; People can have multiple email addresses.
 :db/unique :db.unique/identity         ; For our purposes, each email identifies one person.
 :db/index true}                        ; We want fast lookups by email.
{:db/id :person/friend
 :db/valueType :db.type/ref
 :db/cardinality :db.cardinality/many}  ; People can have many friends.
@@ -53,7 +53,7 @@ We've observed that data storage is a particular area of difficulty for software
  FOREIGN KEY friend REFERENCES people(id), -- Bug: no compound uniqueness constraint, so we can have dupe friendships.
);
```
They both have limitations — the Mentat schema allows only for an open world (it's possible to declare friendships with people whose email isn't known), and requires validation code to enforce email string correctness — but we think that even such a tiny SQL example is harder to understand and obscures important domain decisions.
- Queries are intimately tied to structural storage choices. That not only hides the declarative domain-level meaning of the query — it's hard to tell what a query is trying to do when it's a 100-line mess of subqueries and `LEFT OUTER JOIN`s — but it also means a simple structural schema change requires auditing _every query_ for correctness.


@@ -61,4 +61,4 @@ mod tests {
        assert_eq!(d.next(), 2);
        assert_eq!(c.next(), 3);
    }
}


@@ -61,4 +61,4 @@ impl<T> InternSet<T> where T: Eq + Hash {
            self.inner.get(&key).unwrap().clone()
        }
    }
}


@@ -568,9 +568,9 @@ impl Attribute {
        match self.unique {
            Some(attribute::Unique::Value) => { attribute_map.insert(values::DB_UNIQUE.clone(), values::DB_UNIQUE_VALUE.clone()); },
            Some(attribute::Unique::Identity) => { attribute_map.insert(values::DB_UNIQUE.clone(), values::DB_UNIQUE_IDENTITY.clone()); },
            None => (),
        }
        if self.index {
            attribute_map.insert(values::DB_INDEX.clone(), edn::Value::Boolean(true));
        }
@@ -667,7 +667,7 @@ impl Schema {
    /// Returns an symbolic representation of the schema suitable for applying across Mentat stores.
    pub fn to_edn_value(&self) -> edn::Value {
        edn::Value::Vector((&self.schema_map).iter()
            .map(|(entid, attribute)|
                attribute.to_edn_value(self.get_ident(*entid).cloned()))
            .collect())
    }


@@ -275,9 +275,9 @@ mod test {
    use self::edn::NamespacedKeyword;
    use errors::Error;
    fn add_attribute(schema: &mut Schema,
                     ident: NamespacedKeyword,
                     entid: Entid,
                     attribute: Attribute) {
        schema.entid_map.insert(entid, ident.clone());
@@ -351,10 +351,10 @@ mod test {
            multival: false,
            component: false,
        });
        let err = validate_schema_map(&schema.entid_map, &schema.schema_map).err();
        assert!(err.is_some());
        match err.unwrap() {
            Error(ErrorKind::BadSchemaAssertion(message), _) => { assert_eq!(message, ":db/unique :db/unique_value without :db/index true for entid: :foo/bar"); },
            x => panic!("expected Bad Schema Assertion error, got {:?}", x),
@@ -373,10 +373,10 @@ mod test {
            multival: false,
            component: false,
        });
        let err = validate_schema_map(&schema.entid_map, &schema.schema_map).err();
        assert!(err.is_some());
        match err.unwrap() {
            Error(ErrorKind::BadSchemaAssertion(message), _) => { assert_eq!(message, ":db/unique :db/unique_identity without :db/index true for entid: :foo/bar"); },
            x => panic!("expected Bad Schema Assertion error, got {:?}", x),
@@ -395,10 +395,10 @@ mod test {
            multival: false,
            component: true,
        });
        let err = validate_schema_map(&schema.entid_map, &schema.schema_map).err();
        assert!(err.is_some());
        match err.unwrap() {
            Error(ErrorKind::BadSchemaAssertion(message), _) => { assert_eq!(message, ":db/isComponent true without :db/valueType :db.type/ref for entid: :foo/bar"); },
            x => panic!("expected Bad Schema Assertion error, got {:?}", x),
@@ -417,10 +417,10 @@ mod test {
            multival: false,
            component: false,
        });
        let err = validate_schema_map(&schema.entid_map, &schema.schema_map).err();
        assert!(err.is_some());
        match err.unwrap() {
            Error(ErrorKind::BadSchemaAssertion(message), _) => { assert_eq!(message, ":db/fulltext true without :db/index true for entid: :foo/bar"); },
            x => panic!("expected Bad Schema Assertion error, got {:?}", x),
@@ -438,10 +438,10 @@ mod test {
            multival: false,
            component: false,
        });
        let err = validate_schema_map(&schema.entid_map, &schema.schema_map).err();
        assert!(err.is_some());
        match err.unwrap() {
            Error(ErrorKind::BadSchemaAssertion(message), _) => { assert_eq!(message, ":db/fulltext true without :db/valueType :db.type/string for entid: :foo/bar"); },
            x => panic!("expected Bad Schema Assertion error, got {:?}", x),


@@ -578,4 +578,4 @@ mod test {
        assert_match!("[#{?x ?y} ?x]" =~ "[#{1 2} 1]");
        assert_match!("[#{?x ?y} ?y]" =~ "[#{1 2} 2]");
    }
}


@@ -12,8 +12,8 @@ use mentat_core::Schema;
use mentat_query::{
    ContainsVariables,
    NotJoin,
    UnifyVars,
};
use clauses::ConjoiningClauses;
@@ -24,7 +24,7 @@ use errors::{
};
use types::{
    ColumnConstraint,
    ComputedTable,
};
@@ -36,7 +36,7 @@ impl ConjoiningClauses {
        };
        let mut template = self.use_as_template(&unified);
        for v in unified.iter() {
            if self.value_bindings.contains_key(&v) {
                let val = self.value_bindings.get(&v).unwrap().clone();
@@ -78,14 +78,14 @@ mod testing {
    use super::*;
    use mentat_core::{
        Attribute,
        TypedValue,
        ValueType,
        ValueTypeSet,
    };
    use mentat_query::{
        NamespacedKeyword,
        PlainSymbol,
        Variable
    };
@@ -93,8 +93,8 @@ mod testing {
    use self::mentat_query_parser::parse_find_string;
    use clauses::{
        QueryInputs,
        add_attribute,
        associate_ident,
    };
@@ -104,20 +104,20 @@ mod testing {
    };
    use types::{
        ColumnAlternation,
        ColumnConstraint,
        ColumnConstraintOrAlternation,
        ColumnIntersection,
        DatomsColumn,
        DatomsTable,
        Inequality,
        QualifiedAlias,
        QueryValue,
        SourceAlias,
    };
    use {
        algebrize,
        algebrize_with_inputs,
    };
@@ -248,7 +248,7 @@ mod testing {
                :where [?x :foo/knows ?y]
                       [?x :foo/age 11]
                       [?x :foo/name "John"]
                       (not-join [?x ?y]
                                 [?x :foo/parent ?y])]"#;
        let cc = alg(&schema, query);
@@ -374,7 +374,7 @@ mod testing {
        assert_eq!(cc.column_bindings.get(&vx), Some(&vec![d0e]));
        assert_eq!(cc.from, vec![SourceAlias(DatomsTable::Datoms, d0)]);
    }
    // not with an or
    #[test]
    fn test_not_with_or() {
@@ -524,7 +524,7 @@ mod testing {
        assert!(!cc.is_known_empty());
        compare_ccs(cc,
                    alg(&schema, r#"[:find ?x :where [?x :foo/knows "John"]]"#));
    }
    #[test]


@@ -81,7 +81,7 @@ impl ConjoiningClauses {
        // Sorry for the duplication; Rust makes it a pain to abstract this.
        // The transaction part of a pattern must be an entid, variable, or placeholder.
        self.constrain_to_tx(&pattern.tx);
        self.constrain_to_ref(&pattern.entity);
        self.constrain_to_ref(&pattern.attribute);


@@ -53,7 +53,7 @@ error_chain! {
            description("no such function")
            display("no function named {}", name)
        }
        InvalidNumberOfArguments(function: PlainSymbol, number: usize, expected: usize) {
            description("invalid number of arguments")
            display("invalid number of arguments to {}: expected {}, got {}.", function, expected, number)


@@ -352,7 +352,7 @@ mod tests {
            _ => panic!(),
        };
    }
    /// Test that a `not-join` that does not use the joining var fails to validate.
    #[test]
    fn test_invalid_explicit_not_join_non_matching_join_vars() {
@@ -374,5 +374,5 @@ mod tests {
            WhereClause::NotJoin(not_join) => assert!(validate_not_join(&not_join).is_err()),
            _ => panic!(),
        }
    }
}


@@ -756,7 +756,7 @@ mod test {
        let e = edn::PlainSymbol::new("?e");
        let a = edn::PlainSymbol::new("?a");
        let v = edn::PlainSymbol::new("?v");
        assert_edn_parses_to!(Where::not_join_clause,
                              "(not-join [?e] [?e ?a ?v])",
                              WhereClause::NotJoin(


@@ -275,7 +275,7 @@ fn test_tx() {
              ]"#).expect("successful transaction");
    let r = conn.q_once(&mut c,
                        r#"[:find ?tx
                            :where [?x :foo/uuid #uuid "cf62d552-6569-4d1b-b667-04703041dfc4" ?tx]]"#, None);
    match r {
        Result::Ok(QueryResults::Rel(ref v)) => {
@@ -309,7 +309,7 @@ fn test_tx_as_input() {
    let tx = (Variable::from_valid_name("?tx"), TypedValue::Ref(t.tx_id));
    let inputs = QueryInputs::with_value_sequence(vec![tx]);
    let r = conn.q_once(&mut c,
                        r#"[:find ?uuid
                            :in ?tx
                            :where [?x :foo/uuid ?uuid ?tx]]"#, inputs);
    match r {


@@ -65,7 +65,7 @@ pub mod tests {
        let mut conn = setup_conn_bare();
        assert!(ensure_current_version(&mut conn).is_ok());
        let mut stmt = conn.prepare("SELECT key FROM tolstoy_metadata WHERE value = zeroblob(16)").unwrap();
        let mut keys_iter = stmt.query_map(&[], |r| r.get(0)).expect("query works");


@@ -10,21 +10,21 @@
use combine::{
    any,
    eof,
    look_ahead,
    many1,
    satisfy,
    sep_end_by,
    token,
    Parser
};
use combine::char::{
    space,
    spaces,
    string
};
use combine::combinator::{
    choice,
    try
};
@@ -59,7 +59,7 @@ pub enum Command {
impl Command {
    /// is_complete returns true if no more input is required for the command to be successfully executed.
    /// false is returned if the command is not considered valid.
    /// Defaults to true for all commands except Query and Transact.
    /// TODO: for query and transact commands, they will be considered complete if a parsable EDN has been entered as an argument
    pub fn is_complete(&self) -> bool {
@@ -129,7 +129,7 @@ pub fn command(s: &str) -> Result<Command, cli::Error> {
            }
            Ok(Command::Open(args[0].clone()))
        });
    let no_arg_parser = || arguments()
        .skip(spaces())
        .skip(eof());
@@ -142,7 +142,7 @@ pub fn command(s: &str) -> Result<Command, cli::Error> {
            }
            Ok(Command::Close)
        });
    let schema_parser = string(SCHEMA_COMMAND)
        .with(no_arg_parser())
        .map(|args| {
@@ -204,7 +204,7 @@ pub fn command(s: &str) -> Result<Command, cli::Error> {
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_help_parser_multiple_args() {
@@ -298,7 +298,7 @@ mod tests {
            _ => assert!(false)
        }
    }
    #[test]
    fn test_open_parser_no_args() {
        let input = ".open";
@@ -376,7 +376,7 @@ mod tests {
            _ => assert!(false)
        }
    }
    #[test]
    fn test_schema_parser_with_args() {
        let input = ".schema arg1";
@@ -393,7 +393,7 @@ mod tests {
            _ => assert!(false)
        }
    }
    #[test]
    fn test_schema_parser_no_args_trailing_whitespace() {
        let input = ".schema ";


@@ -19,7 +19,7 @@ use linefeed::{
use self::InputResult::*;
use command_parser::{
    Command,
    command,
};


@@ -84,7 +84,7 @@ pub fn run() -> i32 {
                last_arg = Some(&arg);
                None
            },
        }
    }).collect();
    let repl = repl::Repl::new();


@@ -8,7 +8,7 @@
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::collections::HashMap;
use std::process;
use mentat::query::{
@@ -18,8 +18,8 @@ use mentat::query::{
use mentat_core::TypedValue;
use command_parser::{
    Command,
    HELP_COMMAND,
    OPEN_COMMAND,
    LONG_QUERY_COMMAND,
    SHORT_QUERY_COMMAND,
@@ -33,12 +33,12 @@ use command_parser::{
};
use input::InputReader;
use input::InputResult::{
    MetaCommand,
    Empty,
    More,
    Eof
};
use store::{
    Store,
    db_output_name
};
@@ -127,7 +127,7 @@ impl Repl {
                    Ok(s) => println!("{}", s),
                    Err(e) => println!("{}", e)
                };
            }
            Command::Transact(transaction) => self.execute_transact(transaction),
            Command::Exit => {
@@ -153,7 +153,7 @@ impl Repl {
                }
            } else {
                for mut arg in args {
                    if arg.chars().nth(0).unwrap() == '.' {
                        arg.remove(0);
                    }
                    let msg = COMMAND_HELP.get(arg.as_str());
@@ -177,23 +177,23 @@ impl Repl {
        if results.is_empty() {
            println!("No results found.")
        }
        let mut output: String = String::new();
        match results {
            QueryResults::Scalar(Some(val)) => {
                output.push_str(&self.typed_value_as_string(val));
            },
            QueryResults::Tuple(Some(vals)) => {
                for val in vals {
                    output.push_str(&format!("{}\t", self.typed_value_as_string(val)));
                }
            },
            QueryResults::Coll(vv) => {
                for val in vv {
                    output.push_str(&format!("{}\n", self.typed_value_as_string(val)));
                }
            },
            QueryResults::Rel(vvv) => {
                for vv in vvv {
                    for v in vv {
                        output.push_str(&format!("{}\t", self.typed_value_as_string(v)));


@@ -37,9 +37,9 @@ pub fn db_output_name(db_name: &String) -> String {
impl Store {
    pub fn new(database: Option<String>) -> Result<Store, cli::Error> {
        let db_name = database.unwrap_or("".to_string());
        let mut handle = try!(new_connection(&db_name));
        let conn = try!(Conn::connect(&mut handle));
        Ok(Store { handle, conn, db_name })
    }


@@ -1 +1 @@
This sub-crate implements the core types used by the transaction processor.