Compare commits: master...dependabot

1 commit — fdad1ceb5c

58 changed files with 267 additions and 472 deletions
.github/dependabot.yml (vendored) — 17 changed lines

@@ -1,11 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
- package-ecosystem: "cargo" # See documentation for possible values
  directory: "/" # Location of package manifests
- package-ecosystem: cargo
  directory: "/"
  schedule:
    interval: "daily"
    interval: daily
    time: "10:00"
  open-pull-requests-limit: 10
  reviewers:
    - gburd
  assignees:
    - gburd
.github/workflows/audit.yml (vendored) — 2 changed lines

@@ -2,7 +2,7 @@ name: Security audit

on:
  schedule:
    - cron: '0 0 1 * *'
    - cron: '0 0 0 * *'
  push:
    paths:
      - '**/Cargo.toml'
.github/workflows/clippy_check.yml (vendored) — 1 changed line

@@ -12,5 +12,4 @@ jobs:
          override: true
      - uses: actions-rs/clippy-check@v1
        with:
          args: --all-targets --all-features -- -D warnings
          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/grcov.yml (vendored) — 2 changed lines

@@ -10,7 +10,7 @@ jobs:
        os:
          - ubuntu-latest
          - macOS-latest
          # - windows-latest
          - windows-latest

    steps:
      - uses: actions/checkout@v2
.gitignore (vendored) — 2 changed lines

@@ -15,8 +15,6 @@
.lein-plugins/
.lein-repl-history
.nrepl-port
.bundle/
docs/vendor/
/.lein-*
/.nrepl-port
Cargo.lock
Cargo.toml — 27 changed lines

@@ -1,5 +1,5 @@
[package]
edition = "2021"
edition = "2018"
authors = [
    "Richard Newman <rnewman@twinql.com>",
    "Nicholas Alexander <nalexander@mozilla.com>",

@@ -14,7 +14,7 @@ authors = [
    "Gregory Burd <greg@burd.me>",
]
name = "mentat"
version = "0.14.0"
version = "0.13.0"
build = "build/version.rs"

[features]

@@ -24,23 +24,18 @@ sqlcipher = ["rusqlite/sqlcipher", "mentat_db/sqlcipher"]
syncable = ["mentat_tolstoy", "tolstoy_traits", "mentat_db/syncable"]

[workspace]
members = [
    "tools/cli",
    "ffi", "core", "core-traits","db", "db-traits", "edn", "public-traits", "query-algebrizer",
    "query-algebrizer-traits", "query-projector", "query-projector-traits","query-pull",
    "query-sql", "sql", "sql-traits", "tolstoy-traits", "tolstoy", "transaction"
]
members = ["tools/cli", "ffi"]

[build-dependencies]
rustc_version = "~0.4"
rustc_version = "~0.3"

[dev-dependencies]
assert_approx_eq = "~1.1"

#[dev-dependencies.cargo-husky]
#version = "1"
#default-features = false # Disable features which are enabled by default
#features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
[dev-dependencies.cargo-husky]
version = "1"
default-features = false # Disable features which are enabled by default
features = ["run-for-all", "precommit-hook", "run-cargo-fmt", "run-cargo-test", "run-cargo-check", "run-cargo-clippy"]
#cargo audit
#cargo outdated

@@ -48,12 +43,12 @@ assert_approx_eq = "~1.1"
chrono = "~0.4"
failure = "~0.1"
lazy_static = "~1.4"
time = "0.3.1"
time = "0.2.15"
log = "~0.4"
uuid = { version = "~1", features = ["v4", "serde"] }
uuid = { version = "~0.8", features = ["v4", "serde"] }

[dependencies.rusqlite]
version = "~0.29"
version = "~0.24"
features = ["limits", "bundled"]

[dependencies.edn]
Makefile — 3 changed lines

@@ -1,4 +1,3 @@
.PHONY: outdated fix

outdated:
	for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo outdated -R); done

@@ -7,5 +6,3 @@ outdated:
fix:
	$(for p in $(dirname $(ls Cargo.toml */Cargo.toml */*/Cargo.toml)); do echo $p; (cd $p; cargo fix --allow-dirty --broken-code --edition-idioms); done)

upgrades:
	cargo upgrades
@@ -14,7 +14,7 @@ use std::process::exit;

/// MIN_VERSION should be changed when there's a new minimum version of rustc required
/// to build the project.
static MIN_VERSION: &str = "1.69.0";
static MIN_VERSION: &str = "1.43.0";

fn main() {
    let ver = version().unwrap();
|
|
@ -11,12 +11,11 @@ path = "lib.rs"
|
|||
chrono = { version = "~0.4", features = ["serde"] }
|
||||
enum-set = "~0.0.8"
|
||||
lazy_static = "~1.4"
|
||||
indexmap = "~1.9"
|
||||
ordered-float = { version = "~2.8", features = ["serde"] }
|
||||
uuid = { version = "~1", features = ["v4", "serde"] }
|
||||
indexmap = "~1.5"
|
||||
ordered-float = { version = "~2.0", features = ["serde"] }
|
||||
uuid = { version = "~0.8", features = ["v4", "serde"] }
|
||||
serde = { version = "~1.0", features = ["rc"] }
|
||||
serde_derive = "~1.0"
|
||||
bytes = { version = "1.0.1", features = ["serde"] }
|
||||
|
||||
[dependencies.edn]
|
||||
path = "../edn"
|
||||
|
|
|
@ -14,7 +14,6 @@ extern crate indexmap;
|
|||
extern crate ordered_float;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
extern crate bytes;
|
||||
extern crate edn;
|
||||
extern crate uuid;
|
||||
#[macro_use]
|
||||
|
@ -34,7 +33,6 @@ use std::sync::Arc;
|
|||
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use bytes::Bytes;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use enum_set::EnumSet;
|
||||
|
@ -282,7 +280,6 @@ pub enum ValueType {
|
|||
String,
|
||||
Keyword,
|
||||
Uuid,
|
||||
Bytes,
|
||||
}
|
||||
|
||||
impl ValueType {
|
||||
|
@ -297,7 +294,6 @@ impl ValueType {
|
|||
s.insert(ValueType::String);
|
||||
s.insert(ValueType::Keyword);
|
||||
s.insert(ValueType::Uuid);
|
||||
s.insert(ValueType::Bytes);
|
||||
s
|
||||
}
|
||||
}
|
||||
|
@ -325,7 +321,6 @@ impl ValueType {
|
|||
ValueType::String => "string",
|
||||
ValueType::Keyword => "keyword",
|
||||
ValueType::Uuid => "uuid",
|
||||
ValueType::Bytes => "bytes",
|
||||
},
|
||||
)
|
||||
}
|
||||
|
@ -343,7 +338,6 @@ impl ValueType {
|
|||
"string" => Some(ValueType::String),
|
||||
"keyword" => Some(ValueType::Keyword),
|
||||
"uuid" => Some(ValueType::Uuid),
|
||||
"bytes" => Some(ValueType::Bytes),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -361,7 +355,6 @@ impl ValueType {
|
|||
ValueType::String => "string",
|
||||
ValueType::Keyword => "keyword",
|
||||
ValueType::Uuid => "uuid",
|
||||
ValueType::Bytes => "bytes",
|
||||
},
|
||||
)
|
||||
}
|
||||
|
@ -376,7 +369,6 @@ impl ValueType {
|
|||
ValueType::String => values::DB_TYPE_STRING.clone(),
|
||||
ValueType::Keyword => values::DB_TYPE_KEYWORD.clone(),
|
||||
ValueType::Uuid => values::DB_TYPE_UUID.clone(),
|
||||
ValueType::Bytes => values::DB_TYPE_BYTES.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -399,7 +391,6 @@ impl fmt::Display for ValueType {
|
|||
ValueType::String => ":db.type/string",
|
||||
ValueType::Keyword => ":db.type/keyword",
|
||||
ValueType::Uuid => ":db.type/uuid",
|
||||
ValueType::Bytes => ":db.type/bytes",
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@ -423,7 +414,6 @@ pub enum TypedValue {
|
|||
String(ValueRc<String>),
|
||||
Keyword(ValueRc<Keyword>),
|
||||
Uuid(Uuid), // It's only 128 bits, so this should be acceptable to clone.
|
||||
Bytes(Bytes),
|
||||
}
|
||||
|
||||
impl From<KnownEntid> for TypedValue {
|
||||
|
@ -455,7 +445,6 @@ impl TypedValue {
|
|||
TypedValue::String(_) => ValueType::String,
|
||||
TypedValue::Keyword(_) => ValueType::Keyword,
|
||||
TypedValue::Uuid(_) => ValueType::Uuid,
|
||||
TypedValue::Bytes(_) => ValueType::Bytes,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -582,7 +571,7 @@ impl TypedValue {
|
|||
match self {
|
||||
TypedValue::Uuid(v) => {
|
||||
// Get an independent copy of the string.
|
||||
let s: String = v.hyphenated().to_string();
|
||||
let s: String = v.to_hyphenated().to_string();
|
||||
|
||||
// Make a CString out of the new bytes.
|
||||
let c: CString = CString::new(s).expect("String conversion failed!");
|
||||
|
@ -603,14 +592,7 @@ impl TypedValue {
|
|||
|
||||
pub fn into_uuid_string(self) -> Option<String> {
|
||||
match self {
|
||||
TypedValue::Uuid(v) => Some(v.hyphenated().to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_bytes(self) -> Option<Bytes> {
|
||||
match self {
|
||||
TypedValue::Bytes(b) => Some(b),
|
||||
TypedValue::Uuid(v) => Some(v.to_hyphenated().to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -704,12 +686,6 @@ impl From<f64> for TypedValue {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<&[u8]> for TypedValue {
|
||||
fn from(bslice: &[u8]) -> Self {
|
||||
TypedValue::Bytes(Bytes::copy_from_slice(bslice))
|
||||
}
|
||||
}
|
||||
|
||||
trait MicrosecondPrecision {
|
||||
/// Truncate the provided `DateTime` to microsecond precision.
|
||||
fn microsecond_precision(self) -> Self;
|
||||
|
@ -963,7 +939,7 @@ impl Binding {
|
|||
|
||||
pub fn into_uuid_string(self) -> Option<String> {
|
||||
match self {
|
||||
Binding::Scalar(TypedValue::Uuid(v)) => Some(v.hyphenated().to_string()),
|
||||
Binding::Scalar(TypedValue::Uuid(v)) => Some(v.to_hyphenated().to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
|
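Editorial note on the `hyphenated()` / `to_hyphenated()` pairs in the hunks above: uuid 1.x renamed the 0.8-era `to_hyphenated()` accessor to `hyphenated()`. A minimal sketch of the newer call, assuming uuid 1.x with the `v4` feature (the value is generated only for illustration):

```rust
use uuid::Uuid;

fn main() {
    let v = Uuid::new_v4();
    // uuid 1.x: the hyphenated formatter is reached via `hyphenated()`;
    // the 0.8-era `to_hyphenated()` name no longer exists.
    let s: String = v.hyphenated().to_string();
    assert_eq!(s.len(), 36); // 32 hex digits plus 4 hyphens
    println!("{}", s);
}
```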
|
@ -58,7 +58,6 @@ lazy_static_namespaced_keyword_value!(DB_TYPE_REF, "db.type", "ref");
|
|||
lazy_static_namespaced_keyword_value!(DB_TYPE_STRING, "db.type", "string");
|
||||
lazy_static_namespaced_keyword_value!(DB_TYPE_URI, "db.type", "uri");
|
||||
lazy_static_namespaced_keyword_value!(DB_TYPE_UUID, "db.type", "uuid");
|
||||
lazy_static_namespaced_keyword_value!(DB_TYPE_BYTES, "db.type", "bytes");
|
||||
lazy_static_namespaced_keyword_value!(DB_UNIQUE, "db", "unique");
|
||||
lazy_static_namespaced_keyword_value!(DB_UNIQUE_IDENTITY, "db.unique", "identity");
|
||||
lazy_static_namespaced_keyword_value!(DB_UNIQUE_VALUE, "db.unique", "value");
|
||||
|
|
|
@ -7,9 +7,9 @@ workspace = ".."
|
|||
chrono = { version = "~0.4", features = ["serde"] }
|
||||
enum-set = "~0.0"
|
||||
failure = "~0.1"
|
||||
indexmap = "~1.9"
|
||||
ordered-float = { version = "~2.8", features = ["serde"] }
|
||||
uuid = { version = "~1", features = ["v4", "serde"] }
|
||||
indexmap = "~1.5"
|
||||
ordered-float = { version = "~2.0", features = ["serde"] }
|
||||
uuid = { version = "~0.8", features = ["v4", "serde"] }
|
||||
|
||||
[dependencies.core_traits]
|
||||
path = "../core-traits"
|
||||
|
|
|
@ -51,7 +51,6 @@ impl SQLValueType for ValueType {
|
|||
ValueType::String => (10, None),
|
||||
ValueType::Uuid => (11, None),
|
||||
ValueType::Keyword => (13, None),
|
||||
ValueType::Bytes => (15, Some(SQLTypeAffinity::Blob)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,7 +71,6 @@ impl SQLValueType for ValueType {
|
|||
ValueType::String => false,
|
||||
Keyword => false,
|
||||
Uuid => false,
|
||||
Bytes => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,5 +21,5 @@ path = "../edn"
|
|||
path = "../core-traits"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
|
|
@ -118,10 +118,10 @@ impl ::std::fmt::Display for InputError {
|
|||
match self {
|
||||
BadDbId => {
|
||||
writeln!(f, ":db/id in map notation must either not be present or be an entid, an ident, or a tempid")
|
||||
}
|
||||
},
|
||||
BadEntityPlace => {
|
||||
writeln!(f, "cannot convert value place into entity place")
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,19 +10,19 @@ syncable = ["serde", "serde_json", "serde_derive"]
|
|||
|
||||
[dependencies]
|
||||
failure = "~0.1"
|
||||
indexmap = "~1.9"
|
||||
itertools = "~0.10"
|
||||
indexmap = "~1.5"
|
||||
itertools = "~0.9"
|
||||
lazy_static = "~1.4"
|
||||
log = "~0.4"
|
||||
ordered-float = "~2.8"
|
||||
time = "~0.3"
|
||||
petgraph = "~0.6"
|
||||
ordered-float = "~2.0"
|
||||
time = "~0.2"
|
||||
petgraph = "~0.5"
|
||||
serde = { version = "~1.0", optional = true }
|
||||
serde_json = { version = "~1.0", optional = true }
|
||||
serde_derive = { version = "~1.0", optional = true }
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
@ -45,5 +45,5 @@ path = "../sql"
|
|||
version = "~1.2"
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.9"
|
||||
env_logger = "0.7"
|
||||
#tabwriter = { version = "1.2.1" }
|
||||
|
|
|
@ -61,7 +61,6 @@ use std::iter::Peekable;
|
|||
use failure::ResultExt;
|
||||
|
||||
use rusqlite;
|
||||
use rusqlite::params_from_iter;
|
||||
|
||||
use core_traits::{Binding, Entid, TypedValue};
|
||||
|
||||
|
@ -1072,9 +1071,7 @@ impl AttributeCaches {
|
|||
replacing: bool,
|
||||
) -> Result<()> {
|
||||
let mut aev_factory = AevFactory::new();
|
||||
let rows = statement.query_map(params_from_iter(&args), |row| {
|
||||
Ok(aev_factory.row_to_aev(row))
|
||||
})?;
|
||||
let rows = statement.query_map(&args, |row| Ok(aev_factory.row_to_aev(row)))?;
|
||||
let aevs = AevRows { rows };
|
||||
self.accumulate_into_cache(
|
||||
None,
|
||||
|
|
db/src/db.rs — 13 changed lines
|
@ -22,7 +22,6 @@ use itertools;
|
|||
use itertools::Itertools;
|
||||
use rusqlite;
|
||||
use rusqlite::limits::Limit;
|
||||
use rusqlite::params_from_iter;
|
||||
use rusqlite::types::{ToSql, ToSqlOutput};
|
||||
use rusqlite::TransactionBehavior;
|
||||
|
||||
|
@ -434,7 +433,6 @@ impl TypedSQLValue for TypedValue {
|
|||
Ok(TypedValue::Uuid(u))
|
||||
}
|
||||
(13, rusqlite::types::Value::Text(x)) => to_namespaced_keyword(&x).map(|k| k.into()),
|
||||
(15, rusqlite::types::Value::Blob(x)) => Ok(TypedValue::Bytes(x.into())),
|
||||
(_, value) => bail!(DbErrorKind::BadSQLValuePair(value, value_type_tag)),
|
||||
}
|
||||
}
|
||||
|
@ -455,7 +453,6 @@ impl TypedSQLValue for TypedValue {
|
|||
Value::Float(ref x) => Some(TypedValue::Double(*x)),
|
||||
Value::Text(ref x) => Some(x.clone().into()),
|
||||
Value::Keyword(ref x) => Some(x.clone().into()),
|
||||
Value::Bytes(b) => Some(TypedValue::Bytes(b.clone())),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -472,7 +469,6 @@ impl TypedSQLValue for TypedValue {
|
|||
TypedValue::String(ref x) => (x.as_str().into(), 10),
|
||||
TypedValue::Uuid(ref u) => (u.as_bytes().to_vec().into(), 11),
|
||||
TypedValue::Keyword(ref x) => (x.to_string().into(), 13),
|
||||
TypedValue::Bytes(b) => (b.to_vec().into(), 15),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -487,7 +483,6 @@ impl TypedSQLValue for TypedValue {
|
|||
TypedValue::String(ref x) => (Value::Text(x.as_ref().clone()), ValueType::String),
|
||||
TypedValue::Uuid(ref u) => (Value::Uuid(*u), ValueType::Uuid),
|
||||
TypedValue::Keyword(ref x) => (Value::Keyword(x.as_ref().clone()), ValueType::Keyword),
|
||||
TypedValue::Bytes(b) => (Value::Bytes(b.clone()), ValueType::Bytes),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -809,7 +804,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
values);
|
||||
let mut stmt: rusqlite::Statement = self.prepare(s.as_str())?;
|
||||
|
||||
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(params_from_iter(¶ms), |row| -> Result<(i64, Entid)> {
|
||||
let m: Result<Vec<(i64, Entid)>> = stmt.query_and_then(¶ms, |row| -> Result<(i64, Entid)> {
|
||||
Ok((row.get(0)?, row.get(1)?))
|
||||
})?.collect();
|
||||
m
|
||||
|
@ -953,7 +948,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// TODO: consider ensuring we inserted the expected number of rows.
|
||||
let mut stmt = self.prepare_cached(s.as_str())?;
|
||||
stmt.execute(params_from_iter(¶ms))
|
||||
stmt.execute(¶ms)
|
||||
.context(DbErrorKind::NonFtsInsertionIntoTempSearchTableFailed)
|
||||
.map_err(|e| e.into())
|
||||
.map(|_c| ())
|
||||
|
@ -1047,7 +1042,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// TODO: consider ensuring we inserted the expected number of rows.
|
||||
let mut stmt = self.prepare_cached(fts_s.as_str())?;
|
||||
stmt.execute(params_from_iter(&fts_params)).context(DbErrorKind::FtsInsertionFailed)?;
|
||||
stmt.execute(&fts_params).context(DbErrorKind::FtsInsertionFailed)?;
|
||||
|
||||
// Second, insert searches.
|
||||
// `params` reference computed values in `block`.
|
||||
|
@ -1075,7 +1070,7 @@ impl MentatStoring for rusqlite::Connection {
|
|||
|
||||
// TODO: consider ensuring we inserted the expected number of rows.
|
||||
let mut stmt = self.prepare_cached(s.as_str())?;
|
||||
stmt.execute(params_from_iter(¶ms)).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
||||
stmt.execute(¶ms).context(DbErrorKind::FtsInsertionIntoTempSearchTableFailed)
|
||||
.map_err(|e| e.into())
|
||||
.map(|_c| ())
|
||||
}).collect::<Result<Vec<()>>>();
|
||||
|
|
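Editorial note on the `params_from_iter` hunks above: the newer rusqlite in this diff (the `~0.29` side) no longer accepts a borrowed `Vec<&dyn ToSql>` directly as a parameter list, so iterator-built parameters are wrapped in `rusqlite::params_from_iter`. A minimal sketch under that assumption — the table and column names here are hypothetical, not part of Mentat's schema:

```rust
use rusqlite::{params_from_iter, Connection, Result};

fn insert_all(conn: &Connection, names: &[String]) -> Result<()> {
    conn.execute("CREATE TABLE IF NOT EXISTS t (name TEXT)", [])?;
    let mut stmt = conn.prepare("INSERT INTO t (name) VALUES (?1)")?;
    for name in names {
        // `params_from_iter` adapts any IntoIterator of ToSql values into
        // the Params type that `execute`/`query_map` expect.
        stmt.execute(params_from_iter(std::iter::once(name)))?;
    }
    Ok(())
}

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    insert_all(&conn, &["a".into(), "b".into()])
}
```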
|
@ -306,9 +306,10 @@ pub fn transactions_after<S: Borrow<Schema>>(
|
|||
pub fn fulltext_values(conn: &rusqlite::Connection) -> Result<FulltextValues> {
|
||||
let mut stmt: rusqlite::Statement =
|
||||
conn.prepare("SELECT rowid, text FROM fulltext_values ORDER BY rowid")?;
|
||||
let params: &[i32; 0] = &[];
|
||||
|
||||
let r: Result<Vec<_>> = stmt
|
||||
.query_and_then([], |row| {
|
||||
.query_and_then(params, |row| {
|
||||
let rowid: i64 = row.get(0)?;
|
||||
let text: String = row.get(1)?;
|
||||
Ok((rowid, text))
|
||||
|
@ -340,7 +341,7 @@ pub fn dump_sql_query(
|
|||
|
||||
let r: Result<Vec<_>> = stmt
|
||||
.query_and_then(params, |row| {
|
||||
for i in 0..row.as_ref().column_count() {
|
||||
for i in 0..row.column_count() {
|
||||
let value: rusqlite::types::Value = row.get(i)?;
|
||||
write!(&mut tw, "{:?}\t", value).unwrap();
|
||||
}
|
||||
|
|
|
@ -75,7 +75,7 @@ impl TransactableValue for ValueAndSpan {
|
|||
}
|
||||
}
|
||||
Nil | Boolean(_) | Instant(_) | BigInteger(_) | Float(_) | Uuid(_) | PlainSymbol(_)
|
||||
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) | Bytes(_) => {
|
||||
| NamespacedSymbol(_) | Vector(_) | Set(_) | Map(_) => {
|
||||
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
|
||||
}
|
||||
}
|
||||
|
@ -105,8 +105,7 @@ impl TransactableValue for TypedValue {
|
|||
| TypedValue::Long(_)
|
||||
| TypedValue::Double(_)
|
||||
| TypedValue::Instant(_)
|
||||
| TypedValue::Uuid(_)
|
||||
| TypedValue::Bytes(_) => {
|
||||
| TypedValue::Uuid(_) => {
|
||||
bail!(DbErrorKind::InputError(errors::InputError::BadEntityPlace))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -248,7 +248,6 @@ pub fn update_attribute_map_from_entid_triples(
|
|||
TypedValue::Ref(entids::DB_TYPE_REF) => { builder.value_type(ValueType::Ref); },
|
||||
TypedValue::Ref(entids::DB_TYPE_STRING) => { builder.value_type(ValueType::String); },
|
||||
TypedValue::Ref(entids::DB_TYPE_UUID) => { builder.value_type(ValueType::Uuid); },
|
||||
TypedValue::Ref(entids::DB_TYPE_BYTES) => { builder.value_type(ValueType::Bytes); },
|
||||
_ => bail!(DbErrorKind::BadSchemaAssertion(format!("Expected [... :db/valueType :db.type/*] but got [... :db/valueType {:?}] for entid {} and attribute {}", value, entid, attr)))
|
||||
}
|
||||
},
|
||||
|
|
|
@ -362,7 +362,6 @@ impl SchemaTypeChecking for Schema {
|
|||
(ValueType::Uuid, tv @ TypedValue::Uuid(_)) => Ok(tv),
|
||||
(ValueType::Instant, tv @ TypedValue::Instant(_)) => Ok(tv),
|
||||
(ValueType::Keyword, tv @ TypedValue::Keyword(_)) => Ok(tv),
|
||||
(ValueType::Bytes, tv @ TypedValue::Bytes(_)) => Ok(tv),
|
||||
// Ref coerces a little: we interpret some things depending on the schema as a Ref.
|
||||
(ValueType::Ref, TypedValue::Long(x)) => Ok(TypedValue::Ref(x)),
|
||||
(ValueType::Ref, TypedValue::Keyword(ref x)) => {
|
||||
|
@ -380,7 +379,6 @@ impl SchemaTypeChecking for Schema {
|
|||
| (vt @ ValueType::Uuid, _)
|
||||
| (vt @ ValueType::Instant, _)
|
||||
| (vt @ ValueType::Keyword, _)
|
||||
| (vt @ ValueType::Bytes, _)
|
||||
| (vt @ ValueType::Ref, _) => {
|
||||
bail!(DbErrorKind::BadValuePair(format!("{}", value), vt))
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
|
||||
use std::ops::RangeFrom;
|
||||
|
||||
use rusqlite::{self, params_from_iter};
|
||||
use rusqlite;
|
||||
|
||||
use db_traits::errors::{DbErrorKind, Result};
|
||||
|
||||
|
@ -81,7 +81,10 @@ fn move_transactions_to(
|
|||
new_timeline,
|
||||
crate::repeat_values(tx_ids.len(), 1)
|
||||
),
|
||||
params_from_iter(tx_ids.iter()),
|
||||
&(tx_ids
|
||||
.iter()
|
||||
.map(|x| x as &dyn rusqlite::types::ToSql)
|
||||
.collect::<Vec<_>>()),
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -67,11 +67,6 @@ fn test_from_sql_value_pair() {
|
|||
.unwrap(),
|
||||
TypedValue::typed_ns_keyword("db", "keyword")
|
||||
);
|
||||
assert_eq!(
|
||||
TypedValue::from_sql_value_pair(rusqlite::types::Value::Blob(vec![1, 2, 3, 42]), 15)
|
||||
.unwrap(),
|
||||
TypedValue::Bytes((vec![1, 2, 3, 42]).into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -11,7 +11,7 @@ source "https://rubygems.org"
|
|||
# gem "jekyll", "~> 3.7.3"
|
||||
|
||||
# This is the default theme for new Jekyll sites. You may change this to anything you like.
|
||||
gem "minima", "~> 2.5.1"
|
||||
gem "minima", "~> 2.0"
|
||||
|
||||
# If you want to use GitHub Pages, remove the "gem "jekyll"" above and
|
||||
# uncomment the line below. To upgrade, run `bundle update github-pages`.
|
||||
|
@ -19,9 +19,9 @@ gem "minima", "~> 2.5.1"
|
|||
|
||||
# If you have any plugins, put them here!
|
||||
group :jekyll_plugins do
|
||||
gem "jekyll-feed", "~> 0.15.1"
|
||||
gem "github-pages", "~> 215"
|
||||
gem "jekyll-commonmark-ghpages", "~> 0.1.6"
|
||||
gem "jekyll-feed", "~> 0.9.3"
|
||||
gem "github-pages", "~> 186"
|
||||
gem "jekyll-commonmark-ghpages", "~> 0.1.5"
|
||||
end
|
||||
|
||||
# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
|
||||
|
|
|
@ -1,161 +1,148 @@
|
|||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activesupport (6.0.4)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
activesupport (4.2.10)
|
||||
i18n (~> 0.7)
|
||||
minitest (~> 5.1)
|
||||
thread_safe (~> 0.3, >= 0.3.4)
|
||||
tzinfo (~> 1.1)
|
||||
zeitwerk (~> 2.2, >= 2.2.2)
|
||||
addressable (2.8.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
addressable (2.5.2)
|
||||
public_suffix (>= 2.0.2, < 4.0)
|
||||
coffee-script (2.4.1)
|
||||
coffee-script-source
|
||||
execjs
|
||||
coffee-script-source (1.11.1)
|
||||
colorator (1.1.0)
|
||||
commonmarker (0.17.13)
|
||||
commonmarker (0.17.9)
|
||||
ruby-enum (~> 0.5)
|
||||
concurrent-ruby (1.1.9)
|
||||
dnsruby (1.61.7)
|
||||
simpleidn (~> 0.1)
|
||||
em-websocket (0.5.2)
|
||||
concurrent-ruby (1.0.5)
|
||||
dnsruby (1.60.2)
|
||||
em-websocket (0.5.1)
|
||||
eventmachine (>= 0.12.9)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
ethon (0.14.0)
|
||||
ffi (>= 1.15.0)
|
||||
ethon (0.11.0)
|
||||
ffi (>= 1.3.0)
|
||||
eventmachine (1.2.7)
|
||||
execjs (2.8.1)
|
||||
faraday (1.4.3)
|
||||
faraday-em_http (~> 1.0)
|
||||
faraday-em_synchrony (~> 1.0)
|
||||
faraday-excon (~> 1.1)
|
||||
faraday-net_http (~> 1.0)
|
||||
faraday-net_http_persistent (~> 1.1)
|
||||
execjs (2.7.0)
|
||||
faraday (0.15.2)
|
||||
multipart-post (>= 1.2, < 3)
|
||||
ruby2_keywords (>= 0.0.4)
|
||||
faraday-em_http (1.0.0)
|
||||
faraday-em_synchrony (1.0.0)
|
||||
faraday-excon (1.1.0)
|
||||
faraday-net_http (1.0.1)
|
||||
faraday-net_http_persistent (1.1.0)
|
||||
ffi (1.15.3)
|
||||
ffi (1.9.25)
|
||||
forwardable-extended (2.6.0)
|
||||
gemoji (3.0.1)
|
||||
github-pages (215)
|
||||
github-pages-health-check (= 1.17.2)
|
||||
jekyll (= 3.9.0)
|
||||
jekyll-avatar (= 0.7.0)
|
||||
gemoji (3.0.0)
|
||||
github-pages (186)
|
||||
activesupport (= 4.2.10)
|
||||
github-pages-health-check (= 1.8.1)
|
||||
jekyll (= 3.7.3)
|
||||
jekyll-avatar (= 0.5.0)
|
||||
jekyll-coffeescript (= 1.1.1)
|
||||
jekyll-commonmark-ghpages (= 0.1.6)
|
||||
jekyll-commonmark-ghpages (= 0.1.5)
|
||||
jekyll-default-layout (= 0.1.4)
|
||||
jekyll-feed (= 0.15.1)
|
||||
jekyll-feed (= 0.9.3)
|
||||
jekyll-gist (= 1.5.0)
|
||||
jekyll-github-metadata (= 2.13.0)
|
||||
jekyll-mentions (= 1.6.0)
|
||||
jekyll-optional-front-matter (= 0.3.2)
|
||||
jekyll-github-metadata (= 2.9.4)
|
||||
jekyll-mentions (= 1.3.0)
|
||||
jekyll-optional-front-matter (= 0.3.0)
|
||||
jekyll-paginate (= 1.1.0)
|
||||
jekyll-readme-index (= 0.3.0)
|
||||
jekyll-redirect-from (= 0.16.0)
|
||||
jekyll-relative-links (= 0.6.1)
|
||||
jekyll-remote-theme (= 0.4.3)
|
||||
jekyll-readme-index (= 0.2.0)
|
||||
jekyll-redirect-from (= 0.13.0)
|
||||
jekyll-relative-links (= 0.5.3)
|
||||
jekyll-remote-theme (= 0.3.1)
|
||||
jekyll-sass-converter (= 1.5.2)
|
||||
jekyll-seo-tag (= 2.7.1)
|
||||
jekyll-sitemap (= 1.4.0)
|
||||
jekyll-swiss (= 1.0.0)
|
||||
jekyll-seo-tag (= 2.4.0)
|
||||
jekyll-sitemap (= 1.2.0)
|
||||
jekyll-swiss (= 0.4.0)
|
||||
jekyll-theme-architect (= 0.1.1)
|
||||
jekyll-theme-cayman (= 0.1.1)
|
||||
jekyll-theme-dinky (= 0.1.1)
|
||||
jekyll-theme-hacker (= 0.1.2)
|
||||
jekyll-theme-hacker (= 0.1.1)
|
||||
jekyll-theme-leap-day (= 0.1.1)
|
||||
jekyll-theme-merlot (= 0.1.1)
|
||||
jekyll-theme-midnight (= 0.1.1)
|
||||
jekyll-theme-minimal (= 0.1.1)
|
||||
jekyll-theme-modernist (= 0.1.1)
|
||||
jekyll-theme-primer (= 0.5.4)
|
||||
jekyll-theme-primer (= 0.5.3)
|
||||
jekyll-theme-slate (= 0.1.1)
|
||||
jekyll-theme-tactile (= 0.1.1)
|
||||
jekyll-theme-time-machine (= 0.1.1)
|
||||
jekyll-titles-from-headings (= 0.5.3)
|
||||
jemoji (= 0.12.0)
|
||||
kramdown (= 2.3.1)
|
||||
kramdown-parser-gfm (= 1.1.0)
|
||||
liquid (= 4.0.3)
|
||||
jekyll-titles-from-headings (= 0.5.1)
|
||||
jemoji (= 0.9.0)
|
||||
kramdown (= 1.16.2)
|
||||
liquid (= 4.0.0)
|
||||
listen (= 3.1.5)
|
||||
mercenary (~> 0.3)
|
||||
minima (= 2.5.1)
|
||||
nokogiri (>= 1.10.4, < 2.0)
|
||||
rouge (= 3.26.0)
|
||||
minima (= 2.4.1)
|
||||
nokogiri (>= 1.8.2, < 2.0)
|
||||
rouge (= 2.2.1)
|
||||
terminal-table (~> 1.4)
|
||||
github-pages-health-check (1.17.2)
|
||||
github-pages-health-check (1.8.1)
|
||||
addressable (~> 2.3)
|
||||
dnsruby (~> 1.60)
|
||||
octokit (~> 4.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
public_suffix (~> 2.0)
|
||||
typhoeus (~> 1.3)
|
||||
html-pipeline (2.14.0)
|
||||
html-pipeline (2.8.0)
|
||||
activesupport (>= 2)
|
||||
nokogiri (>= 1.4)
|
||||
http_parser.rb (0.6.0)
|
||||
i18n (0.9.5)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jekyll (3.9.0)
|
||||
jekyll (3.7.3)
|
||||
addressable (~> 2.4)
|
||||
colorator (~> 1.0)
|
||||
em-websocket (~> 0.5)
|
||||
i18n (~> 0.7)
|
||||
jekyll-sass-converter (~> 1.0)
|
||||
jekyll-watch (~> 2.0)
|
||||
kramdown (>= 1.17, < 3)
|
||||
kramdown (~> 1.14)
|
||||
liquid (~> 4.0)
|
||||
mercenary (~> 0.3.3)
|
||||
pathutil (~> 0.9)
|
||||
rouge (>= 1.7, < 4)
|
||||
safe_yaml (~> 1.0)
|
||||
jekyll-avatar (0.7.0)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll-avatar (0.5.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-coffeescript (1.1.1)
|
||||
coffee-script (~> 2.2)
|
||||
coffee-script-source (~> 1.11.1)
|
||||
jekyll-commonmark (1.3.1)
|
||||
jekyll-commonmark (1.2.0)
|
||||
commonmarker (~> 0.14)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-commonmark-ghpages (0.1.6)
|
||||
jekyll (>= 3.0, < 4.0)
|
||||
jekyll-commonmark-ghpages (0.1.5)
|
||||
commonmarker (~> 0.17.6)
|
||||
jekyll-commonmark (~> 1.2)
|
||||
rouge (>= 2.0, < 4.0)
|
||||
jekyll-commonmark (~> 1)
|
||||
rouge (~> 2)
|
||||
jekyll-default-layout (0.1.4)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-feed (0.15.1)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-feed (0.9.3)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-gist (1.5.0)
|
||||
octokit (~> 4.2)
|
||||
jekyll-github-metadata (2.13.0)
|
||||
jekyll (>= 3.4, < 5.0)
|
||||
jekyll-github-metadata (2.9.4)
|
||||
jekyll (~> 3.1)
|
||||
octokit (~> 4.0, != 4.4.0)
|
||||
jekyll-mentions (1.6.0)
|
||||
jekyll-mentions (1.3.0)
|
||||
activesupport (~> 4.0)
|
||||
html-pipeline (~> 2.3)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-optional-front-matter (0.3.2)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-optional-front-matter (0.3.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-paginate (1.1.0)
|
||||
jekyll-readme-index (0.3.0)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
jekyll-redirect-from (0.16.0)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-relative-links (0.6.1)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-remote-theme (0.4.3)
|
||||
addressable (~> 2.0)
|
||||
jekyll (>= 3.5, < 5.0)
|
||||
jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0)
|
||||
rubyzip (>= 1.3.0, < 3.0)
|
||||
jekyll-readme-index (0.2.0)
|
||||
jekyll (~> 3.0)
|
||||
jekyll-redirect-from (0.13.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-relative-links (0.5.3)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-remote-theme (0.3.1)
|
||||
jekyll (~> 3.5)
|
||||
rubyzip (>= 1.2.1, < 3.0)
|
||||
jekyll-sass-converter (1.5.2)
|
||||
sass (~> 3.4)
|
||||
jekyll-seo-tag (2.7.1)
|
||||
jekyll (>= 3.8, < 5.0)
|
||||
jekyll-sitemap (1.4.0)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-swiss (1.0.0)
|
||||
jekyll-seo-tag (2.4.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-sitemap (1.2.0)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-swiss (0.4.0)
|
||||
jekyll-theme-architect (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
|
@ -165,8 +152,8 @@ GEM
|
|||
jekyll-theme-dinky (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-hacker (0.1.2)
|
||||
jekyll (> 3.5, < 5.0)
|
||||
jekyll-theme-hacker (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-leap-day (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
|
@ -183,8 +170,8 @@ GEM
|
|||
jekyll-theme-modernist (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-primer (0.5.4)
|
||||
jekyll (> 3.5, < 5.0)
|
||||
jekyll-theme-primer (0.5.3)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-github-metadata (~> 2.9)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-theme-slate (0.1.1)
|
||||
|
@ -196,82 +183,71 @@ GEM
|
|||
jekyll-theme-time-machine (0.1.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-seo-tag (~> 2.0)
|
||||
jekyll-titles-from-headings (0.5.3)
|
||||
jekyll (>= 3.3, < 5.0)
|
||||
jekyll-watch (2.2.1)
|
||||
jekyll-titles-from-headings (0.5.1)
|
||||
jekyll (~> 3.3)
|
||||
jekyll-watch (2.0.0)
|
||||
listen (~> 3.0)
|
||||
jemoji (0.12.0)
|
||||
jemoji (0.9.0)
|
||||
activesupport (~> 4.0, >= 4.2.9)
|
||||
gemoji (~> 3.0)
|
||||
html-pipeline (~> 2.2)
|
||||
jekyll (>= 3.0, < 5.0)
|
||||
kramdown (2.3.1)
|
||||
rexml
|
||||
kramdown-parser-gfm (1.1.0)
|
||||
kramdown (~> 2.0)
|
||||
liquid (4.0.3)
|
||||
listen (3.5.1)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
jekyll (~> 3.0)
|
||||
kramdown (1.16.2)
|
||||
liquid (4.0.0)
|
||||
listen (3.1.5)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
ruby_dep (~> 1.2)
|
||||
mercenary (0.3.6)
|
||||
mini_portile2 (2.6.1)
|
||||
minima (2.5.1)
|
||||
jekyll (>= 3.5, < 5.0)
|
||||
mini_portile2 (2.3.0)
|
||||
minima (2.4.1)
|
||||
jekyll (~> 3.5)
|
||||
jekyll-feed (~> 0.9)
|
||||
jekyll-seo-tag (~> 2.1)
|
||||
minitest (5.14.4)
|
||||
multipart-post (2.1.1)
|
||||
nokogiri (1.12.5)
|
||||
mini_portile2 (~> 2.6.1)
|
||||
racc (~> 1.4)
|
||||
octokit (4.21.0)
|
||||
faraday (>= 0.9)
|
||||
minitest (5.11.3)
|
||||
multipart-post (2.0.0)
|
||||
nokogiri (1.8.3)
|
||||
mini_portile2 (~> 2.3.0)
|
||||
octokit (4.9.0)
|
||||
sawyer (~> 0.8.0, >= 0.5.3)
|
||||
pathutil (0.16.2)
|
||||
pathutil (0.16.1)
|
||||
forwardable-extended (~> 2.6)
|
||||
public_suffix (4.0.6)
|
||||
racc (1.5.2)
|
||||
rb-fsevent (0.11.0)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.5)
|
||||
rouge (3.26.0)
|
||||
ruby-enum (0.9.0)
|
||||
public_suffix (2.0.5)
|
||||
rb-fsevent (0.10.3)
|
||||
rb-inotify (0.9.10)
|
||||
ffi (>= 0.5.0, < 2)
|
||||
rouge (2.2.1)
|
||||
ruby-enum (0.7.2)
|
||||
i18n
|
||||
ruby2_keywords (0.0.4)
|
||||
rubyzip (2.3.0)
|
||||
safe_yaml (1.0.5)
|
||||
sass (3.7.4)
|
||||
ruby_dep (1.5.0)
|
||||
rubyzip (1.2.1)
|
||||
safe_yaml (1.0.4)
|
||||
sass (3.5.6)
|
||||
sass-listen (~> 4.0.0)
|
||||
sass-listen (4.0.0)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
sawyer (0.8.2)
|
||||
addressable (>= 2.3.5)
|
||||
faraday (> 0.8, < 2.0)
|
||||
simpleidn (0.2.1)
|
||||
unf (~> 0.1.4)
|
||||
sawyer (0.8.1)
|
||||
addressable (>= 2.3.5, < 2.6)
|
||||
faraday (~> 0.8, < 1.0)
|
||||
terminal-table (1.8.0)
|
||||
unicode-display_width (~> 1.1, >= 1.1.1)
|
||||
thread_safe (0.3.6)
|
||||
typhoeus (1.4.0)
|
||||
typhoeus (1.3.0)
|
||||
ethon (>= 0.9.0)
|
||||
tzinfo (1.2.9)
|
||||
tzinfo (1.2.5)
|
||||
thread_safe (~> 0.1)
|
||||
unf (0.1.4)
|
||||
unf_ext
|
||||
unf_ext (0.0.7.7)
|
||||
unicode-display_width (1.7.0)
|
||||
zeitwerk (2.4.2)
|
||||
unicode-display_width (1.4.0)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
github-pages (~> 215)
|
||||
jekyll-commonmark-ghpages (~> 0.1.6)
|
||||
jekyll-feed (~> 0.15.1)
|
||||
minima (~> 2.5.1)
|
||||
github-pages (~> 186)
|
||||
jekyll-commonmark-ghpages (~> 0.1.5)
|
||||
jekyll-feed (~> 0.9.3)
|
||||
minima (~> 2.0)
|
||||
tzinfo-data
|
||||
|
||||
BUNDLED WITH
|
||||
2.2.21
|
||||
1.16.2
|
||||
|
|
|
@ -11,16 +11,14 @@ readme = "./README.md"
|
|||
|
||||
[dependencies]
|
||||
chrono = "~0.4"
|
||||
itertools = "~0.10"
|
||||
num = "~0.4"
|
||||
ordered-float = "~2.8"
|
||||
pretty = "~0.12"
|
||||
uuid = { version = "~1", features = ["v4", "serde"] }
|
||||
itertools = "~0.9"
|
||||
num = "~0.3"
|
||||
ordered-float = "~2.0"
|
||||
pretty = "~0.10"
|
||||
uuid = { version = "~0.8", features = ["v4", "serde"] }
|
||||
serde = { version = "~1.0", optional = true }
|
||||
serde_derive = { version = "~1.0", optional = true }
|
||||
peg = "~0.8"
|
||||
bytes = "1.0.1"
|
||||
hex = "0.4.3"
|
||||
peg = "~0.6"
|
||||
|
||||
[dev-dependencies]
|
||||
serde_test = "~1.0"
|
||||
|
|
|
@ -8,9 +8,7 @@
|
|||
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations under the License.
|
||||
|
||||
extern crate bytes;
|
||||
extern crate chrono;
|
||||
extern crate hex;
|
||||
extern crate itertools;
|
||||
extern crate num;
|
||||
extern crate ordered_float;
|
||||
|
@ -40,9 +38,7 @@ pub mod value_rc;
|
|||
pub use crate::value_rc::{Cloned, FromRc, ValueRc};
|
||||
|
||||
// Re-export the types we use.
|
||||
use bytes::Bytes;
|
||||
pub use chrono::{DateTime, Utc};
|
||||
use hex::decode;
|
||||
pub use num::BigInt;
|
||||
pub use ordered_float::OrderedFloat;
|
||||
pub use uuid::Uuid;
|
||||
|
@ -128,7 +124,7 @@ peg::parser!(pub grammar parse() for str {
|
|||
// result = r#""foo\\bar""#
|
||||
// For the typical case, string_normal_chars will match multiple, leading to a single-element vec.
|
||||
pub rule raw_text() -> String = "\"" t:((string_special_char() / string_normal_chars())*) "\""
|
||||
{ t.join("") }
|
||||
{ t.join(&"") }
|
||||
|
||||
pub rule text() -> SpannedValue
|
||||
= v:raw_text() { SpannedValue::Text(v) }
|
||||
|
@ -153,16 +149,16 @@ peg::parser!(pub grammar parse() for str {
|
|||
"#instmicros" whitespace()+ d:$( digit()+ ) {
|
||||
let micros = d.parse::<i64>().unwrap();
|
||||
let seconds: i64 = micros / 1_000_000;
|
||||
let nanos: u32 = ((micros % 1_000_000).unsigned_abs() as u32) * 1000;
|
||||
Utc.timestamp_opt(seconds, nanos).unwrap()
|
||||
let nanos: u32 = ((micros % 1_000_000).abs() as u32) * 1000;
|
||||
Utc.timestamp(seconds, nanos)
|
||||
}
|
||||
|
||||
rule inst_millis() -> DateTime<Utc> =
|
||||
"#instmillis" whitespace()+ d:$( digit()+ ) {
|
||||
let millis = d.parse::<i64>().unwrap();
|
||||
let seconds: i64 = millis / 1000;
|
||||
let nanos: u32 = ((millis % 1000).unsigned_abs() as u32) * 1_000_000;
|
||||
Utc.timestamp_opt(seconds, nanos).unwrap()
|
||||
let nanos: u32 = ((millis % 1000).abs() as u32) * 1_000_000;
|
||||
Utc.timestamp(seconds, nanos)
|
||||
}
|
||||
|
||||
rule inst() -> SpannedValue = t:(inst_millis() / inst_micros() / inst_string())
|
||||
|
@ -176,14 +172,6 @@ peg::parser!(pub grammar parse() for str {
|
|||
pub rule uuid() -> SpannedValue = "#uuid" whitespace()+ u:uuid_string()
|
||||
{ SpannedValue::Uuid(u) }
|
||||
|
||||
rule byte_buffer() -> Bytes =
|
||||
u:$( hex()+ ) {
|
||||
let b = decode(u).expect("this is a valid hex byte string");
|
||||
Bytes::copy_from_slice(&b)
|
||||
}
|
||||
pub rule bytes() -> SpannedValue = "#bytes" whitespace()+ u:byte_buffer()
|
||||
{ SpannedValue::Bytes(u) }
|
||||
|
||||
rule namespace_divider() = "."
|
||||
rule namespace_separator() = "/"
|
||||
|
||||
|
@ -231,7 +219,7 @@ peg::parser!(pub grammar parse() for str {
|
|||
|
||||
// Note: It's important that float comes before integer or the parser assumes that floats are integers and fails to parse.
|
||||
pub rule value() -> ValueAndSpan =
|
||||
__ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / bytes() / text() / keyword() / symbol() / list() / vector() / map() / set() ) end:position!() __ {
|
||||
__ start:position!() v:(nil() / nan() / infinity() / boolean() / number() / inst() / uuid() / text() / keyword() / symbol() / list() / vector() / map() / set()) end:position!() __ {
|
||||
ValueAndSpan {
|
||||
inner: v,
|
||||
span: Span::new(start, end)
|
||||
|
|
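Editorial note on the `#instmicros` / `#instmillis` hunks above: the newer side of the diff replaces chrono's `TimeZone::timestamp`, which panics and is deprecated in recent chrono 0.4 releases, with `timestamp_opt(...).unwrap()`, and replaces the signed `abs()` cast with `unsigned_abs()`. A minimal sketch, assuming chrono ~0.4; the helper name and sample value are only for illustration:

```rust
use chrono::{DateTime, TimeZone, Utc};

fn instant_from_micros(micros: i64) -> DateTime<Utc> {
    let seconds = micros / 1_000_000;
    // `unsigned_abs()` yields a non-negative remainder without casting
    // through a signed `abs()`, mirroring the parser change above.
    let nanos = ((micros % 1_000_000).unsigned_abs() as u32) * 1_000;
    // `timestamp_opt` returns a LocalResult; unwrap (or handle) it explicitly.
    Utc.timestamp_opt(seconds, nanos).unwrap()
}

fn main() {
    println!("{}", instant_from_micros(1_493_410_985_187_000));
}
```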
|
@ -121,7 +121,7 @@ impl NamespaceableName {
|
|||
if name.starts_with('_') {
|
||||
Self::new(self.namespace(), &name[1..])
|
||||
} else {
|
||||
Self::new(self.namespace(), format!("_{}", name))
|
||||
Self::new(self.namespace(), &format!("_{}", name))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -205,8 +205,8 @@ impl fmt::Display for NamespaceableName {
|
|||
// friendly and automatic (e.g. `derive`d), and just pass all work off to it in our custom
|
||||
// implementation of Serialize and Deserialize.
|
||||
#[cfg(feature = "serde_support")]
|
||||
#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "serde_support", serde(rename = "NamespaceableName"))]
|
||||
#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
|
||||
struct SerializedNamespaceableName<'a> {
|
||||
namespace: Option<&'a str>,
|
||||
name: &'a str,
|
||||
|
|
|
@ -57,11 +57,10 @@ impl Value {
|
|||
{
|
||||
let open = open.into();
|
||||
let n = open.len() as isize;
|
||||
let i = {
|
||||
let this = vs.into_iter().map(|v| v.as_doc(allocator));
|
||||
let element = allocator.line();
|
||||
Itertools::intersperse(this, element)
|
||||
};
|
||||
let i = vs
|
||||
.into_iter()
|
||||
.map(|v| v.as_doc(allocator))
|
||||
.intersperse(allocator.line());
|
||||
allocator
|
||||
.text(open)
|
||||
.append(allocator.concat(i).nest(n))
|
||||
|
@ -82,14 +81,11 @@ impl Value {
|
|||
Value::List(ref vs) => self.bracket(pp, "(", vs, ")"),
|
||||
Value::Set(ref vs) => self.bracket(pp, "#{", vs, "}"),
|
||||
Value::Map(ref vs) => {
|
||||
let xs = {
|
||||
let this = vs
|
||||
let xs = vs
|
||||
.iter()
|
||||
.rev()
|
||||
.map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group());
|
||||
let element = pp.line();
|
||||
Itertools::intersperse(this, element)
|
||||
};
|
||||
.map(|(k, v)| k.as_doc(pp).append(pp.line()).append(v.as_doc(pp)).group())
|
||||
.intersperse(pp.line());
|
||||
pp.text("{")
|
||||
.append(pp.concat(xs).nest(1))
|
||||
.append(pp.text("}"))
|
||||
|
@ -101,7 +97,7 @@ impl Value {
|
|||
Value::Text(ref v) => pp.text("\"").append(v.as_str()).append("\""),
|
||||
Value::Uuid(ref u) => pp
|
||||
.text("#uuid \"")
|
||||
.append(u.hyphenated().to_string())
|
||||
.append(u.to_hyphenated().to_string())
|
||||
.append("\""),
|
||||
Value::Instant(ref v) => pp
|
||||
.text("#inst \"")
|
||||
|
|
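Editorial note on the pretty-printer hunks above: the rewritten code calls `Itertools::intersperse` as a fully qualified associated function rather than by method syntax. On newer toolchains the method call is ambiguous with the unstable `Iterator::intersperse` in the standard library and triggers a future-incompatibility warning; the qualified call avoids that. A minimal sketch, assuming the itertools crate:

```rust
use itertools::Itertools;

fn main() {
    let words = ["a", "b", "c"];
    // `words.iter().intersperse(", ")` can collide with the unstable
    // std `Iterator::intersperse`; the qualified form is unambiguous.
    let joined: String = Itertools::intersperse(words.iter().copied(), ", ").collect();
    assert_eq!(joined, "a, b, c");
}
```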
|
@ -233,7 +233,7 @@ impl FromValue<FnArg> for FnArg {
|
|||
{
|
||||
Some(FnArg::Constant(x.clone().into()))
|
||||
}
|
||||
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) | Bytes(_) => None,
|
||||
Nil | NamespacedSymbol(_) | Vector(_) | List(_) | Set(_) | Map(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -410,7 +410,6 @@ impl FromValue<PatternValuePlace> for PatternValuePlace {
|
|||
crate::SpannedValue::List(_) => None,
|
||||
crate::SpannedValue::Set(_) => None,
|
||||
crate::SpannedValue::Vector(_) => None,
|
||||
crate::SpannedValue::Bytes(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1028,8 +1027,8 @@ impl ParsedQuery {
|
|||
Ok(ParsedQuery {
|
||||
find_spec: find_spec.ok_or("expected :find")?,
|
||||
default_source: SrcVar::DefaultSrc,
|
||||
with: with.unwrap_or_default(),
|
||||
in_vars: in_vars.unwrap_or_default(),
|
||||
with: with.unwrap_or_else(Vec::new), //
|
||||
in_vars: in_vars.unwrap_or_else(Vec::new),
|
||||
in_sources: BTreeSet::default(),
|
||||
limit: limit.unwrap_or(Limit::None),
|
||||
where_clauses: where_clauses.ok_or("expected :where")?,
|
||||
|
|
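Editorial note on the `ParsedQuery` hunk above: `unwrap_or_default()` and `unwrap_or_else(Vec::new)` both yield an empty vector; the former is simply the clippy-preferred spelling. A tiny sketch:

```rust
fn main() {
    let with: Option<Vec<String>> = None;
    // Equivalent to `with.unwrap_or_else(Vec::new)`: Default for Vec is empty.
    let with: Vec<String> = with.unwrap_or_default();
    assert!(with.is_empty());
}
```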
|
@ -27,8 +27,6 @@ use uuid::Uuid;
|
|||
|
||||
use crate::symbols;
|
||||
|
||||
use bytes::Bytes;
|
||||
use hex::encode;
|
||||
/// Value represents one of the allowed values in an EDN string.
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||
pub enum Value {
|
||||
|
@ -54,7 +52,6 @@ pub enum Value {
|
|||
// See https://internals.rust-lang.org/t/implementing-hash-for-hashset-hashmap/3817/1
|
||||
Set(BTreeSet<Value>),
|
||||
Map(BTreeMap<Value, Value>),
|
||||
Bytes(Bytes),
|
||||
}
|
||||
|
||||
/// `SpannedValue` is the parallel to `Value` but used in `ValueAndSpan`.
|
||||
|
@ -76,7 +73,6 @@ pub enum SpannedValue {
|
|||
List(LinkedList<ValueAndSpan>),
|
||||
Set(BTreeSet<ValueAndSpan>),
|
||||
Map(BTreeMap<ValueAndSpan, ValueAndSpan>),
|
||||
Bytes(Bytes),
|
||||
}
|
||||
|
||||
/// Span represents the current offset (start, end) into the input string.
|
||||
|
@ -176,7 +172,6 @@ impl From<SpannedValue> for Value {
|
|||
.map(|(x, y)| (x.without_spans(), y.without_spans()))
|
||||
.collect(),
|
||||
),
|
||||
SpannedValue::Bytes(b) => Value::Bytes(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -333,7 +328,6 @@ macro_rules! def_common_value_methods {
|
|||
def_is!(is_list, $t::List(_));
|
||||
def_is!(is_set, $t::Set(_));
|
||||
def_is!(is_map, $t::Map(_));
|
||||
def_is!(is_bytes, $t::Bytes(_));
|
||||
|
||||
pub fn is_keyword(&self) -> bool {
|
||||
match self {
|
||||
|
@ -366,7 +360,6 @@ macro_rules! def_common_value_methods {
|
|||
def_as_ref!(as_uuid, $t::Uuid, Uuid);
|
||||
def_as_ref!(as_symbol, $t::PlainSymbol, symbols::PlainSymbol);
|
||||
def_as_ref!(as_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol);
|
||||
def_as_ref!(as_bytes, $t::Bytes, Bytes);
|
||||
|
||||
pub fn as_keyword(&self) -> Option<&symbols::Keyword> {
|
||||
match self {
|
||||
|
@ -404,7 +397,6 @@ macro_rules! def_common_value_methods {
|
|||
def_into!(into_uuid, $t::Uuid, Uuid,);
|
||||
def_into!(into_symbol, $t::PlainSymbol, symbols::PlainSymbol,);
|
||||
def_into!(into_namespaced_symbol, $t::NamespacedSymbol, symbols::NamespacedSymbol,);
|
||||
def_into!(into_bytes, $t::Bytes, Bytes,);
|
||||
|
||||
pub fn into_keyword(self) -> Option<symbols::Keyword> {
|
||||
match self {
|
||||
|
@ -475,7 +467,6 @@ macro_rules! def_common_value_methods {
|
|||
$t::List(_) => 13,
|
||||
$t::Set(_) => 14,
|
||||
$t::Map(_) => 15,
|
||||
$t::Bytes(_) => 16,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -496,7 +487,6 @@ macro_rules! def_common_value_methods {
|
|||
$t::List(_) => true,
|
||||
$t::Set(_) => true,
|
||||
$t::Map(_) => true,
|
||||
$t::Bytes(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -534,7 +524,6 @@ macro_rules! def_common_value_ord {
|
|||
(&$t::List(ref a), &$t::List(ref b)) => b.cmp(a),
|
||||
(&$t::Set(ref a), &$t::Set(ref b)) => b.cmp(a),
|
||||
(&$t::Map(ref a), &$t::Map(ref b)) => b.cmp(a),
|
||||
(&$t::Bytes(ref a), &$t::Bytes(ref b)) => b.cmp(a),
|
||||
_ => $value.precedence().cmp(&$other.precedence()),
|
||||
}
|
||||
};
|
||||
|
@ -569,7 +558,7 @@ macro_rules! def_common_value_display {
|
|||
}
|
||||
// TODO: EDN escaping.
|
||||
$t::Text(ref v) => write!($f, "\"{}\"", v),
|
||||
$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.hyphenated().to_string()),
|
||||
$t::Uuid(ref u) => write!($f, "#uuid \"{}\"", u.to_hyphenated().to_string()),
|
||||
$t::PlainSymbol(ref v) => v.fmt($f),
|
||||
$t::NamespacedSymbol(ref v) => v.fmt($f),
|
||||
$t::Keyword(ref v) => v.fmt($f),
|
||||
|
@ -601,10 +590,6 @@ macro_rules! def_common_value_display {
|
|||
}
|
||||
write!($f, " }}")
|
||||
}
|
||||
$t::Bytes(ref v) => {
|
||||
let s = encode(v);
|
||||
write!($f, "#bytes {}", s)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -668,7 +653,7 @@ pub trait FromMicros {
|
|||
|
||||
impl FromMicros for DateTime<Utc> {
|
||||
fn from_micros(ts: i64) -> Self {
|
||||
Utc.timestamp_opt(ts / 1_000_000, ((ts % 1_000_000).unsigned_abs() as u32) * 1_000).unwrap()
|
||||
Utc.timestamp(ts / 1_000_000, ((ts % 1_000_000).abs() as u32) * 1_000)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -690,7 +675,7 @@ pub trait FromMillis {
|
|||
|
||||
impl FromMillis for DateTime<Utc> {
|
||||
fn from_millis(ts: i64) -> Self {
|
||||
Utc.timestamp_opt(ts / 1_000, ((ts % 1_000).unsigned_abs() as u32) * 1_000).unwrap()
|
||||
Utc.timestamp(ts / 1_000, ((ts % 1_000).abs() as u32) * 1_000)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,9 +21,9 @@ use crate::types::Value;
|
|||
/// TODO: implement `merge` for [Value], following the `concat`/`SliceConcatExt` pattern.
|
||||
pub fn merge(left: &Value, right: &Value) -> Option<Value> {
|
||||
match (left, right) {
|
||||
(Value::Map(l), Value::Map(r)) => {
|
||||
(&Value::Map(ref l), &Value::Map(ref r)) => {
|
||||
let mut result = l.clone();
|
||||
result.extend(r.clone());
|
||||
result.extend(r.clone().into_iter());
|
||||
Some(Value::Map(result))
|
||||
}
|
||||
_ => None,
|
||||
|
|
|
@ -82,7 +82,6 @@ fn_parse_into_value!(vector);
|
|||
fn_parse_into_value!(set);
|
||||
fn_parse_into_value!(map);
|
||||
fn_parse_into_value!(value);
|
||||
fn_parse_into_value!(bytes);
|
||||
|
||||
#[test]
|
||||
fn test_nil() {
|
||||
|
@ -317,38 +316,6 @@ fn test_uuid() {
|
|||
assert_eq!(value.to_pretty(100).unwrap(), s);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bytes() {
|
||||
assert!(parse::bytes("#bytes01 ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#bytes _ZZ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#bytes 01 ").is_err()); // No whitespace.
|
||||
assert!(parse::bytes("#01 ").is_err()); // No whitespace.
|
||||
|
||||
let expected = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
let s = format!("{} {}", "#bytes", hex::encode(expected.clone()));
|
||||
let actual: Value = parse::bytes(&s).expect("parse success").into();
|
||||
assert!(actual.is_bytes());
|
||||
assert_eq!(expected, actual.as_bytes().unwrap().to_vec());
|
||||
|
||||
assert_eq!(
|
||||
self::bytes("#bytes 010203050403022a").unwrap(),
|
||||
Value::Bytes(bytes::Bytes::copy_from_slice(&vec!(
|
||||
1, 2, 3, 5, 4, 3, 2, 42
|
||||
)))
|
||||
);
|
||||
let data =
|
||||
r#"[ { :test/instant #inst "2018-01-01T11:00:00Z" :test/bytes #bytes 010203050403022a } ]"#;
|
||||
let result = parse::value(data).unwrap().without_spans().to_string();
|
||||
assert_eq!(data, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_entities() {
|
||||
let d2 = r#"[ { :test/boolean true :test/long 33 :test/double 1.4 :test/string "foo" :test/keyword :foo/bar :test/uuid #uuid "12341234-1234-1234-1234-123412341234" :test/instant #inst "2018-01-01T11:00:00Z" :test/ref 1 :test/bytes #bytes 010203050403022a } ]"#;
|
||||
let r2 = parse::entities(d2);
|
||||
assert!(r2.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inst() {
|
||||
assert!(parse::value("#inst\"2016-01-01T11:00:00.000Z\"").is_err()); // No whitespace.
|
||||
|
@ -617,12 +584,6 @@ fn test_value() {
|
|||
value("#inst \"2017-04-28T20:23:05.187Z\"").unwrap(),
|
||||
Instant(Utc.timestamp(1493410985, 187000000))
|
||||
);
|
||||
assert_eq!(
|
||||
value("#bytes 010203050403022a").unwrap(),
|
||||
Bytes(bytes::Bytes::copy_from_slice(&vec!(
|
||||
1, 2, 3, 5, 4, 3, 2, 42
|
||||
)))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -1499,7 +1499,7 @@ pub unsafe extern "C" fn query_builder_bind_ref_kw(
|
|||
let kw = kw_from_string(c_char_to_string(value));
|
||||
let query_builder = &mut *query_builder;
|
||||
if let Some(err) = query_builder.bind_ref_from_kw(&var, kw).err() {
|
||||
std::panic::panic_any(err);
|
||||
panic!(err);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
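Editorial note on the FFI hunk above: in the 2021 edition, `panic!(err)` with a non-string payload is a compile error, so the code panics via `std::panic::panic_any` instead. A minimal sketch; the error type here is hypothetical:

```rust
#[derive(Debug)]
struct BindError(String);

fn bail(err: BindError) -> ! {
    // `panic!(err)` is rejected in edition 2021 for non-string payloads;
    // `panic_any` panics with the value itself as the payload.
    std::panic::panic_any(err)
}

fn main() {
    let caught = std::panic::catch_unwind(|| {
        bail(BindError("bad binding".into()));
    });
    assert!(caught.is_err());
}
```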
|
@ -16,15 +16,15 @@ syncable = ["tolstoy_traits", "hyper", "serde_json"]
|
|||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
http = "~0.2"
|
||||
tokio = { version = "1.8.0", features = ["full"] }
|
||||
uuid = "~1.0"
|
||||
tokio = { version = "~0.2", features = ["rt-core"] }
|
||||
uuid = "~0.8"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.hyper]
|
||||
version = "~0.14"
|
||||
version = "~0.13"
|
||||
optional = true
|
||||
|
||||
[dependencies.serde_json]
|
||||
|
|
|
@ -19,4 +19,4 @@ path = "../core-traits"
|
|||
path = "../query-algebrizer-traits"
|
||||
|
||||
[dev-dependencies]
|
||||
itertools = "~0.10"
|
||||
itertools = "~0.9"
|
||||
|
|
|
@ -34,7 +34,6 @@ fn prepopulated_schema() -> Schema {
|
|||
.define_simple_attr("test", "uuid", ValueType::Uuid, false)
|
||||
.define_simple_attr("test", "instant", ValueType::Instant, false)
|
||||
.define_simple_attr("test", "ref", ValueType::Ref, false)
|
||||
.define_simple_attr("test", "bytes", ValueType::Bytes, false)
|
||||
.schema
|
||||
}
|
||||
|
||||
|
|
|
@ -15,7 +15,7 @@ failure = "~0.1"
|
|||
failure_derive = "~0.1"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
|
|
@ -110,7 +110,7 @@ impl SimpleAggregationOp {
|
|||
String => Ok(the_type),
|
||||
|
||||
// Unordered types.
|
||||
Keyword | Ref | Uuid | Bytes => {
|
||||
Keyword | Ref | Uuid => {
|
||||
bail!(ProjectorError::CannotApplyAggregateOperationToTypes(
|
||||
self,
|
||||
possibilities
|
||||
|
|
|
@ -8,10 +8,10 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
|
||||
[dependencies]
|
||||
failure = "~0.1"
|
||||
indexmap = "~1.9"
|
||||
indexmap = "~1.5"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.core_traits]
|
||||
|
|
|
@ -123,7 +123,7 @@ impl TupleTwoStagePullProjector {
|
|||
// There will be at least as many SQL columns as Datalog columns.
|
||||
// gte 'cos we might be querying extra columns for ordering.
|
||||
// The templates will take care of ignoring columns.
|
||||
assert!(row.as_ref().column_count() >= self.len);
|
||||
assert!(row.column_count() >= self.len);
|
||||
self.templates
|
||||
.iter()
|
||||
.map(|ti| ti.lookup(row))
|
||||
|
@ -226,7 +226,7 @@ impl RelTwoStagePullProjector {
|
|||
// There will be at least as many SQL columns as Datalog columns.
|
||||
// gte 'cos we might be querying extra columns for ordering.
|
||||
// The templates will take care of ignoring columns.
|
||||
assert!(row.as_ref().column_count() >= self.len);
|
||||
assert!(row.column_count() >= self.len);
|
||||
let mut count = 0;
|
||||
for binding in self.templates.iter().map(|ti| ti.lookup(&row)) {
|
||||
out.push(binding?);
|
||||
|
|
|
@ -93,7 +93,7 @@ impl TupleProjector {
|
|||
// There will be at least as many SQL columns as Datalog columns.
|
||||
// gte 'cos we might be querying extra columns for ordering.
|
||||
// The templates will take care of ignoring columns.
|
||||
assert!(row.as_ref().column_count() >= self.len);
|
||||
assert!(row.column_count() >= self.len);
|
||||
self.templates
|
||||
.iter()
|
||||
.map(|ti| ti.lookup(&row))
|
||||
|
@ -163,7 +163,7 @@ impl RelProjector {
|
|||
// There will be at least as many SQL columns as Datalog columns.
|
||||
// gte 'cos we might be querying extra columns for ordering.
|
||||
// The templates will take care of ignoring columns.
|
||||
assert!(row.as_ref().column_count() >= self.len);
|
||||
assert!(row.column_count() >= self.len);
|
||||
let mut count = 0;
|
||||
for binding in self.templates.iter().map(|ti| ti.lookup(&row)) {
|
||||
out.push(binding?);
|
||||
|
|
|
@ -13,7 +13,7 @@ failure = "~0.1"
|
|||
path = "../query-pull-traits"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
|
|
@ -4,7 +4,7 @@ version = "0.0.2"
|
|||
workspace = ".."
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
nightly-2023-11-27
|
shell.nix — 38 changed lines
|
@ -1,38 +0,0 @@
|
|||
{ pkgs ? import <nixpkgs> {} }:
|
||||
pkgs.mkShell rec {
|
||||
buildInputs = with pkgs; [
|
||||
# Necessary for the openssl-sys crate:
|
||||
pkgs.openssl
|
||||
pkgs.pkg-config
|
||||
# Compiler
|
||||
clang
|
||||
# Replace llvmPackages with llvmPackages_X, where X is the latest LLVM version (at the time of writing, 16)
|
||||
llvmPackages.bintools
|
||||
rustup
|
||||
];
|
||||
RUSTC_VERSION = pkgs.lib.readFile ./rust-toolchain;
|
||||
# https://github.com/rust-lang/rust-bindgen#environment-variables
|
||||
LIBCLANG_PATH = pkgs.lib.makeLibraryPath [ pkgs.llvmPackages_latest.libclang.lib ];
|
||||
shellHook = ''
|
||||
export PATH=$PATH:''${CARGO_HOME:-~/.cargo}/bin
|
||||
export PATH=$PATH:''${RUSTUP_HOME:-~/.rustup}/toolchains/$RUSTC_VERSION-x86_64-unknown-linux-gnu/bin/
|
||||
'';
|
||||
# Add precompiled library to rustc search path
|
||||
RUSTFLAGS = (builtins.map (a: ''-L ${a}/lib'') [
|
||||
# add libraries here (e.g. pkgs.libvmi)
|
||||
]);
|
||||
# Add glibc, clang, glib and other headers to bindgen search path
|
||||
BINDGEN_EXTRA_CLANG_ARGS =
|
||||
# Includes with normal include path
|
||||
(builtins.map (a: ''-I"${a}/include"'') [
|
||||
# add dev libraries here (e.g. pkgs.libvmi.dev)
|
||||
pkgs.glibc.dev
|
||||
])
|
||||
# Includes with special directory paths
|
||||
++ [
|
||||
''-I"${pkgs.llvmPackages_latest.libclang.lib}/lib/clang/${pkgs.llvmPackages_latest.libclang.version}/include"''
|
||||
''-I"${pkgs.glib.dev}/include/glib-2.0"''
|
||||
''-I${pkgs.glib.out}/lib/glib-2.0/include/''
|
||||
];
|
||||
|
||||
}
|
|
@ -8,10 +8,10 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
|
||||
[dependencies]
|
||||
failure = "~0.1"
|
||||
ordered-float = "~2.8"
|
||||
ordered-float = "~2.0"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.core_traits]
|
||||
|
|
|
@ -181,17 +181,6 @@ impl QueryBuilder for SQLiteQueryBuilder {
|
|||
let v = Rc::new(rusqlite::types::Value::Text(s.as_ref().to_string()));
|
||||
self.push_static_arg(v);
|
||||
}
|
||||
Bytes(b) => {
|
||||
let bytes = b.to_vec();
|
||||
if let Some(arg) = self.byte_args.get(&bytes).cloned() {
|
||||
// Why, borrow checker, why?!
|
||||
self.push_named_arg(arg.as_str());
|
||||
} else {
|
||||
let arg = self.next_argument_name();
|
||||
self.push_named_arg(arg.as_str());
|
||||
self.byte_args.insert(bytes, arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -424,7 +424,7 @@ mod test {
|
|||
entid: i64,
|
||||
boolean: bool,
|
||||
long_val: i64,
|
||||
}
|
||||
};
|
||||
|
||||
let mut results: Vec<Res> = QueryBuilder::new(
|
||||
&mut store,
|
||||
|
|
|
@ -20,7 +20,6 @@ use chrono::FixedOffset;
|
|||
|
||||
use core_traits::{Entid, KnownEntid, ValueType, ValueTypeSet};
|
||||
|
||||
use edn::OrderedFloat;
|
||||
use mentat_core::{DateTime, HasSchema, Utc, Uuid};
|
||||
|
||||
use query_projector_traits::aggregates::SimpleAggregationOp;
|
||||
|
@ -254,7 +253,7 @@ fn test_unbound_inputs() {
|
|||
fn test_instants_and_uuids() {
|
||||
// We assume, perhaps foolishly, that the clocks on test machines won't lose more than an
|
||||
// hour while this test is running.
|
||||
let start = Utc::now() + FixedOffset::west_opt(60 * 60).unwrap();
|
||||
let start = Utc::now() + FixedOffset::west(60 * 60);
|
||||
|
||||
let mut c = new_connection("").expect("Couldn't open conn.");
|
||||
let mut conn = Conn::connect(&mut c).expect("Couldn't open DB.");
|
||||
|
@ -471,7 +470,7 @@ fn test_fulltext() {
|
|||
) => {
|
||||
assert_eq!(x, v);
|
||||
assert_eq!(text.as_str(), "hello darkness my old friend");
|
||||
assert_approx_eq!(score, OrderedFloat(0.0f64));
|
||||
assert_approx_eq!(score, 0.0f64.into());
|
||||
}
|
||||
_ => panic!("Unexpected results."),
|
||||
}
|
||||
|
@ -835,7 +834,6 @@ fn test_type_reqs() {
|
|||
{:db/ident :test/uuid :db/valueType :db.type/uuid :db/cardinality :db.cardinality/one}
|
||||
{:db/ident :test/instant :db/valueType :db.type/instant :db/cardinality :db.cardinality/one}
|
||||
{:db/ident :test/ref :db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
|
||||
{:db/ident :test/bytes :db/valueType :db.type/bytes :db/cardinality :db.cardinality/one}
|
||||
]"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
@ -850,8 +848,7 @@ fn test_type_reqs() {
|
|||
:test/keyword :foo/bar
|
||||
:test/uuid #uuid "12341234-1234-1234-1234-123412341234"
|
||||
:test/instant #inst "2018-01-01T11:00:00.000Z"
|
||||
:test/ref 1
|
||||
:test/bytes #bytes 010203050403022a }
|
||||
:test/ref 1}
|
||||
]"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
@ -1956,7 +1953,7 @@ fn run_tx_data_test(mut store: Store) {
|
|||
}
|
||||
x => panic!("Got unexpected results {:?}", x),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
assert_tx_data(&store, &tx1, "1".into());
|
||||
assert_tx_data(&store, &tx2, "2".into());
|
||||
|
|
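Editorial note on the test hunk above: recent chrono 0.4 releases deprecate `FixedOffset::west` in favour of `west_opt`, which returns `None` for out-of-range offsets instead of panicking. A minimal sketch, assuming chrono ~0.4:

```rust
use chrono::{FixedOffset, Utc};

fn main() {
    // One hour west of UTC; `west_opt` returns None if the offset is invalid.
    let offset = FixedOffset::west_opt(60 * 60).expect("valid offset");
    let shifted = Utc::now() + offset;
    println!("{}", shifted);
}
```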
|
@ -1145,7 +1145,7 @@ fn test_upgrade_with_functions() {
|
|||
)?;
|
||||
ip.transact_builder(builder).and(Ok(())).map_err(|e| e)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
let mut in_progress = store.begin_transaction().expect("began");
|
||||
|
|
|
@ -14,12 +14,12 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
http = "~0.2"
|
||||
hyper = "~0.14"
|
||||
hyper = "~0.13"
|
||||
serde_json = "~1.0"
|
||||
uuid = { version = "~1" }
|
||||
uuid = { version = "~0.8" }
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.db_traits]
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
[package]
|
||||
edition = "2021"
|
||||
edition = "2018"
|
||||
name = "mentat_tolstoy"
|
||||
version = "0.0.2"
|
||||
workspace = ".."
|
||||
|
@ -11,21 +11,21 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
[dependencies]
|
||||
failure = "~0.1"
|
||||
futures = "~0.3"
|
||||
hyper = { version = "~0.14", features = ["full"] }
|
||||
hyper-tls = "~0.5"
|
||||
hyper = "~0.13"
|
||||
hyper-tls = "~0.4"
|
||||
http = "~0.2"
|
||||
log = "~0.4"
|
||||
mime = "~0.3"
|
||||
tokio = { version = "1.8.0", features = ["full"] }
|
||||
tokio = { version = "~0.2", features = ["full"] }
|
||||
serde = "~1.0"
|
||||
serde_json = "~1.0"
|
||||
serde_cbor = "~0.11"
|
||||
serde_derive = "~1.0"
|
||||
lazy_static = "~1.4"
|
||||
uuid = { version = "~1", features = ["v4", "serde"] }
|
||||
uuid = { version = "~0.8", features = ["v4", "serde"] }
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
|
|
@ -741,14 +741,13 @@ impl Syncer {
|
|||
// Since we've "merged" with the remote bootstrap, the "no-op" and
|
||||
// "local fast-forward" cases are reported as merges.
|
||||
match Syncer::what_do(remote_state, local_state) {
|
||||
SyncAction::NoOp => Ok(SyncReport::Merge(SyncFollowup::None)),
|
||||
SyncAction::NoOp => {
|
||||
Ok(SyncReport::Merge(SyncFollowup::None))
|
||||
}
|
||||
|
||||
SyncAction::PopulateRemote => {
|
||||
// This is a programming error.
|
||||
bail!(TolstoyError::UnexpectedState(
|
||||
"Remote state can't be empty on first sync against non-empty remote"
|
||||
.to_string()
|
||||
))
|
||||
bail!(TolstoyError::UnexpectedState("Remote state can't be empty on first sync against non-empty remote".to_string()))
|
||||
}
|
||||
|
||||
SyncAction::RemoteFastForward => {
|
||||
|
@ -762,11 +761,12 @@ impl Syncer {
|
|||
|
||||
SyncAction::CombineChanges => {
|
||||
let local_txs = Processor::process(
|
||||
&ip.transaction,
|
||||
Some(local_metadata.root),
|
||||
LocalTxSet::new(),
|
||||
)?;
|
||||
Syncer::merge(ip, incoming_txs[1..].to_vec(), local_txs)
|
||||
&ip.transaction, Some(local_metadata.root), LocalTxSet::new())?;
|
||||
Syncer::merge(
|
||||
ip,
|
||||
incoming_txs[1..].to_vec(),
|
||||
local_txs,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,9 +19,9 @@ doc = false
|
|||
test = false
|
||||
|
||||
[dependencies]
|
||||
combine = "~4.6"
|
||||
dirs = "~4.0"
|
||||
env_logger = "~0.9"
|
||||
combine = "~4.3"
|
||||
dirs = "~3.0"
|
||||
env_logger = "~0.8"
|
||||
failure = "~0.1"
|
||||
failure_derive = "~0.1"
|
||||
getopts = "~0.2"
|
||||
|
@ -29,14 +29,12 @@ lazy_static = "~1.4"
|
|||
linefeed = "~0.6"
|
||||
log = "~0.4"
|
||||
tabwriter = "~1.2"
|
||||
tempfile = "~3.5"
|
||||
tempfile = "~3.1"
|
||||
termion = "~1.5"
|
||||
time = "~0.3"
|
||||
bytes = { version = "1.0.1", features = ["serde"] }
|
||||
hex = "0.4.3"
|
||||
time = "~0.2"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.mentat]
|
||||
|
|
|
@ -613,7 +613,6 @@ impl Repl {
|
|||
Ref(r) => format!("{}", r),
|
||||
String(ref s) => format!("{:?}", s.to_string()),
|
||||
Uuid(ref u) => format!("{}", u),
|
||||
Bytes(b) => format!("#bytes {:?}", b.to_vec()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ sqlcipher = ["rusqlite/sqlcipher"]
|
|||
failure = "~0.1"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "~0.29"
|
||||
version = "~0.24"
|
||||
features = ["limits", "bundled"]
|
||||
|
||||
[dependencies.edn]
|
||||
|
|