Remove Clojure and JS application code.

parent 44d50c9005
commit cbd278dd7e

45 changed files with 0 additions and 6774 deletions
@@ -1,47 +0,0 @@
(ns datomish.cljify)

(defn cljify
  "Does what `(js->clj o :keywordize-keys true)` is supposed to do, but works
  in environments with more than one context (e.g., web browsers).

  See <http://dev.clojure.org/jira/browse/CLJS-439?focusedCommentId=43909>.

  Note that Date instances are passed through."
  [o]
  (cond
    (nil? o)
    nil

    ;; Primitives.
    (or
      (true? o)
      (false? o)
      (number? o)
      (string? o)
      ;; Dates are passed through.
      (not (nil? (aget (aget o "__proto__") "getUTCMilliseconds"))))
    o

    ;; Array.
    (.isArray js/Array o)
    (let [n (.-length o)]
      (loop [i 0
             acc (transient [])]
        (if (< i n)
          (recur (inc i) (conj! acc (cljify (aget o i))))
          (persistent! acc))))

    ;; Object.
    (not (nil? (aget (aget o "__proto__") "hasOwnProperty")))
    (let [a (.keys js/Object o)
          n (.-length a)]
      (loop [i 0
             acc (transient {})]
        (if (< i n)
          (let [key (aget a i)]
            (recur (inc i) (assoc! acc
                                   (keyword key)
                                   (cljify (aget o key)))))
          (persistent! acc))))

    :else o))
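For context, a minimal usage sketch of the deleted cljify helper; the input literal is illustrative, not taken from this commit:

  ;; Keywordizes keys and recurses into nested arrays/objects, even across JS contexts.
  (cljify #js {:title "Example" :tags #js ["a" "b"] :visits 3})
  ;; => {:title "Example", :tags ["a" "b"], :visits 3}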
@@ -1,19 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.core
  (:require
   [honeysql.format :as sql]
   [datomish.db :as db]
   [datomish.db-factory :as db-factory]
   [datomish.js-sqlite :as js-sqlite]
   [datomish.sqlite :as sqlite]
   [datomish.transact :as transact]))
@@ -1,26 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.js-sqlite
  (:require
   [datomish.sqlite :as s]
   [datomish.js-util :refer [is-node?]]
   [datomish.sqlitejsm-sqlite :as sqlitejsm-sqlite]))

(def open sqlitejsm-sqlite/open)

(extend-protocol s/ISQLiteConnectionFactory
  string
  (<sqlite-connection [path]
    (open path))

  object
  (<sqlite-connection [tempfile]
    (open (.-name tempfile))))
@@ -1,14 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.preload)

(enable-console-print!)
(println "Console printing enabled.")
@@ -1,63 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.sqlitejsm-sqlite
  (:require
   [cljs-promises.async]
   [datomish.sqlite :as s]))

(def sqlite (.import (aget (js/require "chrome") "Cu") "resource://gre/modules/Sqlite.jsm"))

(println "sqlite is" (pr-str sqlite))

;; mozIStorageRow instances expose two methods: getResultByIndex and getResultByName.
;; Our code expects to treat rows as associative containers, from keyword to value.
;; So we implement ILookup (which has a different signature for ClojureScript than
;; Clojure!), hope that we handle nil/NULL correctly, and switch between integers
;; and keywords.
(deftype
  StorageRow
  [row]

  ILookup
  (-lookup [o k]
    (-lookup o k nil))

  (-lookup [o k not-found]
    (or (if (integer? k)
          (.getResultByIndex row k)
          (.getResultByName row (clj->js (name k))))
        not-found)))

(defrecord SQLite3Connection [db]
  s/ISQLiteConnection
  (-execute!
    [db sql bindings]
    (cljs-promises.async/pair-port
      (.execute (.-db db) sql (or (clj->js bindings) #js []))))

  (-each
    [db sql bindings row-cb]
    (let [cb (fn [row]
               (row-cb (StorageRow. row)))]
      (cljs-promises.async/pair-port
        (.execute (.-db db) sql (or (clj->js bindings) #js []) (when row-cb cb)))))

  (close
    [db]
    (cljs-promises.async/pair-port
      (.close (.-db db)))))

(defn open
  [path & {:keys [mode] :or {mode 6}}]
  (cljs-promises.async/pair-port
    (->
      (.openConnection (aget sqlite "Sqlite") (clj->js {:path path :sharedMemoryCache false}))
      (.then ->SQLite3Connection))))
@@ -1,41 +0,0 @@
var Object = {};
Object.keys = function (object) {};
Object.__proto__ = {};
Object.hasOwnProperty = function () {};
var Array = {};
Array.length = 0;
Array.isArray = function () {};

var SqliteStatic = {};

/**
 * @param {Object} options
 * @return {Promise.<Sqlite>}
 */
SqliteStatic.openConnection = function (options) {}

var Sqlite = {}

/**
 * @param {string} sql
 * @param {Array} bindings
 * @return {Promise}
 */
Sqlite.execute = function (sql, bindings) {}

/**
 * @return {Promise}
 */
Sqlite.close = function() {}

var StorageRow = {};

/**
 * @param {string} index
 */
StorageRow.getResultByIndex = function (index) {}

/**
 * @param {string} name
 */
StorageRow.getResultByName = function (name) {}
@@ -1,56 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.api
  #?(:cljs
     (:require-macros
      [datomish.pair-chan :refer [go-pair <?]]
      [cljs.core.async.macros :as a :refer [go]]))
  (:require
   [datomish.db :as db]
   [datomish.db-factory :as db-factory]
   [datomish.sqlite :as sqlite]
   [datomish.transact :as transact]
   #?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
             [clojure.core.async :refer [go <! >!]]])
   #?@(:cljs [[datomish.pair-chan]
              [cljs.core.async :as a :refer [<! >!]]])))

(defn <connect [uri]
  ;; Eventually, URI. For now, just a plain path (no file://).
  (go-pair
    (let [conn (<? (sqlite/<sqlite-connection uri))
          db (<? (db-factory/<db-with-sqlite-connection conn))]
      (transact/connection-with-db db))))

(def <transact! transact/<transact!)

(def listen! transact/listen!)
(def listen-chan! transact/listen-chan!)
(def unlisten-chan! transact/unlisten-chan!)

;; TODO: use Potemkin, or a subset of Potemkin that is CLJS friendly (like
;; https://github.com/ztellman/potemkin/issues/31) to improve this re-exporting process.
(def <close transact/close)

(def id-literal db/id-literal)
(def id-literal? db/id-literal?)

(def lookup-ref db/lookup-ref)

(def db transact/db)

(def entid db/entid)

(def ident db/ident)

(def <q db/<?q)

(def schema db/schema)
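A sketch of how this re-exported API was typically driven; the path, attribute, and query are illustrative and assume the relevant schema is already installed (go-pair and <? come from datomish.pair-chan):

  ;; All <-prefixed functions return pair channels, so they compose inside go-pair.
  (go-pair
    (let [conn (<? (<connect "/tmp/example.db"))]   ; plain path, no file://
      (<? (<transact! conn [{:db/id (id-literal :db.part/user)
                             :page/url "http://example.com/"}]))
      (<? (<q (db conn)
              '[:find ?url :in $ :where [?page :page/url ?url]]
              {}))))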
@@ -1,143 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

;; Purloined from DataScript.

(ns datomish.datom)

(declare hash-datom equiv-datom seq-datom val-at-datom nth-datom assoc-datom)

(deftype Datom [e a v tx added]
  #?@(:cljs
      [IHash
       (-hash [d] (or (.-__hash d)
                      (set! (.-__hash d) (hash-datom d))))
       IEquiv
       (-equiv [d o] (and (instance? Datom o) (equiv-datom d o)))

       ISeqable
       (-seq [d] (seq-datom d))

       ILookup
       (-lookup [d k] (val-at-datom d k nil))
       (-lookup [d k nf] (val-at-datom d k nf))

       IIndexed
       (-nth [this i] (nth-datom this i))
       (-nth [this i not-found] (nth-datom this i not-found))

       IAssociative
       (-assoc [d k v] (assoc-datom d k v))

       IPrintWithWriter
       (-pr-writer [d writer opts]
                   (pr-sequential-writer writer pr-writer
                                         "#datascript/Datom [" " " "]"
                                         opts [(.-e d) (.-a d) (.-v d) (.-tx d) (.-added d)]))
       ]
      :clj
      [Object
       (hashCode [d] (hash-datom d))

       clojure.lang.IHashEq
       (hasheq [d] (hash-datom d))

       clojure.lang.Seqable
       (seq [d] (seq-datom d))

       clojure.lang.IPersistentCollection
       (equiv [d o] (and (instance? Datom o) (equiv-datom d o)))
       (empty [d] (throw (UnsupportedOperationException. "empty is not supported on Datom")))
       (count [d] 5)
       (cons [d [k v]] (assoc-datom d k v))

       clojure.lang.Indexed
       (nth [this i] (nth-datom this i))
       (nth [this i not-found] (nth-datom this i not-found))

       clojure.lang.ILookup
       (valAt [d k] (val-at-datom d k nil))
       (valAt [d k nf] (val-at-datom d k nf))

       clojure.lang.Associative
       (entryAt [d k] (some->> (val-at-datom d k nil) (clojure.lang.MapEntry k)))
       (containsKey [e k] (#{:e :a :v :tx :added} k))
       (assoc [d k v] (assoc-datom d k v))
       ]))

(defn ^Datom datom
  ([e a v tx] (Datom. e a v tx true))
  ([e a v tx added] (Datom. e a v tx added)))

(defn datom? [x] (instance? Datom x))

(defn- hash-datom [^Datom d]
  (-> (hash (.-e d))
      (hash-combine (hash (.-a d)))
      (hash-combine (hash (.-v d)))))

(defn- equiv-datom [^Datom d ^Datom o]
  (and (= (.-e d) (.-e o))
       (= (.-a d) (.-a o))
       (= (.-v d) (.-v o))))

(defn- seq-datom [^Datom d]
  (list (.-e d) (.-a d) (.-v d) (.-tx d) (.-added d)))

;; keep it fast by duplicating for both keyword and string cases
;; instead of using sets or some other matching func
(defn- val-at-datom [^Datom d k not-found]
  (case k
    :e (.-e d) "e" (.-e d)
    :a (.-a d) "a" (.-a d)
    :v (.-v d) "v" (.-v d)
    :tx (.-tx d) "tx" (.-tx d)
    :added (.-added d) "added" (.-added d)
    not-found))

(defn- nth-datom
  ([^Datom d ^long i]
   (case i
     0 (.-e d)
     1 (.-a d)
     2 (.-v d)
     3 (.-tx d)
     4 (.-added d)
     #?(:clj (throw (IndexOutOfBoundsException.))
        :cljs (throw (js/Error. (str "Datom/-nth: Index out of bounds: " i))))))
  ([^Datom d ^long i not-found]
   (case i
     0 (.-e d)
     1 (.-a d)
     2 (.-v d)
     3 (.-tx d)
     4 (.-added d)
     not-found)))

(defn- ^Datom assoc-datom [^Datom d k v]
  (case k
    :e (Datom. v (.-a d) (.-v d) (.-tx d) (.-added d))
    :a (Datom. (.-e d) v (.-v d) (.-tx d) (.-added d))
    :v (Datom. (.-e d) (.-a d) v (.-tx d) (.-added d))
    :tx (Datom. (.-e d) (.-a d) (.-v d) v (.-added d))
    :added (Datom. (.-e d) (.-a d) (.-v d) (.-tx d) v)
    #?(:clj (throw (IllegalArgumentException. (str "invalid key for #datascript/Datom: " k)))
       :cljs (throw (js/Error. (str "invalid key for #datascript/Datom: " k))))))

;; printing and reading

(defn ^Datom datom-from-reader [vec]
  (apply datom vec))

#?(:clj
   (defmethod print-method Datom [^Datom d, ^java.io.Writer w]
     (.write w (str "#datascript/Datom "))
     (binding [*out* w]
       (pr [(.-e d) (.-a d) (.-v d) (.-tx d) (.-added d)]))))
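A short illustration of the accessor behaviour this Datom type provided (the values are arbitrary):

  (let [d (datom 1 :page/title "Example" 10)]   ; added defaults to true
    [(:a d) (get d "v") (nth d 3) (:v (assoc d :v "New")) (datom? d)])
  ;; => [:page/title "Example" 10 "New" true]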
File diff suppressed because it is too large.
@@ -1,74 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.db.debug
  #?(:cljs
     (:require-macros
      [datomish.pair-chan :refer [go-pair <?]]
      [cljs.core.async.macros :refer [go]]))
  (:require
   [datomish.db :as db]
   [datomish.datom :as dd :refer [datom datom? #?@(:cljs [Datom])]]
   [datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
   [datomish.sqlite :as s]
   #?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
             [clojure.core.async :as a :refer [chan go <! >!]]])
   #?@(:cljs [[datomish.pair-chan]
              [cljs.core.async :as a :refer [chan <! >!]]]))
  #?(:clj
     (:import
      [datomish.datom Datom])))

(defn <datoms-after [db tx]
  (go-pair
    (->>
      (s/all-rows (:sqlite-connection db) ["SELECT e, a, v, tx FROM datoms WHERE tx > ?" tx])
      (<?)
      (mapv #(vector (:e %) (db/ident db (:a %)) (:v %)))
      (filter #(not (= :db/txInstant (second %))))
      (set))))

(defn <datoms>= [db tx]
  (go-pair
    (->>
      (s/all-rows (:sqlite-connection db) ["SELECT e, a, v, tx FROM datoms WHERE tx >= ?" tx])
      (<?)
      (mapv #(vector (:e %) (db/ident db (:a %)) (:v %)))
      (filter #(not (= :db/txInstant (second %))))
      (set))))

(defn <datoms [db]
  (<datoms-after db 0))

(defn <shallow-entity [db eid]
  ;; TODO: make this actually be <entity. Handle :db.cardinality/many and :db/isComponent.
  (go-pair
    (->>
      (s/all-rows (:sqlite-connection db) ["SELECT a, v FROM datoms WHERE e = ?" eid])
      (<?)
      (mapv #(vector (db/ident db (:a %)) (:v %)))
      (reduce conj {}))))

(defn <transactions-after [db tx]
  (go-pair
    (->>
      (s/all-rows (:sqlite-connection db) ["SELECT e, a, v, tx, added FROM transactions WHERE tx > ? ORDER BY tx ASC, e, a, v, added" tx])
      (<?)
      (mapv #(vector (:e %) (db/ident db (:a %)) (:v %) (:tx %) (:added %))))))

(defn <transactions [db]
  (<transactions-after db 0))

(defn <fulltext-values [db]
  (go-pair
    (->>
      (s/all-rows (:sqlite-connection db) ["SELECT rowid, text FROM fulltext_values"])
      (<?)
      (mapv #(vector (:rowid %) (:text %))))))
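A sketch of invoking these debug helpers at a REPL; db is whatever <db-with-sqlite-connection yields, and the entity id and result shape shown are illustrative:

  (go-pair
    (<? (<shallow-entity db 65536)))
  ;; => {:page/url "http://example.com/", :page/title "Example"}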
@@ -1,184 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.db-factory
  #?(:cljs
     (:require-macros
      [datomish.pair-chan :refer [go-pair <?]]
      [cljs.core.async.macros :refer [go]]))
  (:require
   [datomish.db :as db]
   [datomish.transact :as transact]
   [datomish.transact.bootstrap :as bootstrap]
   [datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
   [datomish.schema :as ds]
   [datomish.sqlite :as s]
   [datomish.sqlite-schema :as sqlite-schema]
   #?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
             [clojure.core.async :as a :refer [chan go <! >!]]])
   #?@(:cljs [[datomish.pair-chan]
              [cljs.core.async :as a :refer [chan <! >!]]]))
  #?(:clj
     (:import
      [datomish.datom Datom])))

(defn <idents [sqlite-connection]
  "Read the ident map materialized view from the given SQLite store.
  Returns a map (keyword ident) -> (integer entid), like {:db/ident 0}."

  (go-pair
    (let [rows (<? (s/all-rows sqlite-connection ["SELECT ident, entid FROM idents"]))]
      (into {} (map (fn [row] [(sqlite-schema/<-SQLite :db.type/keyword (:ident row)) (:entid row)])) rows))))

(defn <parts [sqlite-connection]
  "Read the parts map materialized view from the given SQLite store.
  Returns a map (keyword part) -> {:start integer :idx integer}, like {:db.part/user {start: 0x100 idx: 0x101}}."

  (go-pair
    (let [rows (<? (s/all-rows sqlite-connection ["SELECT part, start, idx FROM parts"]))]
      (into {} (map (fn [row] [(sqlite-schema/<-SQLite :db.type/keyword (:part row)) (select-keys row [:start :idx])])) rows))))

(defn <bootstrapper! [sqlite-connection from-version]
  (let [exec (partial s/execute! sqlite-connection)
        part->vector (fn [[part {:keys [start idx]}]]
                       [(sqlite-schema/->SQLite part) start idx])
        fail-alter-attr
        (fn [old new]
          (if-not (= old new)
            (raise "Altering schema attributes is not yet supported, got " new " altering existing schema attribute " old
                   {:error :schema/alter-schema :old old :new new})
            new))]
    (case from-version
      0
      (go-pair
        ;; TODO: think more carefully about allocating new parts and bitmasking part ranges.
        ;; TODO: install these using bootstrap assertions. It's tricky because the part ranges are implicit.
        ;; TODO: chunk into 999/3 sections, for safety.
        (<? (exec
              (cons (str "INSERT INTO parts VALUES "
                         (apply str (interpose ", " (repeat (count bootstrap/parts) "(?, ?, ?)"))))
                    (mapcat part->vector bootstrap/parts))))

        ;; We use <with-internal rather than <transact! to apply the bootstrap transaction
        ;; data but to not follow the regular schema application process. We can't apply the
        ;; schema changes, since the applied datoms would conflict with the bootstrapping
        ;; idents and schema. (The bootstrapping idents and schema are required to be able to
        ;; write to the database conveniently; without them, we'd have to manually write
        ;; datoms to the store. It's feasible but awkward.) After bootstrapping, we read
        ;; back the idents and schema, just like when we re-open.
        ;;
        ;; Note that we use `bootstrap/parts` here to initialize our DB… and that means we
        ;; have a fixed starting tx.
        (<? (transact/<with-internal
              (db/db sqlite-connection bootstrap/idents bootstrap/parts bootstrap/symbolic-schema)
              (bootstrap/tx-data bootstrap/idents bootstrap/symbolic-schema)
              fail-alter-attr)))

      1
      ;; We just need to add the new stuff.
      (go-pair
        (<?
          (transact/<with-internal
            ;; We read the parts out of the DB so we don't accidentally reuse a tx ID.
            ;; We use the v1 symbolic schema so that the rest of the system doesn't
            ;; get confused and think we're implicitly altering an existing schema.
            (db/db sqlite-connection
                   bootstrap/idents
                   (<? (<parts sqlite-connection))
                   bootstrap/v1-symbolic-schema)
            (bootstrap/tx-data bootstrap/v2-idents bootstrap/v2-symbolic-schema)
            fail-alter-attr))))))

(defn <symbolic-schema [sqlite-connection idents]
  "Read the schema map materialized view from the given SQLite store.
  Returns a map (keyword ident) -> (map (keyword attribute -> keyword value)), like
  {:db/ident {:db/cardinality :db.cardinality/one}}."

  (go-pair
    (let [ident-map (clojure.set/map-invert idents)
          ref-tag (sqlite-schema/->tag :db.type/ref)
          kw<-SQLite (partial sqlite-schema/<-SQLite :db.type/keyword)]
      (->>
        (s/all-rows sqlite-connection ["SELECT ident, attr, value, value_type_tag FROM schema"])
        (<?)

        (group-by (comp kw<-SQLite :ident))
        (map (fn [[ident rows]]
               [ident
                (into {} (map (fn [row]
                                (let [tag (:value_type_tag row)
                                      ;; We want a symbolic schema, but most of our values are
                                      ;; :db.type/ref attributes. Map those entids back to idents.
                                      ;; This is ad-hoc since we haven't built a functional DB
                                      ;; instance yet.
                                      v (if (= tag ref-tag) (get ident-map (:value row)) (:value row))]
                                  [(kw<-SQLite (:attr row))
                                   (sqlite-schema/<-tagged-SQLite tag v)])) rows))]))
        (into {})))))

(defn <initialize-connection [sqlite-connection]
  (go-pair
    ;; Some of these return values when set, and some don't, hence the craziness here.
    (<? (s/execute! sqlite-connection ["PRAGMA page_size=32768"]))
    (<? (s/all-rows sqlite-connection ["PRAGMA journal_mode=wal"]))
    (<? (s/all-rows sqlite-connection ["PRAGMA wal_autocheckpoint=32"]))
    (<? (s/all-rows sqlite-connection ["PRAGMA journal_size_limit=3145728"]))
    (<? (s/execute! sqlite-connection ["PRAGMA foreign_keys=ON"]))))

(defn- submap?
  "Returns true if every key in m1 is present in m2 with the same value."
  [m1 m2]
  (every? (fn [[k v]]
            (= v (get m2 k)))
          m1))

(defn <db-with-sqlite-connection
  [sqlite-connection]
  (go-pair
    (<? (<initialize-connection sqlite-connection))

    (let [[previous-version current-version]
          (<? (sqlite-schema/<ensure-current-version
                sqlite-connection
                <bootstrapper!))]
      (when-not (= sqlite-schema/current-version current-version)
        (raise "Could not ensure current SQLite schema version."))

      ;; We just bootstrapped, or we are returning to an already bootstrapped DB.
      (let [idents (<? (<idents sqlite-connection))
            parts (<? (<parts sqlite-connection))
            symbolic-schema (<? (<symbolic-schema sqlite-connection idents))]
        (when-not (= previous-version current-version)
          (when (not (submap? bootstrap/idents idents))
            (raise "After bootstrapping database, expected new materialized idents to include all old bootstrapped idents"
                   {:error :bootstrap/bad-idents,
                    :new idents
                    :old bootstrap/idents}))

          (when (not (every? (fn [[k {:keys [start idx]}]]
                               (let [now (get parts k)]
                                 (and now
                                      (= start (:start now))
                                      (<= idx (:idx now)))))
                             bootstrap/parts))
            (raise "After bootstrapping database, expected new materialized parts and old bootstrapped parts to be congruent."
                   {:error :bootstrap/bad-parts,
                    :new parts
                    :old bootstrap/parts}))

          (when (not (submap? bootstrap/symbolic-schema symbolic-schema))
            (raise "After bootstrapping database, expected new materialized symbolic schema to include bootstrapped symbolic schema"
                   {:error :bootstrap/bad-symbolic-schema,
                    :new symbolic-schema
                    :old bootstrap/symbolic-schema})))

        ;; Finally, return a usable DB instance with the metadata that we
        ;; read from the SQLite database.
        (db/db sqlite-connection idents parts symbolic-schema)))))
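For reference, the submap? helper used in the consistency checks above behaves like this (values are illustrative):

  (submap? {:db/ident 0} {:db/ident 0, :db.part/db 1})   ;; => true
  (submap? {:db/ident 1} {:db/ident 0, :db.part/db 1})   ;; => false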
@@ -1,56 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.jdbc-sqlite
  (:require
   [datomish.pair-chan :refer [go-pair]]
   [datomish.sqlite :as s]
   [clojure.java.jdbc :as j]
   [clojure.core.async :as a]))

(deftype JDBCSQLiteConnection [spec]
  s/ISQLiteConnection
  (-execute!
    [db sql bindings]
    (go-pair
      (j/execute! (.-spec db) (into [sql] bindings) {:transaction? false})))

  (-each
    [db sql bindings row-cb]
    (go-pair
      (let [rows (j/query (.-spec db) (into [sql] bindings))]
        (when row-cb
          (doseq [row rows] (row-cb row)))
        (count rows))))

  (close [db]
    (go-pair
      (.close (:connection (.-spec db))))))

(defn open
  [path & {:keys [mode]}]
  (let [spec {:classname "org.sqlite.JDBC"
              :identifiers identity
              :subprotocol "sqlite"
              :subname path}] ;; TODO: use mode.
    (go-pair
      (->>
        (j/get-connection spec)
        (assoc spec :connection)
        (->JDBCSQLiteConnection)))))

(extend-protocol s/ISQLiteConnectionFactory
  String
  (<sqlite-connection [path]
    (open path))

  java.io.File
  (<sqlite-connection [path]
    (open path)))
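A sketch of opening a JDBC-backed connection and issuing statements through the datomish.sqlite helpers; the path and SQL are illustrative, and go-pair / <? from datomish.pair-chan are assumed to be in scope:

  (go-pair
    (let [c (<? (open "/tmp/example.db"))]
      (<? (s/execute! c ["CREATE TABLE IF NOT EXISTS notes (body TEXT)"]))
      (<? (s/all-rows c ["SELECT COUNT(*) AS n FROM notes"]))))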
@@ -1,130 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.js
  (:refer-clojure :exclude [])
  (:require-macros
   [datomish.pair-chan :refer [go-pair <?]]
   [datomish.promises :refer [go-promise]])
  (:require
   [datomish.util
    :as util
    :refer-macros [raise raise-str cond-let]]
   [cljs.core.async :as a :refer [take! <! >!]]
   [cljs.reader]
   [cljs-promises.core :refer [promise]]
   [datomish.cljify :refer [cljify]]
   [datomish.api :as d]
   [datomish.db :as db]
   [datomish.db-factory :as db-factory]
   [datomish.pair-chan]
   [datomish.promises :refer [take-pair-as-promise!]]
   [datomish.sqlite :as sqlite]
   [datomish.simple-schema :as simple-schema]
   [datomish.js-sqlite :as js-sqlite]
   [datomish.transact :as transact]))


;; Public API.

(def ^:export db d/db)

(defn- cljify-options [options]
  ;; Step one: basic parsing.
  (let [o (cljify options)]
    ;; Step two: convert `order-by` into keywords.
    (if-let [ord (:order-by o)]
      (assoc o
             :order-by
             (map
               (fn [[var dir]]
                 [(keyword var)
                  (case dir
                    "asc" :asc
                    "desc" :desc
                    nil :asc
                    :default
                    (raise "Unexpected order-by direction " dir
                           {:direction dir}))])
               ord))
      o)))

(defn ^:export q [db find options]
  (let [find (cljs.reader/read-string find)
        opts (cljify-options options)]
    (take-pair-as-promise!
      (d/<q db find opts)
      clj->js)))

(defn ^:export ensure-schema [conn simple-schema]
  (let [simple-schema (cljify simple-schema)
        datoms (simple-schema/simple-schema->schema simple-schema)]
    (take-pair-as-promise!
      (d/<transact!
        conn
        datoms)
      clj->js)))

(def js->tx-data cljify)

(def ^:export tempid (partial db/id-literal :db.part/user))

(defn ^:export transact [conn tx-data]
  ;; Expects a JS array as input.
  (try
    (let [tx-data (js->tx-data tx-data)]
      (go-promise clj->js
        (let [tx-result (<? (d/<transact! conn tx-data))
              tempids (:tempids tx-result)
              to-return (select-keys tx-result
                                     [:tempids
                                      :added-idents
                                      :added-attributes
                                      :tx
                                      :txInstant])
              jsified (clj->js to-return)]

          ;; The tempids map isn't enough for a JS caller to look up one of
          ;; these objects, so we need a lookup function.
          (aset jsified "tempid" (fn [t] (get tempids t)))
          jsified)))
    (catch js/Error e
      (println "Error in transact:" e))))

(defn ^:export open [path]
  ;; Eventually, URI. For now, just a plain path (no file://).
  (go-promise clj->js
    (let [conn (<? (sqlite/<sqlite-connection path))
          db (<? (db-factory/<db-with-sqlite-connection conn))]
      (let [c (transact/connection-with-db db)]
        ;; We pickle the connection as a thunk here so it roundtrips through JS
        ;; without incident.
        {:conn (fn [] c)
         :db (fn [] (d/db c))
         :path path

         ;; Primary API.
         :ensureSchema (fn [simple-schema] (ensure-schema c simple-schema))
         :transact (fn [tx-data] (transact c tx-data))
         :q (fn [find opts] (q (d/db c) find opts))
         :close (fn [] (db/close-db db))

         ;; So you can generate keywords for binding in `:inputs`.
         :keyword keyword

         ;; Some helpers for testing the bridge.
         :println (fn [& xs] (apply println xs))
         :equal =
         :idx (fn [tempid] (:idx tempid))
         :cljify cljify
         :roundtrip (fn [x] (clj->js (cljify x)))

         :toString (fn [] (str "#<DB " path ">"))
         }))))
@@ -1,20 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.js-util)

(defn is-node? []
  (try
    (= "[object process]"
       (.toString (aget js/global "process")))
    (catch js/ReferenceError e
      false)
    (catch js/TypeError e
      false)))
@@ -1,93 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.pair-chan)

;; From https://github.com/plumatic/schema/blob/bf469889b730feb09448fd085be5828f28425b41/src/clj/schema/macros.clj#L10-L19.
(defn cljs-env?
  "Take the &env from a macro, and tell whether we are expanding into cljs."
  [env]
  (boolean (:ns env)))

(defmacro if-cljs
  "Return then if we are generating cljs code and else for Clojure code.
  https://groups.google.com/d/msg/clojurescript/iBY5HaQda4A/w1lAQi9_AwsJ"
  [then else]
  (if (cljs-env? &env) then else))

(defmacro go-safely [[chan chan-form] & body]
  "Evaluate `body` forms in a `go` block. Binds `chan-form` to `chan`.
  `chan-form` must evaluate to an error-channel.
  If `body` throws, the exception is propagated into `chan` and `chan` is closed.
  Returns `chan`."
  `(if-cljs
     (let [~chan ~chan-form]
       (cljs.core.async.macros/go
         (try
           (do ~@body)
           (catch js/Error ex#
             (cljs.core.async/>! ~chan [nil ex#]))))
       ~chan)
     (let [~chan ~chan-form]
       (clojure.core.async/go
         (try
           (do ~@body)
           (catch Throwable ex#
             (clojure.core.async/>! ~chan [nil ex#]))))
       ~chan)))

;; It's a huge pain to declare cross-environment macros. This is awful, but making the namespace a
;; parameter appears to be *even worse*. Note also that `go` is not in a consistent namespace...
(defmacro go-pair [& body]
  "Evaluate `body` forms in a `go` block to yield a result.
  Catch errors during evaluation.
  Return a promise channel that yields a pair: the result (or nil), and any
  error thrown (or nil)."
  `(if-cljs
     (let [pc-chan# (cljs.core.async/promise-chan)]
       (cljs.core.async.macros/go
         (try
           (cljs.core.async/>! pc-chan# [(do ~@body) nil])
           (catch js/Error ex#
             (cljs.core.async/>! pc-chan# [nil ex#]))))
       pc-chan#)
     (let [pc-chan# (clojure.core.async/promise-chan)]
       (clojure.core.async/go
         (try
           (clojure.core.async/>! pc-chan# [(do ~@body) nil])
           (catch Throwable ex#
             (clojure.core.async/>! pc-chan# [nil ex#]))))
       pc-chan#)))

;; Thanks to David Nolen for the name of this macro! http://swannodette.github.io/2013/08/31/asynchronous-error-handling/.
;; This version works a bit differently, though. This must be a macro, so that the enclosed <!
;; symbols are processed by any enclosing go blocks.
(defmacro <?
  "Expects `pc-chan` to be a channel or ReadPort which produces [value nil] or
  [nil error] pairs, and returns values and throws errors as per `consume-pair`."
  [pc-chan]
  `(if-cljs
     (consume-pair (cljs.core.async/<! ~pc-chan))
     (consume-pair (clojure.core.async/<! ~pc-chan))))

(defmacro <??
  "Takes from the channel if it's non-nil."
  [pc-chan]
  `(let [c# ~pc-chan]
     (when c#
       (datomish.pair-chan/<? c#))))

(defn consume-pair
  "When passed a [value nil] pair, returns value. When passed a [nil error] pair,
  throws error. See also `<?`."
  [[val err]]
  (if err
    (throw err)
    val))
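A sketch of the error-propagation pattern these macros enable; the function name is illustrative:

  (defn <divide [x y]
    (go-pair
      (when (zero? y)
        (throw (ex-info "Division by zero" {:y y})))
      (/ x y)))

  (go-pair
    (println (<? (<divide 10 2)))   ; prints 5
    (<? (<divide 1 0)))             ; the ex-info is rethrown here and lands in this pair channel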
@@ -1,107 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.places.importer
  #?(:cljs
     (:require-macros
      [datomish.pair-chan :refer [go-pair <?]]
      [cljs.core.async.macros :refer [go]]))
  (:require
   [datomish.db :as db]
   [datomish.transact :as transact]
   [datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
   [datomish.sqlite :as s]
   #?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
             [clojure.core.async :as a :refer [chan go <! >!]]])
   #?@(:cljs [[datomish.pair-chan]
              [cljs.core.async :as a :refer [chan <! >!]]])))

(def places-schema-fragment
  [{:db/id (db/id-literal :db.part/user)
    :db/ident :page/url
    :db/unique :db.unique/identity
    :db/valueType :db.type/string ;; TODO: uri
    :db.install/_attribute :db.part/db}
   {:db/id (db/id-literal :db.part/user)
    :db/ident :page/guid
    :db/unique :db.unique/identity
    :db/valueType :db.type/string ;; TODO: uuid or guid?
    :db.install/_attribute :db.part/db}
   {:db/id (db/id-literal :db.part/user)
    :db/ident :page/title
    :db/cardinality :db.cardinality/one
    :db/valueType :db.type/string
    :db.install/_attribute :db.part/db}
   {:db/id (db/id-literal :db.part/user)
    :db/ident :page/visitAt
    :db/cardinality :db.cardinality/many
    :db/valueType :db.type/long ;; TODO: instant
    :db.install/_attribute :db.part/db}
   ])


(defn- place->entity [[id rows]]
  (let [title (:title (first rows))
        required {:db/id (db/id-literal :db.part/user)
                  :page/url (:url (first rows))
                  :page/guid (:guid (first rows))}
        visits (keep :visit_date rows)]

    (util/assoc-if required
                   :page/title title
                   :page/visitAt visits)))

(defn import-titles [conn places-connection]
  (go-pair
    (let [rows
          (<?
            (s/all-rows
              places-connection
              ["SELECT DISTINCT p.title AS title, p.guid
                FROM moz_places AS p
                WHERE p.title IS NOT NULL AND p.hidden = 0 LIMIT 10"]))]
      (<?
        (transact/<transact!
          conn
          (map (fn [row]
                 {:db/id [:page/guid (:guid row)]
                  :page/title (:title row)})
               rows))))))

(defn import-places [conn places-connection]
  (go-pair
    ;; Ensure schema fragment is in place, even though it may cost a (mostly empty) transaction.
    (<? (transact/<transact! conn places-schema-fragment))

    (let [rows
          (<?
            (s/all-rows
              places-connection
              ["SELECT DISTINCT p.id AS id, p.url AS url, p.title AS title, p.visit_count, p.last_visit_date, p.guid,
                hv.visit_date
                FROM (SELECT * FROM moz_places LIMIT 1000) AS p LEFT JOIN moz_historyvisits AS hv ON p.id = hv.place_id
                WHERE p.hidden = 0
                ORDER BY p.id, hv.visit_date"]))]
      (<?
        (transact/<transact!
          conn
          (map place->entity (group-by :id rows)))))))

(defn import-titles-from-path [db places]
  (go-pair
    (let [conn (transact/connection-with-db db)
          pdb (<? (s/<sqlite-connection places))]
      (import-titles conn pdb))))

(defn import-places-from-path [db places]
  (go-pair
    (let [conn (transact/connection-with-db db)
          pdb (<? (s/<sqlite-connection places))]
      (import-places conn pdb))))
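A sketch of driving the importer; the path is illustrative, and db is assumed to be a DB value such as the one datomish.db-factory/<db-with-sqlite-connection yields:

  (go-pair
    (<? (import-places-from-path db "/path/to/places.sqlite")))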
@@ -1,30 +0,0 @@
(ns datomish.promises
  #?(:cljs
     (:require-macros
      [datomish.pair-chan :refer [go-pair <?]]))
  (:require
   #?@(:clj [[datomish.pair-chan :refer [go-pair]]
             [clojure.core.async :as a :refer [take!]]])
   #?@(:cljs [[cljs-promises.core :refer [promise]]
              [cljs.core.async :as a :refer [take!]]])))

(defn take-pair-as-promise!
  "Just like take-as-promise!, but aware that it's handling a pair channel.
  Also converts values, if desired."
  ([ch]
   (take-pair-as-promise! ch identity))
  ([ch f]
   (promise
     (fn [resolve reject]
       (take!
         ch
         (fn [[v e]]
           (if e
             (reject e)
             (resolve (f v)))))))))

(defmacro go-promise [f & body]
  `(datomish.promises/take-pair-as-promise!
     (datomish.pair-chan/go-pair
       ~@body)
     ~f))
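A sketch of bridging a pair channel to a JS promise with these helpers; <some-pair-channel-fn is a placeholder for any function that returns a pair channel:

  (-> (go-promise clj->js
        (<? (<some-pair-channel-fn)))
      (.then (fn [v] (.log js/console v))))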
@@ -1,249 +0,0 @@
;; Copyright 2016 Mozilla
;;
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
;; this file except in compliance with the License. You may obtain a copy of the
;; License at http://www.apache.org/licenses/LICENSE-2.0
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.

(ns datomish.query
  (:require
   [datomish.query.clauses :as clauses]
   [datomish.query.context :as context]
   [datomish.query.projection :as projection]
   [datomish.query.transforms :as transforms]
   [datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
   [datascript.parser :as dp
    #?@(:cljs
        [:refer [
                 BindScalar
                 Constant
                 DefaultSrc
                 FindRel FindColl FindTuple FindScalar
                 Pattern
                 Placeholder
                 SrcVar
                 Variable
                 ]])]
   [clojure.string :as str]
   [honeysql.core :as sql]
   )
  #?(:clj
     (:import
      [datascript.parser
       BindScalar
       Constant
       DefaultSrc
       FindRel FindColl FindTuple FindScalar
       Pattern
       Placeholder
       SrcVar
       Variable
       ])))

;; Setting this to something else will make your output more readable,
;; but not automatically safe for use.
(def sql-quoting-style :ansi)

(defn- validated-order-by [projection order-by]
  (let [ordering-vars (set (map first order-by))
        projected-vars (set (map second projection))]

    (when-not (every? #{:desc :asc} (map second order-by))
      (raise-str "Ordering expressions must be :asc or :desc."))
    (when-not
      (clojure.set/subset? ordering-vars projected-vars)
      (raise "Ordering vars " ordering-vars " not a subset of projected vars " projected-vars
             {:projected projected-vars
              :ordering ordering-vars}))

    order-by))

(defn- limit-and-order [limit projection order-by]
  (when (or limit order-by)
    (util/assoc-if {}
                   :limit limit
                   :order-by (validated-order-by projection order-by))))

(defn context->sql-clause [context]
  (let [inner-projection (projection/sql-projection-for-relation context)
        inner
        (merge
          ;; If we're finding a collection or relations, we specify
          ;; SELECT DISTINCT, because Datalog is set-based.
          ;; If we're only selecting one result — a scalar or a tuple —
          ;; then we don't bother.
          ;;
          ;; TODO: determine from schema analysis whether we can avoid
          ;; the need to do this even in the collection/relation case.
          {:modifiers
           (if (= 1 (:limit context))
             []
             [:distinct])}
          (clauses/cc->partial-subquery inner-projection (:cc context)))

        limit (:limit context)
        order-by (:order-by-vars context)]

    (if (:has-aggregates? context)
      (let [outer-projection (projection/sql-projection-for-aggregation context :preag)]
        ;; Validate the projected vars against the ordering clauses.
        (merge
          (limit-and-order limit outer-projection order-by)
          (when-not (empty? (:group-by-vars context))
            ;; We shouldn't need to account for types here, until we account for
            ;; `:or` clauses that bind from different attributes.
            {:group-by (map util/var->sql-var (:group-by-vars context))})
          {:select outer-projection
           :modifiers [:distinct]
           :from [:preag]
           :with {:preag inner}}))

      ;; Otherwise, validate against the inner.
      (merge
        (limit-and-order limit inner-projection order-by)
        inner))))

(defn context->sql-string [context args]
  (->
    context
    context->sql-clause
    (sql/format args :quoting sql-quoting-style)))

(defn- validate-with [with]
  (when-not (or (nil? with)
                (every? #(instance? Variable %1) with))
    (raise "Complex :with not supported." {:with with})))

(defn- validate-in [in]
  (when (nil? in)
    (raise ":in expression cannot be nil." {:binding in}))
  (when-not (= "$" (name (-> in first :variable :symbol)))
    (raise "Non-default sources not supported." {:binding in}))
  (when-not (every? (partial instance? BindScalar) (rest in))
    (raise "Non-scalar bindings not supported." {:binding in})))

(defn in->bindings
  "Take an `:in` list and return a bindings map suitable for use
  as external bindings in a CC."
  [in]
  (reduce
    (fn [m b]
      (or
        (when (instance? BindScalar b)
          (let [var (:variable b)]
            (when (instance? Variable var)
              (let [v (:symbol var)]
                (assoc m v [(sql/param (util/var->sql-var v))])))))
        m))
    {}
    in))

(defn options-into-context
  [context limit order-by]
  (when-not (or (and (integer? limit)
                     (pos? limit))
                (nil? limit))
    (raise "Invalid limit " limit {:limit limit}))
  (assoc context :limit limit :order-by-vars order-by))

(defn find-spec->elements [find-spec]
  (condp instance? find-spec
    FindRel (:elements find-spec)
    FindTuple (:elements find-spec)
    FindScalar [(:element find-spec)]
    FindColl [(:element find-spec)]
    (raise "Unable to handle find spec." {:find-spec find-spec})))

(defn find-spec->limit [find-spec]
  (when (or (instance? FindScalar find-spec)
            (instance? FindTuple find-spec))
    1))

(defn find-into-context
  "Take a parsed `find` expression and return a fully populated
  Context. You'll want this so you can get access to the
  projection, amongst other things."
  [context find]
  (let [{:keys [find in with where]} find] ; Destructure the Datalog query.
    (validate-with with)
    (validate-in in)

    ;; A find spec can be:
    ;;
    ;; * FindRel containing :elements. Returns an array of arrays.
    ;; * FindColl containing :element. This is like mapping (fn [row] (aget row 0))
    ;; over the result set. Returns an array of homogeneous values.
    ;; * FindScalar containing :element. Returns a single value.
    ;; * FindTuple containing :elements. This is just like :limit 1
    ;; on FindColl, returning the first item of the result array. Returns an
    ;; array of heterogeneous values.
    ;;
    ;; The code to handle these is:
    ;; - Just above, unifying a variable list in find-spec->elements.
    ;; - In context.cljc, checking whether a single value or collection is returned.
    ;; - In projection.cljc, transducing according to whether a single column or
    ;; multiple columns are assembled into the output.
    ;; - In db.cljc, where we finally take rows and decide what to push into an
    ;; output channel.

    (let [external-bindings (in->bindings in)
          elements (find-spec->elements find)
          known-types {}
          group-by-vars (projection/extract-group-by-vars elements with)]
      (util/assoc-if
        (assoc context
               :find-spec find
               :elements elements
               :group-by-vars group-by-vars
               :has-aggregates? (not (nil? group-by-vars))
               :cc (clauses/patterns->cc (:default-source context) where known-types external-bindings))
        :limit (find-spec->limit find)))))

(defn find->sql-clause
  "Take a parsed `find` expression and turn it into a structured SQL
  expression that can be formatted by honeysql."
  [context find]
  (->> find
       (find-into-context context)
       context->sql-clause))

(defn find->sql-string
  "Take a parsed `find` expression and turn it into SQL."
  [context find args]
  (->
    (find->sql-clause context find)
    (sql/format args :quoting sql-quoting-style)))

(defn parse
  "Parse a Datalog query array into a structured `find` expression."
  [q]
  (dp/parse-query q))

#_
(def sql-quoting-style nil)

#_
(datomish.query/find->sql-string
  (datomish.query.context/make-context (datomish.query.source/datoms-source nil))
  (datomish.query/parse
    '[:find ?timestampMicros ?page :in $ ?latest :where
      [?page :page/starred true ?t]
      [?t :db/txInstant ?timestampMicros]
      (not [(> ?t ?latest)]) ])
  {:latest 5})

#_
(datomish.query/find->sql-string
  (datomish.query.context/make-context (datomish.query.source/datoms-source nil))
  (datomish.query/parse
    '[:find ?page :in $ ?latest :where
      [?page :page/url "http://example.com/"]
      [(fulltext $ :page/title "Some title") [[?page ?title _ _]]]
      (or
        [?entity :page/likes ?page]
        [?entity :page/loves ?page])
      ])
  {})
@ -1,235 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.cc
|
||||
(:require
|
||||
[datomish.query.source
|
||||
:refer [attribute-in-source
|
||||
constant-in-source]]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[honeysql.core :as sql]
|
||||
[datascript.parser :as dp
|
||||
#?@(:cljs
|
||||
[:refer
|
||||
[
|
||||
Constant
|
||||
Placeholder
|
||||
Variable
|
||||
]])])
|
||||
#?(:clj
|
||||
(:import
|
||||
[datascript.parser
|
||||
Constant
|
||||
Placeholder
|
||||
Variable
|
||||
])))
|
||||
|
||||
|
||||
;; A ConjoiningClauses (CC) is a collection of clauses that are combined with JOIN.
|
||||
;; The topmost form in a query is a ConjoiningClauses.
|
||||
;;
|
||||
;;---------------------------------------------------------------------------------------
|
||||
;; Done:
|
||||
;; - Ordinary pattern clauses turn into FROM parts and WHERE parts using :=.
|
||||
;; - Predicate clauses turn into the same, but with other functions.
|
||||
;; - `not` turns into NOT EXISTS with WHERE clauses inside the subquery to
|
||||
;; bind it to the outer variables, or adds simple WHERE clauses to the outer
|
||||
;; clause.
|
||||
;; - `not-join` is similar, but with explicit binding.
|
||||
;; - `or` turns into a collection of UNIONs inside a subquery, or a simple
|
||||
;; alternation.
|
||||
;; `or`'s documentation states that all clauses must include the same vars,
|
||||
;; but that's an over-simplification: all clauses must refer to the external
|
||||
;; unification vars.
|
||||
;; The entire UNION-set is JOINed to any surrounding expressions per the `rule-vars`
|
||||
;; clause, or the intersection of the vars in the two sides of the JOIN.
|
||||
;;
|
||||
;; Not yet done:
|
||||
;; - Function clauses with bindings turn into:
|
||||
;; * Subqueries. Perhaps less efficient? Certainly clearer.
|
||||
;; * Projection expressions, if only used for output.
|
||||
;; * Inline expressions?
|
||||
;;---------------------------------------------------------------------------------------
|
||||
;;
|
||||
;; `from` is a list of [source alias] pairs, suitable for passing to honeysql.
|
||||
;; `bindings` is a map from var to qualified columns.
|
||||
;; `known-types` is a map from var to type keyword (e.g., :db.type/ref)
|
||||
;; `extracted-types` is a mapping, similar to `bindings`, but used to pull
|
||||
;; type tags out of the store at runtime.
|
||||
;; `wheres` is a list of fragments that can be joined by `:and`.
|
||||
(defrecord ConjoiningClauses
|
||||
[source
|
||||
from ; [[:datoms 'datoms123]]
|
||||
external-bindings ; {?var0 (sql/param :foobar)}
|
||||
bindings ; {?var1 [:datoms123.v]}
|
||||
known-types ; {?var1 :db.type/integer}
|
||||
extracted-types ; {?var2 :datoms123.value_type_tag}
|
||||
wheres ; [[:= :datoms123.v 15]]
|
||||
ctes ; {:name {:select …}}
|
||||
])
|
||||
|
||||
(defn bind-column-to-var [cc variable table position]
|
||||
(let [var (:symbol variable)
|
||||
col (sql/qualify table (name position))
|
||||
bound (util/append-in cc [:bindings var] col)]
|
||||
(if (or (not (= position :v))
|
||||
(contains? (:known-types cc) var)
|
||||
(contains? (:extracted-types cc) var))
|
||||
;; Type known; no need to accumulate a type-binding.
|
||||
bound
|
||||
(let [tag-col (sql/qualify table :value_type_tag)]
|
||||
(assoc-in bound [:extracted-types var] tag-col)))))
|
||||
|
||||
(defn constrain-column-to-constant [cc table position value]
|
||||
(let [col (sql/qualify table (name position))]
|
||||
(util/append-in cc
|
||||
[:wheres]
|
||||
[:= col (if (= :a position)
|
||||
(attribute-in-source (:source cc) value)
|
||||
(constant-in-source (:source cc) value))])))
|
||||
|
||||
(defprotocol ITypeTagged (->tag-codes [x]))
|
||||
|
||||
(extend-protocol ITypeTagged
|
||||
#?@(:cljs
|
||||
[string (->tag-codes [x] #{4 10 11 12})
|
||||
Keyword (->tag-codes [x] #{13}) ; TODO: what about idents?
|
||||
boolean (->tag-codes [x] #{1})
|
||||
number (->tag-codes [x]
|
||||
(if (integer? x)
|
||||
#{0 4 5} ; Could be a ref or a number or a date.
|
||||
#{4 5}))]) ; Can't be a ref.
|
||||
#?@(:clj
|
||||
[String (->tag-codes [x] #{10})
|
||||
clojure.lang.Keyword (->tag-codes [x] #{13}) ; TODO: what about idents?
|
||||
Boolean (->tag-codes [x] #{1})
|
||||
Integer (->tag-codes [x] #{0 5}) ; Could be a ref or a number.
|
||||
Long (->tag-codes [x] #{0 5}) ; Could be a ref or a number.
|
||||
Float (->tag-codes [x] #{5})
|
||||
Double (->tag-codes [x] #{5})
|
||||
java.util.UUID (->tag-codes [x] #{11})
|
||||
java.util.Date (->tag-codes [x] #{4})
|
||||
java.net.URI (->tag-codes [x] #{12})]))
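;; For example (JVM cases above; results follow directly from the type switch):
(comment
  (->tag-codes 5)         ;=> #{0 5}  (a Long could be a ref or a number)
  (->tag-codes "foo")     ;=> #{10}
  (->tag-codes :foo/bar)) ;=> #{13}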
|
||||
|
||||
(defn constrain-value-column-to-constant
|
||||
"Constrain a `v` column. Note that this can contribute *two*
|
||||
constraints: one for the column itself, and one for the type tag.
|
||||
We don't need to do this if the attribute is known and thus
|
||||
constrains the type."
|
||||
[cc table-alias value]
|
||||
(let [possible-type-codes (->tag-codes value)
|
||||
aliased (sql/qualify table-alias (name :value_type_tag))
|
||||
clauses (map
|
||||
(fn [code] [:= aliased code])
|
||||
possible-type-codes)]
|
||||
(util/concat-in cc [:wheres]
|
||||
;; Type checks then value checks.
|
||||
[(case (count clauses)
|
||||
0 (raise-str "Unexpected number of clauses.")
|
||||
1 (first clauses)
|
||||
(cons :or clauses))
|
||||
[:= (sql/qualify table-alias (name :v))
|
||||
(constant-in-source (:source cc) value)]])))
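;; Illustrative sketch (alias and value are assumed, not from the original): on
;; the JVM, constraining :datoms123.v to the Long 15 contributes a type-tag
;; alternation plus the value check, roughly:
(comment
  [(cons :or [[:= :datoms123.value_type_tag 0]
              [:= :datoms123.value_type_tag 5]])
   [:= :datoms123.v 15]])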
|
||||
|
||||
(defn combine-known-types [left right]
|
||||
(merge-with (fn [lt rt]
|
||||
(if (= lt rt)
|
||||
lt
|
||||
(raise "Incompatible types: " lt " != " rt {:types [lt rt]})))
|
||||
left right))
|
||||
|
||||
(defn augment-cc [cc from bindings known-types extracted-types wheres]
|
||||
(assoc cc
|
||||
:from (concat (:from cc) from)
|
||||
:bindings (merge-with concat (:bindings cc) bindings)
|
||||
:known-types (combine-known-types (:known-types cc) known-types)
|
||||
:extracted-types (merge (:extracted-types cc) extracted-types)
|
||||
:wheres (concat (:wheres cc) wheres)))
|
||||
|
||||
(defn merge-ccs [left right]
|
||||
(augment-cc left
|
||||
(:from right)
|
||||
(:bindings right)
|
||||
(:known-types right)
|
||||
(:extracted-types right)
|
||||
(:wheres right)))
|
||||
|
||||
(defn- bindings->where
|
||||
"Take a bindings map like
|
||||
{?foo [:datoms12.e :datoms13.v :datoms14.e]}
|
||||
and produce a list of constraint expressions like
|
||||
[[:= :datoms12.e :datoms13.v] [:= :datoms12.e :datoms14.e]]
|
||||
|
||||
TODO: experiment; it might be the case that producing more
|
||||
pairwise equalities gives better or worse performance."
|
||||
[bindings]
|
||||
(mapcat (fn [[_ vs]]
|
||||
(when (> (count vs) 1)
|
||||
(let [root (first vs)]
|
||||
(map (fn [v] [:= root v]) (rest vs)))))
|
||||
bindings))
|
||||
|
||||
;; This is so we can link clauses to the outside world.
|
||||
;; Note that we sort the variable list to achieve consistent ordering between
|
||||
;; Clojure and ClojureScript, yielding sane tests.
|
||||
(defn- impose-external-bindings [cc]
|
||||
(if (empty? (:external-bindings cc))
|
||||
cc
|
||||
(let [ours (:bindings cc)
|
||||
theirs (:external-bindings cc)
|
||||
vars (clojure.set/intersection (set (keys theirs)) (set (keys ours)))]
|
||||
(util/concat-in
|
||||
cc [:wheres]
|
||||
(map
|
||||
(fn [v]
|
||||
(let [external (first (v theirs))
|
||||
internal (first (v ours))]
|
||||
(assert external)
|
||||
(assert internal)
|
||||
[:= external internal]))
|
||||
(sort vars))))))
|
||||
|
||||
(defn expand-where-from-bindings
|
||||
"Take the bindings in the CC and contribute
|
||||
additional where clauses. Calling this more than
|
||||
once will result in duplicate clauses."
|
||||
[cc]
|
||||
(impose-external-bindings
|
||||
(assoc cc :wheres
|
||||
;; Note that the order of clauses here means that cross-pattern var bindings
|
||||
;; come last. That's OK: the SQL engine considers these altogether.
|
||||
(concat (:wheres cc)
|
||||
(bindings->where (:bindings cc))))))
|
||||
|
||||
(defn binding-for-symbol [cc symbol]
|
||||
(let [internal-bindings (symbol (:bindings cc))
|
||||
external-bindings (symbol (:external-bindings cc))]
|
||||
(or (first internal-bindings)
|
||||
(first external-bindings))))
|
||||
|
||||
(defn binding-for-symbol-or-throw [cc symbol]
|
||||
(or (binding-for-symbol cc symbol)
|
||||
(raise-str "No bindings yet for " symbol)))
|
||||
|
||||
(defn argument->value
|
||||
"Take a value from an argument list and resolve it against the CC.
|
||||
Throws if the value can't be resolved (e.g., no binding is established)."
|
||||
[cc arg]
|
||||
(condp instance? arg
|
||||
Placeholder
|
||||
(raise-str "Can't use a placeholder in a predicate.")
|
||||
|
||||
Variable
|
||||
(binding-for-symbol-or-throw cc (:symbol arg))
|
||||
|
||||
Constant
|
||||
(constant-in-source (:source cc) (:value arg))
|
||||
|
||||
(raise-str "Unknown predicate argument " arg)))
|
|
@@ -1,491 +0,0 @@

|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.clauses
|
||||
(:require
|
||||
[datomish.query.cc :as cc]
|
||||
[datomish.query.functions :as functions]
|
||||
[datomish.query.projection :refer [sql-projection-for-simple-variable-list]]
|
||||
[datomish.query.source
|
||||
:refer [pattern->schema-value-type
|
||||
attribute-in-source
|
||||
constant-in-source
|
||||
source->from
|
||||
source->constraints]]
|
||||
[datomish.schema :as schema]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datascript.parser :as dp
|
||||
#?@(:cljs
|
||||
[:refer
|
||||
[
|
||||
And
|
||||
Constant
|
||||
DefaultSrc
|
||||
Function
|
||||
Not
|
||||
Or
|
||||
Pattern
|
||||
Placeholder
|
||||
PlainSymbol
|
||||
Predicate
|
||||
Variable
|
||||
]])]
|
||||
[honeysql.core :as sql]
|
||||
[clojure.string :as str]
|
||||
)
|
||||
#?(:clj
|
||||
(:import
|
||||
[datascript.parser
|
||||
And
|
||||
Constant
|
||||
DefaultSrc
|
||||
Function
|
||||
Not
|
||||
Or
|
||||
Pattern
|
||||
Placeholder
|
||||
PlainSymbol
|
||||
Predicate
|
||||
Variable
|
||||
])))
|
||||
|
||||
;; Pattern building is recursive, so we need forward declarations.
|
||||
(declare
|
||||
Not->NotJoinClause not-join->where-fragment
|
||||
expand-pattern-clauses
|
||||
complex-or->cc
|
||||
simple-or? simple-or->cc)
|
||||
|
||||
(defn- check-or-apply-value-type [cc value-type pattern-part]
|
||||
(if (nil? value-type)
|
||||
cc
|
||||
(condp instance? pattern-part
|
||||
Placeholder
|
||||
cc
|
||||
|
||||
Variable
|
||||
(let [var-sym (:symbol pattern-part)]
|
||||
(if-let [existing-type (var-sym (:known-types cc))]
|
||||
(if (= existing-type value-type)
|
||||
cc
|
||||
(raise "Var " var-sym " already has type " existing-type "; this pattern wants " value-type
|
||||
{:pattern pattern-part :value-type value-type}))
|
||||
(assoc-in cc [:known-types var-sym] value-type)))
|
||||
|
||||
Constant
|
||||
(do
|
||||
(or (and (= :db.type/ref value-type)
|
||||
(or (keyword? (:value pattern-part)) ; ident
|
||||
(integer? (:value pattern-part)))) ; entid
|
||||
(schema/ensure-value-matches-type value-type (:value pattern-part)))
|
||||
cc))))
|
||||
|
||||
(defn- apply-pattern-clause-for-alias
|
||||
"This helper assumes that `cc` has already established a table association
|
||||
for the provided alias."
|
||||
[cc alias pattern]
|
||||
(let [pattern (:pattern pattern)
|
||||
columns (:columns (:source cc))
|
||||
places (map vector pattern columns)
|
||||
value-type (pattern->schema-value-type (:source cc) pattern)] ; Optional; e.g., :db.type/string
|
||||
(reduce
|
||||
(fn [cc
|
||||
[pattern-part ; ?x, :foo/bar, 42
|
||||
position]] ; :a
|
||||
(let [cc (case position
|
||||
;; TODO: we should be able to constrain :e and :a to be
|
||||
;; entities... but the type checker expects that to be an int.
|
||||
:v (check-or-apply-value-type cc value-type pattern-part)
|
||||
:e (check-or-apply-value-type cc :db.type/ref pattern-part)
|
||||
cc)]
|
||||
(condp instance? pattern-part
|
||||
;; Placeholders don't contribute any bindings, nor do
|
||||
;; they constrain the query -- there's no need to produce
|
||||
;; IS NOT NULL, because we don't store nulls in our schema.
|
||||
Placeholder
|
||||
cc
|
||||
|
||||
Variable
|
||||
(cc/bind-column-to-var cc pattern-part alias position)
|
||||
|
||||
Constant
|
||||
(if (and (nil? value-type)
|
||||
(= position :v))
|
||||
;; If we don't know the type, but we have a constant, generate
|
||||
;; a :wheres clause constraining the accompanying value_type_tag
|
||||
;; column.
|
||||
(cc/constrain-value-column-to-constant cc alias (:value pattern-part))
|
||||
(cc/constrain-column-to-constant cc alias position (:value pattern-part)))
|
||||
|
||||
(raise "Unknown pattern part." {:part pattern-part :clause pattern}))))
|
||||
|
||||
cc
|
||||
places)))
|
||||
|
||||
(defn pattern->attribute [pattern]
|
||||
(second (:pattern pattern)))
|
||||
|
||||
;; Accumulates a pattern into the CC. Returns a new CC.
|
||||
(defn apply-pattern-clause
|
||||
"Transform a DataScript Pattern instance into the parts needed
|
||||
to build a SQL expression.
|
||||
|
||||
@param cc A CC instance.
|
||||
@param pattern The pattern instance.
|
||||
@return an augmented CC"
|
||||
[cc pattern]
|
||||
(when-not (instance? Pattern pattern)
|
||||
(raise-str "Expected to be called with a Pattern instance." pattern))
|
||||
(when-not (instance? DefaultSrc (:source pattern))
|
||||
(raise-str "Non-default sources are not supported in patterns. Pattern: " pattern))
|
||||
|
||||
;; TODO: look up the attribute in external bindings if it's a var. Perhaps we
|
||||
;; already know what it is…
|
||||
(let [[table alias] (source->from
|
||||
(:source cc) ; e.g., [:datoms :datoms123]
|
||||
(pattern->attribute pattern))]
|
||||
(apply-pattern-clause-for-alias
|
||||
|
||||
;; Record the new table mapping.
|
||||
(util/append-in cc [:from] [table alias])
|
||||
|
||||
;; Use the new alias for columns.
|
||||
alias
|
||||
pattern)))
|
||||
|
||||
(defn- plain-symbol->sql-predicate-symbol [fn]
|
||||
(when-not (instance? PlainSymbol fn)
|
||||
(raise-str "Predicate functions must be named by plain symbols." fn))
|
||||
(#{:> :>= :< :<= := :!=} (keyword (name (:symbol fn)))))
|
||||
|
||||
(defn apply-predicate-clause [cc predicate]
|
||||
(when-not (instance? Predicate predicate)
|
||||
(raise-str "Expected to be called with a Predicate instance." predicate))
|
||||
(let [f (plain-symbol->sql-predicate-symbol (:fn predicate))]
|
||||
(when-not f
|
||||
(raise-str "Unknown function " (:fn predicate)))
|
||||
|
||||
(let [args (map (partial cc/argument->value cc) (:args predicate))]
|
||||
(util/append-in cc [:wheres] (cons f args)))))
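;; Illustrative sketch (hypothetical binding): if ?t is bound to :datoms123.tx,
;; a predicate such as [(> ?t 1000)] would contribute a :wheres fragment
;; roughly like:
(comment
  ;; i.e., (:> :datoms123.tx 1000) appended to (:wheres cc)
  (cons :> [:datoms123.tx 1000]))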
|
||||
|
||||
(defn apply-not-clause [cc not]
|
||||
(when-not (instance? Not not)
|
||||
(raise "Expected to be called with a Not instance." {:clause not}))
|
||||
(when-not (instance? DefaultSrc (:source not))
|
||||
(raise "Non-default sources are not supported in `not` clauses." {:clause not}))
|
||||
|
||||
;; If our bindings are already available, great -- emit a :wheres
|
||||
;; fragment, and include the external bindings so that they match up.
|
||||
;; Otherwise, we need to delay. Right now we're lazy, so we just fail:
|
||||
;; reorder your query yourself.
|
||||
;;
|
||||
;; Note that we don't extract and reuse any types established inside
|
||||
;; the `not` clause: perhaps those won't make sense outside. But it's
|
||||
;; a filter, so we push the external types _in_.
|
||||
(util/append-in cc
|
||||
[:wheres]
|
||||
(not-join->where-fragment
|
||||
(Not->NotJoinClause (:source cc)
|
||||
(:known-types cc)
|
||||
(merge-with concat
|
||||
(:external-bindings cc)
|
||||
(:bindings cc))
|
||||
not))))
|
||||
|
||||
(defn apply-or-clause [cc orc]
|
||||
(when-not (instance? Or orc)
|
||||
(raise "Expected to be called with a Or instance." {:clause orc}))
|
||||
(when-not (instance? DefaultSrc (:source orc))
|
||||
(raise "Non-default sources are not supported in `or` clauses." {:clause orc}))
|
||||
|
||||
;; A simple `or` is something like:
|
||||
;;
|
||||
;; (or [?foo :foo/bar ?baz]
|
||||
;; [?foo :foo/noo ?baz])
|
||||
;;
|
||||
;; This can be converted into a single join and an `or` :where expression.
|
||||
;;
|
||||
;; Otherwise -- perhaps each leg of the `or` binds different variables, which
|
||||
;; is acceptable for an `or-join` form -- we call this a complex `or`. To
|
||||
;; execute those, we need to turn them into a joined subquery composed of
|
||||
;; `UNION`ed queries.
|
||||
(let [f (if (simple-or? orc) simple-or->cc complex-or->cc)]
|
||||
(cc/merge-ccs
|
||||
cc
|
||||
(f (:source cc)
|
||||
(:known-types cc)
|
||||
(merge-with concat
|
||||
(:external-bindings cc)
|
||||
(:bindings cc))
|
||||
orc))))
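;; Illustrative sketch (alias and the test attribute transform are assumed):
;; for the simple `or` example above, both legs share one pattern shape, so the
;; result is a single :datoms alias whose attribute alternates in the :wheres,
;; roughly:
(comment
  [(cons :or [[:= :datoms123.a "foo/bar"]
              [:= :datoms123.a "foo/noo"]])])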
|
||||
|
||||
|
||||
(defn apply-function-clause [cc function]
|
||||
(or (functions/apply-sql-function cc function)
|
||||
(raise "Unknown function expression." {:clause function})))
|
||||
|
||||
;; We're keeping this simple for now: a straightforward type switch.
|
||||
(defn apply-clause [cc it]
|
||||
(condp instance? it
|
||||
Or
|
||||
(apply-or-clause cc it)
|
||||
|
||||
And
|
||||
(expand-pattern-clauses cc (:clauses it))
|
||||
|
||||
Not
|
||||
(apply-not-clause cc it)
|
||||
|
||||
Predicate
|
||||
(apply-predicate-clause cc it)
|
||||
|
||||
Pattern
|
||||
(apply-pattern-clause cc it)
|
||||
|
||||
Function
|
||||
(apply-function-clause cc it)
|
||||
|
||||
(raise "Unknown clause." {:clause it})))
|
||||
|
||||
(defn expand-pattern-clauses
|
||||
"Reduce a sequence of patterns into a CC."
|
||||
[cc patterns]
|
||||
(reduce apply-clause cc patterns))
|
||||
|
||||
(defn- make-cc [source known-types external-bindings]
|
||||
(cc/map->ConjoiningClauses
|
||||
{:source source
|
||||
:from []
|
||||
:known-types (or known-types {})
|
||||
:extracted-types {}
|
||||
:external-bindings (or external-bindings {})
|
||||
:bindings {}
|
||||
:ctes {}
|
||||
:wheres []}))
|
||||
|
||||
(defn pattern->cc [source pattern known-types external-bindings]
|
||||
(cc/expand-where-from-bindings
|
||||
(apply-clause
|
||||
(make-cc source known-types external-bindings)
|
||||
pattern)))
|
||||
|
||||
(defn patterns->cc [source patterns known-types external-bindings]
|
||||
(cc/expand-where-from-bindings
|
||||
(expand-pattern-clauses
|
||||
(make-cc source known-types external-bindings)
|
||||
patterns)))
|
||||
|
||||
(defn cc->partial-subquery
|
||||
"Build part of a honeysql query map from a CC: the `:select`, `:from`, and
|
||||
`:where` parts.
|
||||
This allows for reuse both in top-level query generation and also for
|
||||
subqueries and NOT EXISTS clauses."
|
||||
[select cc]
|
||||
(merge
|
||||
{:select select
|
||||
:from (:from cc)}
|
||||
(when-not (empty? (:ctes cc))
|
||||
{:with (:ctes cc)})
|
||||
(when-not (empty? (:wheres cc))
|
||||
{:where (cons :and (:wheres cc))})))
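;; Illustrative sketch (hypothetical CC contents): the resulting honeysql map
;; for a CC with one alias, no CTEs, and two :wheres is roughly:
(comment
  {:select [1]
   :from   [[:datoms :datoms123]]
   :where  (cons :and [[:= :datoms123.a "foo/bar"]
                       [:= :datoms123.e 65536]])})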
|
||||
|
||||
|
||||
;; A `not-join` clause is a filter. It takes bindings from the enclosing query
|
||||
;; and runs as a subquery with `NOT EXISTS`.
|
||||
;; The only difference between `not` and `not-join` is that `not` computes
|
||||
;; its varlist by recursively walking the provided patterns.
|
||||
;; DataScript's parser does variable extraction for us, and also verifies
|
||||
;; that a declared variable list is valid for the clauses given.
|
||||
(defrecord NotJoinClause [unify-vars cc])
|
||||
|
||||
(defn Not->NotJoinClause [source known-types external-bindings not]
|
||||
(when-not (instance? DefaultSrc (:source not))
|
||||
(raise "Non-default sources are not supported in `not` clauses." {:clause not}))
|
||||
(map->NotJoinClause
|
||||
{:unify-vars (:vars not)
|
||||
:cc (patterns->cc source (:clauses not) known-types external-bindings)}))
|
||||
|
||||
(defn not-join->where-fragment [not-join]
|
||||
[:not
|
||||
(if (empty? (:bindings (:cc not-join)))
|
||||
;; If the `not` doesn't establish any bindings, it means it only contains
|
||||
;; expressions that constrain variables established outside itself.
|
||||
;; We can just return an expression.
|
||||
(cons :and (:wheres (:cc not-join)))
|
||||
|
||||
;; If it does establish bindings, then it has to be a subquery.
|
||||
[:exists (cc->partial-subquery [1] (:cc not-join))])])
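;; Illustrative usage (standard Datalog syntax; attribute names are
;; hypothetical): `not-join` names the unification vars explicitly.
(comment
  '[:find ?page :where
    [?page :page/url _]
    (not-join [?page]
      [?page :page/starred true])])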
|
||||
|
||||
|
||||
;; A simple Or clause is one in which each branch can be evaluated against
|
||||
;; a single pattern match. That means that all the variables are in the same places.
|
||||
;; We can produce a ConjoiningClauses in that case -- the :wheres will suffice
|
||||
;; for alternation.
|
||||
|
||||
(defn validate-or-clause [orc]
|
||||
(when-not (instance? DefaultSrc (:source orc))
|
||||
(raise "Non-default sources are not supported in `or` clauses." {:clause orc}))
|
||||
(when-not (nil? (:required (:rule-vars orc)))
|
||||
(raise "We've never seen required rule-vars before." {:clause orc})))
|
||||
|
||||
(defn simple-or? [orc]
|
||||
(let [clauses (:clauses orc)]
|
||||
(and
|
||||
;; Every pattern is a Pattern.
|
||||
(every? (partial instance? Pattern) clauses)
|
||||
|
||||
(or
|
||||
(= 1 (count clauses))
|
||||
|
||||
;; Every pattern has the same source, and every place is either the
|
||||
;; same var or a fixed value. We ignore placeholders for now.
|
||||
(let [template (first clauses)
|
||||
template-source (:source template)]
|
||||
(every? (fn [c]
|
||||
(and (= (:source c) template-source)
|
||||
(util/every-pair?
|
||||
(fn [left right]
|
||||
(condp instance? left
|
||||
Variable (= left right)
|
||||
Constant (instance? Constant right)
|
||||
|
||||
false))
|
||||
(:pattern c) (:pattern template))))
|
||||
(rest clauses)))))))
|
||||
|
||||
(defn simple-or->cc
|
||||
"The returned CC has not yet had bindings expanded."
|
||||
[source known-types external-bindings orc]
|
||||
(validate-or-clause orc)
|
||||
|
||||
;; We 'fork' a CC for each pattern, then union them together.
|
||||
;; We need to build the first in order that the others use the same
|
||||
;; column names and known types.
|
||||
(let [cc (make-cc source known-types external-bindings)
|
||||
primary (apply-pattern-clause cc (first (:clauses orc)))
|
||||
remainder (rest (:clauses orc))]
|
||||
|
||||
(if (empty? remainder)
|
||||
;; That was easy.
|
||||
primary
|
||||
|
||||
;; Note that for a simple `or` clause, the same template is used for each,
|
||||
;; so we can simply use the `extracted-types` bindings from `primary`.
|
||||
;; A complex `or` is much harder to handle.
|
||||
(let [template (assoc primary :wheres [])
|
||||
alias (second (first (:from template)))
|
||||
ccs (map (partial apply-pattern-clause-for-alias template alias)
|
||||
remainder)]
|
||||
|
||||
;; Because this is a simple clause, we know that the first pattern established
|
||||
;; any necessary bindings.
|
||||
;; Take any new :wheres from each CC and combine them with :or.
|
||||
(assoc primary
|
||||
:wheres
|
||||
[(cons :or
|
||||
(reduce (fn [acc cc]
|
||||
(let [w (:wheres cc)]
|
||||
(case (count w)
|
||||
0 acc
|
||||
1 (conj acc (first w))
|
||||
|
||||
(conj acc (cons :and w)))))
|
||||
[]
|
||||
(cons primary ccs)))])))))
|
||||
|
||||
(defn complex-or->cc
|
||||
[source known-types external-bindings orc]
|
||||
(validate-or-clause orc)
|
||||
|
||||
;; Step one: any clauses that are standalone patterns might differ only in
|
||||
;; attribute. In that case, we can treat them as a 'simple or' -- a single
|
||||
;; pattern with a WHERE clause that alternates on the attribute.
|
||||
;; Pull those out first.
|
||||
;;
|
||||
;; Step two: for each cluster of patterns, and for each `and`, recursively
|
||||
;; build a CC and simple projection. The projection must be the same for each
|
||||
;; CC, because we will concatenate these with a `UNION`.
|
||||
;;
|
||||
;; Finally, we alias this entire UNION block as a FROM; it can be stitched into
|
||||
;; the outer query by looking at the projection.
|
||||
;;
|
||||
;; For example,
|
||||
;;
|
||||
;; [:find ?page :in $ ?string :where
|
||||
;; (or [?page :page/title ?string]
|
||||
;; [?page :page/excerpt ?string]
|
||||
;; (and [?save :save/string ?string]
|
||||
;; [?page :page/save ?save]))]
|
||||
;;
|
||||
;; would expand to
|
||||
;;
|
||||
;; SELECT or123.page AS page FROM
|
||||
;; (SELECT datoms124.e AS page FROM datoms AS datoms124
|
||||
;; WHERE datoms124.v = ? AND
|
||||
;; (datoms124.a = :page/title OR
|
||||
;; datoms124.a = :page/excerpt)
|
||||
;; UNION
|
||||
;; SELECT datoms126.e AS page FROM datoms AS datoms125, datoms AS datoms126
|
||||
;; WHERE datoms125.a = :save/string AND
|
||||
;; datoms125.v = ? AND
|
||||
;; datoms126.v = datoms125.e AND
|
||||
;; datoms126.a = :page/save)
|
||||
;; AS or123
|
||||
;;
|
||||
;; Note that a top-level standalone `or` doesn't really need to be aliased, but
|
||||
;; it shouldn't do any harm.
|
||||
|
||||
(if (= 1 (count (:clauses orc)))
|
||||
;; Well, this is silly.
|
||||
(pattern->cc source (first (:clauses orc)) known-types external-bindings)
|
||||
|
||||
;; TODO: pull out simple patterns. Issue #62.
|
||||
(let [
|
||||
;; First: turn each arm of the `or` into a CC. We can easily turn this
|
||||
;; into SQL.
|
||||
ccs (map (fn [p] (pattern->cc source p known-types external-bindings))
|
||||
(:clauses orc))
|
||||
|
||||
free-vars (:free (:rule-vars orc))
|
||||
|
||||
;; Second: wrap an equivalent projection around each. The Or knows which
|
||||
;; variables to use.
|
||||
projection-list-fn
|
||||
(partial sql-projection-for-simple-variable-list
|
||||
free-vars)
|
||||
|
||||
;; Third: turn each CC and projection into an arm of a UNION.
|
||||
subqueries {:union (map (fn [cc]
|
||||
(cc->partial-subquery (projection-list-fn cc)
|
||||
cc))
|
||||
ccs)}
|
||||
|
||||
|
||||
;; Fourth: map this query to an alias in `:from`, and establish bindings
|
||||
;; so that the enclosing query and projection know which names to use.
|
||||
;; Finally, return a CC that can be merged.
|
||||
alias ((:table-alias source) :orjoin)
|
||||
bindings (into {} (map (fn [var]
|
||||
(let [sym (:symbol var)]
|
||||
[sym [(sql/qualify alias (util/var->sql-var sym))]]))
|
||||
free-vars))
|
||||
|
||||
known-types
|
||||
(reduce cc/combine-known-types {} (map :known-types ccs))]
|
||||
|
||||
(cc/map->ConjoiningClauses
|
||||
{:source source
|
||||
:from [[subqueries alias]]
|
||||
:known-types known-types
|
||||
:extracted-types (apply merge (map :extracted-types ccs))
|
||||
:external-bindings {} ; No need: caller will merge.
|
||||
:bindings bindings
|
||||
:ctes {}
|
||||
:wheres []}))))
|
|
@@ -1,39 +0,0 @@

|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
;; A context, very simply, holds on to a default source and some knowledge
|
||||
;; needed for aggregation.
|
||||
(ns datomish.query.context
|
||||
(:require
|
||||
[datascript.parser :as dp
|
||||
#?@(:cljs [:refer [FindRel FindColl FindTuple FindScalar]])])
|
||||
#?(:clj
|
||||
(:import
|
||||
[datascript.parser FindRel FindColl FindTuple FindScalar])))
|
||||
|
||||
(defrecord Context
|
||||
[
|
||||
default-source
|
||||
find-spec ; The parsed find spec. Used to decide how to process rows.
|
||||
elements ; A list of Element instances, drawn from the :find-spec itself.
|
||||
has-aggregates?
|
||||
group-by-vars ; A list of variables from :find and :with, used to generate GROUP BY.
|
||||
order-by-vars ; A list of projected variables and directions, e.g., [:date :asc], [:_max_timestamp :desc].
|
||||
limit ; The limit to apply to the final results of the query. Only makes sense with ORDER BY.
|
||||
cc ; The main conjoining clause.
|
||||
])
|
||||
|
||||
(defn scalar-or-tuple-query? [context]
|
||||
(when-let [find-spec (:find-spec context)]
|
||||
(or (instance? FindScalar find-spec)
|
||||
(instance? FindTuple find-spec))))
|
||||
|
||||
(defn make-context [source]
|
||||
(->Context source nil nil false nil nil nil nil))
|
|
@@ -1,343 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.functions
|
||||
(:require
|
||||
[honeysql.format :as fmt]
|
||||
[datomish.query.cc :as cc]
|
||||
[datomish.schema :as schema]
|
||||
[datomish.sqlite-schema :refer [->tag ->SQLite]]
|
||||
[datomish.query.source
|
||||
:as source
|
||||
:refer [attribute-in-source
|
||||
constant-in-source]]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datascript.parser :as dp
|
||||
#?@(:cljs
|
||||
[:refer
|
||||
[
|
||||
BindColl
|
||||
BindScalar
|
||||
BindTuple
|
||||
BindIgnore
|
||||
Constant
|
||||
Function
|
||||
PlainSymbol
|
||||
SrcVar
|
||||
Variable
|
||||
]])]
|
||||
[honeysql.core :as sql]
|
||||
[clojure.string :as str]
|
||||
)
|
||||
#?(:clj
|
||||
(:import
|
||||
[datascript.parser
|
||||
BindColl
|
||||
BindScalar
|
||||
BindTuple
|
||||
BindIgnore
|
||||
Constant
|
||||
Function
|
||||
PlainSymbol
|
||||
SrcVar
|
||||
Variable
|
||||
])))
|
||||
|
||||
;; honeysql's MATCH handler doesn't work for sqlite. This does.
|
||||
(defmethod fmt/fn-handler "match" [_ col val]
|
||||
(str (fmt/to-sql col) " MATCH " (fmt/to-sql val)))
|
||||
|
||||
(defn fulltext-attribute? [source attribute]
|
||||
;; TODO: schema lookup.
|
||||
true)
|
||||
|
||||
(defn bind-coll->binding-vars [bind-coll]
|
||||
(:bindings (:binding bind-coll)))
|
||||
|
||||
(defn binding-placeholder-or-variable? [binding]
|
||||
(or
|
||||
;; It's a placeholder...
|
||||
(instance? BindIgnore binding)
|
||||
|
||||
;; ... or it's a scalar binding to a variable.
|
||||
(and
|
||||
(instance? BindScalar binding)
|
||||
(instance? Variable (:variable binding)))))
|
||||
|
||||
(defn- validate-fulltext-clause [cc function]
|
||||
(let [bind-coll (:binding function)
|
||||
[src attr search] (:args function)]
|
||||
(when-not (and (instance? SrcVar src)
|
||||
(= "$" (name (:symbol src))))
|
||||
(raise "Non-default sources not supported." {:arg src}))
|
||||
(when (and (instance? Constant attr)
|
||||
(not (fulltext-attribute? (:source cc) (:value attr))))
|
||||
(raise-str "Attribute " (:value attr) " is not a fulltext-indexed attribute."))
|
||||
|
||||
(when-not (and (instance? BindColl bind-coll)
|
||||
(instance? BindTuple (:binding bind-coll))
|
||||
(every? binding-placeholder-or-variable?
|
||||
(bind-coll->binding-vars bind-coll)))
|
||||
|
||||
(raise "Unexpected binding value." {:binding bind-coll}))))
|
||||
|
||||
(defn apply-fulltext-clause [cc function]
|
||||
(validate-fulltext-clause cc function)
|
||||
|
||||
;; A fulltext search string is either a constant string or a variable binding.
|
||||
;; The search string and the attribute are used to generate a SQL MATCH expression:
|
||||
;; table MATCH 'search string'
|
||||
;; This is then joined against an ordinary pattern to yield entity, value, and tx.
|
||||
;; We do not currently support scoring; the score value will always be 0.
|
||||
(let [[src attr search] (:args function)
|
||||
|
||||
;; Note that DataScript's parser won't allow us to write a term like
|
||||
;;
|
||||
;; [(fulltext $ _ "foo") [[?x]]]
|
||||
;;
|
||||
;; so we instead have a placeholder attribute. Sigh.
|
||||
;; We also support sets of attributes, so you can write
|
||||
;;
|
||||
;; [(fulltext $ #{:foo/bar :foo/baz} "Noo") [[?x]]]
|
||||
;;
|
||||
;; which involves some tomfoolery here.
|
||||
;;
|
||||
;; TODO: exclude any non-fulltext attributes. If the set shrinks to nothing,
|
||||
;; fail the entire pattern.
|
||||
;; https://github.com/mozilla/datomish/issues/56
|
||||
attr-constants (or
|
||||
(and (instance? Constant attr)
|
||||
(let [attr (:value attr)
|
||||
intern (partial source/attribute-in-source (:source cc))]
|
||||
(when-not (= :any attr)
|
||||
(cond
|
||||
(set? attr)
|
||||
(map intern attr)
|
||||
|
||||
(or (keyword? attr)
|
||||
(integer? attr))
|
||||
(list (intern attr))
|
||||
|
||||
:else
|
||||
(raise-str "Unknown fulltext attribute " attr {:attr attr})))))
|
||||
|
||||
(and (instance? Variable attr)
|
||||
(cc/binding-for-symbol-or-throw cc (:symbol attr)))
|
||||
|
||||
;; nil, so it's seqable.
|
||||
nil)
|
||||
|
||||
;; Pull out the symbols for the binding array.
|
||||
[entity value tx score]
|
||||
(map (comp :symbol :variable) ; This will nil-out placeholders.
|
||||
(get-in function [:binding :binding :bindings]))
|
||||
|
||||
;; Find the FTS table name and alias. We might have multiple fulltext
|
||||
;; expressions so we will generate a query like
|
||||
;; SELECT ttt.a FROM t1 AS ttt WHERE ttt.t1 MATCH 'string'
|
||||
[fulltext-table fulltext-alias] (source/source->fulltext-values (:source cc)) ; [:t1 :ttt]
|
||||
match-column (sql/qualify fulltext-alias fulltext-table) ; :ttt.t1
|
||||
match-value (cc/argument->value cc search)
|
||||
|
||||
[datom-table datom-alias] (source/source->non-fulltext-from (:source cc))
|
||||
|
||||
;; The following will end up being added to the CC.
|
||||
from [[fulltext-table fulltext-alias]
|
||||
[datom-table datom-alias]]
|
||||
|
||||
extracted-types {} ; TODO
|
||||
known-types {entity :db.type/ref} ; All entities are refs.
|
||||
|
||||
wheres (concat
|
||||
[[:match match-column match-value] ; The FTS match.
|
||||
|
||||
;; The fulltext rowid-to-datom correspondence.
|
||||
[:=
|
||||
(sql/qualify datom-alias :v)
|
||||
(sql/qualify fulltext-alias :rowid)]]
|
||||
|
||||
;; If known, the attribute itself must match.
|
||||
(when (seq attr-constants)
|
||||
(let [a (sql/qualify datom-alias :a)
|
||||
fragments (map (fn [v] [:= a v])
|
||||
attr-constants)]
|
||||
(if (seq (rest fragments))
|
||||
[(cons :or fragments)]
|
||||
fragments))))
|
||||
|
||||
;; Now compose any bindings for entity, value, tx, and score.
|
||||
;; TODO: do we need to examine existing bindings to capture
|
||||
;; wheres for any of these? We shouldn't, because the CC will
|
||||
;; be internally cross-where'd when everything is done...
|
||||
;; TODO: bind attribute?
|
||||
bindings (into {}
|
||||
(filter
|
||||
(comp not nil? first)
|
||||
[[entity [(sql/qualify datom-alias :e)]]
|
||||
[value [match-column]]
|
||||
[tx [(sql/qualify datom-alias :tx)]]
|
||||
|
||||
;; Future: use matchinfo to compute a score
|
||||
;; if this is a variable rather than a placeholder.
|
||||
[score [0]]]))]
|
||||
|
||||
(cc/augment-cc cc from bindings known-types extracted-types wheres)))
|
||||
|
||||
;; get-else is how Datalog handles optional attributes.
|
||||
;;
|
||||
;; It consists of:
|
||||
;; * A bound entity
|
||||
;; * A cardinality-one attribute
|
||||
;; * A var to bind the value
|
||||
;; * A default value.
|
||||
;;
|
||||
;; We model this as:
|
||||
;; * A check against known bindings for the entity.
|
||||
;; * A check against the schema for cardinality-one.
|
||||
;; * Generating a COALESCE expression with a query inside the projection itself.
|
||||
;;
|
||||
;; Note that this will be messy for queries like:
|
||||
;;
|
||||
;; [:find ?page ?title :in $
|
||||
;; :where [?page :page/url _]
|
||||
;; [(get-else ?page :page/title "<empty>") ?title]
|
||||
;; [_ :foo/quoted ?title]]
|
||||
;;
|
||||
;; or
|
||||
;; [(some-function ?title)]
|
||||
;;
|
||||
;; -- we aren't really establishing a binding, so the subquery will be
|
||||
;; repeated. But this will do for now.
|
||||
(defn apply-get-else-clause [cc function]
|
||||
(let [{:keys [source bindings external-bindings]} cc
|
||||
schema (:schema source)
|
||||
|
||||
{:keys [args binding]} function
|
||||
[src e a default-val] args]
|
||||
|
||||
(when-not (instance? BindScalar binding)
|
||||
(raise-str "Expected scalar binding."))
|
||||
(when-not (instance? Variable (:variable binding))
|
||||
(raise-str "Expected variable binding."))
|
||||
(when-not (instance? Constant a)
|
||||
(raise-str "Expected constant attribute."))
|
||||
(when-not (instance? Constant default-val)
|
||||
(raise-str "Expected constant default value."))
|
||||
(when-not (and (instance? SrcVar src)
|
||||
(= "$" (name (:symbol src))))
|
||||
(raise "Non-default sources not supported." {:arg src}))
|
||||
|
||||
(let [a (attribute-in-source source (:value a))
|
||||
a-type (get-in (:schema schema) [a :db/valueType])
|
||||
a-tag (->tag a-type)
|
||||
|
||||
default-val (:value default-val)
|
||||
var (:variable binding)]
|
||||
|
||||
;; Schema check.
|
||||
(when-not (and (integer? a)
|
||||
(not (datomish.schema/multival? schema a)))
|
||||
(raise-str "Attribute " a " is not cardinality-one."))
|
||||
|
||||
;; TODO: type-check the default value.
|
||||
|
||||
(condp instance? e
|
||||
Variable
|
||||
(let [e (:symbol e)
|
||||
e-binding (cc/binding-for-symbol-or-throw cc e)]
|
||||
|
||||
(let [[table _] (source/source->from source a) ; We don't need to alias: single pattern.
|
||||
;; These :limit values shouldn't be needed, but sqlite will
|
||||
;; appreciate them.
|
||||
;; Note that we don't extract type tags here: the attribute
|
||||
;; must be known!
|
||||
subquery {:select
|
||||
[(sql/call
|
||||
:coalesce
|
||||
{:select [:v]
|
||||
:from [table]
|
||||
:where [:and
|
||||
[:= 'a a]
|
||||
[:= 'e e-binding]]
|
||||
:limit 1}
|
||||
(->SQLite default-val))]
|
||||
:limit 1}]
|
||||
(->
|
||||
(assoc-in cc [:known-types (:symbol var)] a-type)
|
||||
(util/append-in [:bindings (:symbol var)] subquery))))
|
||||
|
||||
(raise-str "Can't handle entity" e)))))
|
||||
|
||||
(defn apply-ground-clause [cc function]
|
||||
(let [{:keys [args binding]} function]
|
||||
(when-not (= (count args) 1)
|
||||
(raise-str "Too many args to ground."))
|
||||
|
||||
(when-not (and (instance? BindScalar binding)
|
||||
(instance? Variable (:variable binding)))
|
||||
(raise-str "ground only binds scalars."))
|
||||
|
||||
(let [var (:variable binding)
|
||||
val (first args)
|
||||
constant? (instance? Constant val)
|
||||
external (when (instance? Variable val)
|
||||
(first (get (:external-bindings cc) (:symbol val))))]
|
||||
|
||||
(when-not (or constant? external)
|
||||
(raise-str "ground argument must be constant or externally bound."))
|
||||
|
||||
(-> cc
|
||||
;; TODO: figure out if we can conclusively know the type of the var.
|
||||
; (assoc-in [:known-types (:symbol var)] nil)
|
||||
|
||||
(util/append-in [:bindings (:symbol var)]
|
||||
(if constant?
|
||||
(:value val)
|
||||
external))))))
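;; Illustrative usage (hypothetical attribute): `ground` binds a scalar, here a
;; constant, to a variable used elsewhere in the query.
(comment
  '[:find ?e :where
    [(ground 13) ?type]
    [?e :page/visit-type ?type]])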
|
||||
|
||||
(def sql-functions
|
||||
;; Future: versions of this that uses snippet() or matchinfo().
|
||||
{"fulltext" apply-fulltext-clause
|
||||
"get-else" apply-get-else-clause
|
||||
"ground" apply-ground-clause})
|
||||
|
||||
(defn apply-sql-function
|
||||
"Either returns an application of `function` to `cc`, or nil to
|
||||
encourage you to try a different application."
|
||||
[cc function]
|
||||
(when (and (instance? Function function)
|
||||
(instance? PlainSymbol (:fn function)))
|
||||
(when-let [apply-f (get sql-functions (name (:symbol (:fn function))))]
|
||||
(apply-f cc function))))
|
||||
|
||||
;; A fulltext expression parses to:
|
||||
;;
|
||||
;; Function ( :fn, :args )
|
||||
;;
|
||||
;; The args begin with a SrcVar, and then are attr and search.
|
||||
;;
|
||||
;; This binds a relation of [?entity ?value ?tx ?score]:
|
||||
;;
|
||||
;; BindColl
|
||||
;; :binding BindTuple
|
||||
;; :bindings [BindScalar...]
|
||||
;;
|
||||
;; #datascript.parser.Function
|
||||
;; {:fn #datascript.parser.PlainSymbol{:symbol fulltext},
|
||||
;; :args [#datascript.parser.SrcVar{:symbol $}
|
||||
;; #datascript.parser.Constant{:value :artist/name}
|
||||
;; #datascript.parser.Variable{:symbol ?search}],
|
||||
;; :binding #datascript.parser.BindColl
|
||||
;; {:binding #datascript.parser.BindTuple
|
||||
;; {:bindings [
|
||||
;; #datascript.parser.BindScalar{:variable #datascript.parser.Variable{:symbol ?entity}}
|
||||
;; #datascript.parser.BindScalar{:variable #datascript.parser.Variable{:symbol ?name}}
|
||||
;; #datascript.parser.BindScalar{:variable #datascript.parser.Variable{:symbol ?tx}}
|
||||
;; #datascript.parser.BindScalar{:variable #datascript.parser.Variable{:symbol ?score}}]}}}
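;; Illustrative query shape that would parse to the structure above (attribute
;; and variable names mirror that example):
(comment
  '[:find ?entity ?name :in $ ?search :where
    [(fulltext $ :artist/name ?search) [[?entity ?name ?tx ?score]]]])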
|
|
@@ -1,334 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.projection
|
||||
(:require
|
||||
[honeysql.core :as sql]
|
||||
[datomish.query.source :as source]
|
||||
[datomish.sqlite-schema :as ss]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datascript.parser :as dp
|
||||
#?@(:cljs [:refer
|
||||
[Aggregate
|
||||
Constant
|
||||
DefaultSrc
|
||||
FindRel FindColl FindTuple FindScalar
|
||||
Pattern
|
||||
Placeholder
|
||||
PlainSymbol
|
||||
Variable
|
||||
]])]
|
||||
)
|
||||
#?(:clj (:import
|
||||
[datascript.parser
|
||||
Aggregate
|
||||
Constant
|
||||
DefaultSrc
|
||||
FindRel FindColl FindTuple FindScalar
|
||||
Pattern
|
||||
Placeholder
|
||||
PlainSymbol
|
||||
Variable
|
||||
])))
|
||||
|
||||
(defn lookup-variable [cc variable]
|
||||
(or (-> cc :bindings variable first)
|
||||
(raise-str "Couldn't find variable " variable)))
|
||||
|
||||
(def aggregate-functions
|
||||
{:avg :avg
|
||||
:count :count
|
||||
:max :max
|
||||
:min :min
|
||||
:sum :total
|
||||
})
|
||||
|
||||
(defn- aggregate-symbols->projected-var [fn-symbol var-symbol]
|
||||
(keyword (str "_" (name fn-symbol) "_" (subs (name var-symbol) 1))))
|
||||
|
||||
(defn- aggregate->projected-var [elem]
|
||||
(aggregate-symbols->projected-var (:symbol (:fn elem))
|
||||
(:symbol (first (:args elem)))))
|
||||
|
||||
(defn simple-aggregate?
|
||||
"If `elem` is a simple aggregate -- symbolic function, one var arg --
|
||||
return the variable symbol."
|
||||
[elem]
|
||||
(when (instance? Aggregate elem)
|
||||
(let [{:keys [fn args]} elem]
|
||||
(when (and (instance? PlainSymbol fn)
|
||||
(= 1 (count args)))
|
||||
(let [arg (first args)]
|
||||
(when (instance? Variable arg)
|
||||
(:symbol arg)))))))
|
||||
|
||||
(defn- aggregate->var [elem]
|
||||
(when (instance? Aggregate elem)
|
||||
(when-not (simple-aggregate? elem)
|
||||
(raise-str "Only know how to handle simple aggregates."))
|
||||
|
||||
(:symbol (first (:args elem)))))
|
||||
|
||||
(defn- variable->var [elem]
|
||||
(when (instance? Variable elem)
|
||||
(:symbol elem)))
|
||||
|
||||
(defn- aggregate->projection [elem context lookup-fn]
|
||||
(when (instance? Aggregate elem)
|
||||
(when-not (simple-aggregate? elem)
|
||||
(raise-str "Only know how to handle simple aggregates."))
|
||||
|
||||
(let [var-symbol (:symbol (first (:args elem)))
|
||||
fn-symbol (:symbol (:fn elem))
|
||||
lookup-var (lookup-fn var-symbol)
|
||||
aggregate-fn (get aggregate-functions (keyword fn-symbol))]
|
||||
|
||||
(when-not aggregate-fn
|
||||
(raise-str "Unknown aggregate function " fn-symbol))
|
||||
|
||||
(let [funcall-var (util/aggregate->sql-var aggregate-fn lookup-var)
|
||||
project-as (aggregate-symbols->projected-var fn-symbol var-symbol)]
|
||||
[[funcall-var project-as]]))))
|
||||
|
||||
(defn- type-projection
|
||||
"Produce a projection pair by looking up `var` in the provided
|
||||
`extracted-types`."
|
||||
[extracted-types var]
|
||||
(when-let [t (get extracted-types var)]
|
||||
[t (util/var->sql-type-var var)]))
|
||||
|
||||
(defn- aggregate-type-projection
|
||||
"Produce a passthrough projection pair for a type field
|
||||
in an inner query."
|
||||
[inner var]
|
||||
(let [type-var (util/var->sql-type-var var)]
|
||||
[(sql/qualify inner type-var) type-var]))
|
||||
|
||||
(defn- symbol->projection
|
||||
"Given a variable symbol, produce a projection pair.
|
||||
`lookup-fn` will be used to find a column. For a non-aggregate query,
|
||||
this will typically be a lookup into the CC's bindings. For an
|
||||
aggregate query it'll be a qualification of the same var into the
|
||||
subquery.
|
||||
`known-types` is a type map to decide whether to project a type tag.
|
||||
`type-projection-fn` is like `lookup-fn` but for type tag columns."
|
||||
[var lookup-fn known-types type-projection-fn]
|
||||
(let [lookup-var (lookup-fn var)
|
||||
projected-var (util/var->sql-var var)
|
||||
var-projection [lookup-var projected-var]]
|
||||
|
||||
;; If the type of a variable isn't explicitly known, we also select
|
||||
;; its type column so we can transform it.
|
||||
(if-let [type-proj (when (not (contains? known-types var))
|
||||
(type-projection-fn var))]
|
||||
[var-projection type-proj]
|
||||
[var-projection])))
|
||||
|
||||
(defn- variable->projection [elem lookup-fn known-types type-projection-fn]
|
||||
(when (instance? Variable elem)
|
||||
(symbol->projection (:symbol elem) lookup-fn known-types type-projection-fn)))
|
||||
|
||||
(defn sql-projection-for-relation
|
||||
"Take a `find` clause's `:elements` list and turn it into a SQL
|
||||
projection clause, suitable for passing as a `:select` clause to
|
||||
honeysql.
|
||||
|
||||
For example:
|
||||
|
||||
[Variable{:symbol ?foo}, Variable{:symbol ?bar}]
|
||||
|
||||
with bindings in the context:
|
||||
|
||||
{?foo [:datoms12.e :datoms13.v], ?bar [:datoms13.e]}
|
||||
|
||||
=>
|
||||
|
||||
[[:datoms12.e :foo] [:datoms13.e :bar]]
|
||||
|
||||
Note that we also look at `:group-by-vars`, because we need to
|
||||
alias columns and apply `DISTINCT` to those columns in order to
|
||||
aggregate correctly.
|
||||
|
||||
This function unpacks aggregate operations, selecting the underlying var instead.
|
||||
|
||||
@param context A Context, containing elements.
|
||||
@return a sequence of pairs."
|
||||
[context]
|
||||
(let [{:keys [group-by-vars elements cc]} context
|
||||
{:keys [known-types extracted-types]} cc]
|
||||
|
||||
;; The primary projections from the :find list.
|
||||
;; Note that deduplication will be necessary, because we unpack aggregates.
|
||||
(let [projected-vars
|
||||
(map (fn [elem]
|
||||
(or (aggregate->var elem)
|
||||
(variable->var elem)
|
||||
(raise "Only able to :find variables or aggregates."
|
||||
{:elem elem})))
|
||||
elements)
|
||||
|
||||
;; If we have any GROUP BY requirements from :with, that aren't already
|
||||
;; included in the above, project them now.
|
||||
additional-vars
|
||||
(clojure.set/difference
|
||||
(set group-by-vars)
|
||||
(set projected-vars))
|
||||
|
||||
full-var-list
|
||||
(distinct (concat projected-vars additional-vars))
|
||||
|
||||
type-proj-fn
|
||||
(partial type-projection extracted-types)
|
||||
|
||||
lookup-fn
|
||||
(partial lookup-variable cc)]
|
||||
|
||||
(mapcat (fn [var]
|
||||
(symbol->projection var lookup-fn known-types type-proj-fn))
|
||||
full-var-list))))
|
||||
|
||||
;; Like sql-projection-for-relation, but exposed for simpler
|
||||
;; use (e.g., in handling complex `or` patterns).
|
||||
(defn sql-projection-for-simple-variable-list [elements cc]
|
||||
{:pre [(every? (partial instance? Variable) elements)]}
|
||||
(let [{:keys [known-types extracted-types]} cc
|
||||
|
||||
projected-vars
|
||||
(map variable->var elements)
|
||||
|
||||
type-proj-fn
|
||||
(partial type-projection extracted-types)
|
||||
|
||||
lookup-fn
|
||||
(partial lookup-variable cc)]
|
||||
|
||||
(mapcat (fn [var]
|
||||
(symbol->projection var lookup-fn known-types type-proj-fn))
|
||||
projected-vars)))
|
||||
|
||||
(defn sql-projection-for-aggregation
|
||||
"Project an element list that contains aggregates. This expects a subquery
|
||||
aliased to `inner-table` which itself will project each var with the
|
||||
correct name."
|
||||
[context inner-table]
|
||||
(let [{:keys [group-by-vars elements cc]} context
|
||||
{:keys [known-types extracted-types]} cc
|
||||
lookup-fn (fn [var]
|
||||
(sql/qualify inner-table (util/var->sql-var var)))
|
||||
type-proj-fn (partial aggregate-type-projection inner-table)]
|
||||
(mapcat (fn [elem]
|
||||
(or (variable->projection elem lookup-fn known-types type-proj-fn)
|
||||
(aggregate->projection elem context lookup-fn)
|
||||
(raise "Only able to :find variables or aggregates."
|
||||
{:elem elem})))
|
||||
elements)))
|
||||
|
||||
(defn make-projectors-for-columns [elements known-types extracted-types]
|
||||
{:pre [(map? extracted-types)
|
||||
(map? known-types)]}
|
||||
(letfn [(variable->projector [elem known-types extracted-types tag-decoder]
|
||||
(when (instance? Variable elem)
|
||||
(let [var (:symbol elem)
|
||||
projected-var (util/var->sql-var var)]
|
||||
|
||||
(if-let [type (get known-types var)]
|
||||
;; We know the type! We already know how to decode it.
|
||||
;; TODO: most of these tags don't actually require calling through to <-tagged-SQLite.
|
||||
;; TODO: optimize this without making it horrible.
|
||||
(let [decoder (tag-decoder (ss/->tag type))]
|
||||
(fn [row]
|
||||
(decoder (get row projected-var))))
|
||||
|
||||
;; We don't know the type. Find the type projection column
|
||||
;; and use it to decode the value.
|
||||
(if (contains? extracted-types var)
|
||||
(let [type-column (util/var->sql-type-var var)]
|
||||
(fn [row]
|
||||
(ss/<-tagged-SQLite
|
||||
(get row type-column)
|
||||
(get row projected-var))))
|
||||
|
||||
;; We didn't extract a type and we don't know it in advance.
|
||||
;; Just pass through; the :col will look itself up in the row.
|
||||
projected-var)))))
|
||||
|
||||
;; For now we assume numerics and that everything will shake out in the wash.
|
||||
(aggregate->projector [elem]
|
||||
(when (instance? Aggregate elem)
|
||||
(let [var (aggregate->projected-var elem)]
|
||||
(fn [row]
|
||||
(get row var)))))]
|
||||
|
||||
(let [tag-decoder (memoize
|
||||
(fn [tag]
|
||||
(partial ss/<-tagged-SQLite tag)))]
|
||||
(map (fn [elem]
|
||||
(or (variable->projector elem known-types extracted-types tag-decoder)
|
||||
(aggregate->projector elem)))
|
||||
elements))))
|
||||
|
||||
(defn row-pair-transducer [context]
|
||||
(let [{:keys [find-spec elements cc]} context
|
||||
{:keys [source known-types extracted-types]} cc
|
||||
|
||||
;; We know the projection will fail above if these aren't simple variables or aggregates.
|
||||
projectors
|
||||
(make-projectors-for-columns elements known-types extracted-types)
|
||||
|
||||
single-column-find-spec?
|
||||
(or (instance? FindScalar find-spec)
|
||||
(instance? FindColl find-spec))]
|
||||
|
||||
(map
|
||||
(if single-column-find-spec?
|
||||
;; We're only grabbing one result from each row.
|
||||
(let [projector (first projectors)]
|
||||
(when (second projectors)
|
||||
(raise "Single-column find spec used, but multiple projectors present."
|
||||
{:elements elements
|
||||
:projectors projectors
|
||||
:find-spec find-spec}))
|
||||
|
||||
(fn [[row err]]
|
||||
(if err
|
||||
[nil err]
|
||||
[(projector row) nil])))
|
||||
|
||||
;; Otherwise, collect each column into a sequence.
|
||||
(fn [[row err]]
|
||||
(if err
|
||||
[nil err]
|
||||
[(map (fn [projector] (projector row)) projectors) nil]))))))
|
||||
|
||||
(defn extract-group-by-vars
|
||||
"Take inputs to :find and, if any aggregates exist in `elements`,
|
||||
return the variable names upon which we should GROUP BY."
|
||||
[elements with]
|
||||
(when (some #(instance? Aggregate %1) elements)
|
||||
(loop [ignore #{}
|
||||
group-by (map :symbol with)
|
||||
e elements]
|
||||
|
||||
(if-let [element (first e)]
|
||||
(if-let [aggregated-var (simple-aggregate? element)]
|
||||
(recur (conj ignore aggregated-var)
|
||||
group-by
|
||||
(rest e))
|
||||
(if (instance? Variable element)
|
||||
(let [var (:symbol element)]
|
||||
(recur ignore
|
||||
(if (contains? ignore var)
|
||||
group-by
|
||||
(conj group-by var))
|
||||
(rest e)))
|
||||
(raise-str "Unknown element." {:element element})))
|
||||
|
||||
;; Done. Remove any later vars we saw.
|
||||
(remove ignore group-by)))))
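;; Illustrative sketch (attributes are hypothetical; assumes DataScript's
;; parsed-query keys :qfind and :qwith): the aggregated var is dropped from
;; the grouping list, while :with vars and plain :find vars remain.
(comment
  (let [parsed (dp/parse-query
                 '[:find (max ?timestamp) ?page
                   :with ?visit
                   :where [?visit :visit/page ?page]
                          [?visit :visit/at ?timestamp]])]
    (extract-group-by-vars (:elements (:qfind parsed))
                           (:qwith parsed)))
  ;; => roughly (?page ?visit)
  )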
|
|
@@ -1,141 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.source
|
||||
(:require
|
||||
[datomish.query.transforms :as transforms]
|
||||
[datomish.schema :as schema]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str]]
|
||||
[datascript.parser
|
||||
#?@(:cljs
|
||||
[:refer [Variable Constant Placeholder]])])
|
||||
#?(:clj
|
||||
(:import [datascript.parser Variable Constant Placeholder])))
|
||||
|
||||
(defn gensym-table-alias [table]
|
||||
(gensym (name table)))
|
||||
|
||||
;;;
|
||||
;;; A source is something that can match patterns. For example:
|
||||
;;;
|
||||
;;; * The database itself.
|
||||
;;; * The history of the database.
|
||||
;;; * A filtered version of the database or the history.
|
||||
;;;
|
||||
;;; We model this in a SQL context as something that can:
|
||||
;;;
|
||||
;;; * Give us a table name.
|
||||
;;; * Give us a new alias for the table name.
|
||||
;;; * Provide us with a list of columns to match, positionally,
|
||||
;;; against patterns.
|
||||
;;; * Provide us with a set of WHERE fragments that, in combination
|
||||
;;; with the table name, denote the source.
|
||||
;;; * Transform constants and attributes into something usable
|
||||
;;; by the source.
|
||||
|
||||
(defprotocol Source
|
||||
(source->from [source attribute]
|
||||
"Returns a pair, `[table alias]` for a pattern with the provided attribute.")
|
||||
(source->non-fulltext-from [source])
|
||||
(source->fulltext-from [source]
|
||||
"Returns a pair, `[table alias]` for querying the source's fulltext index.")
|
||||
(source->fulltext-values [source]
|
||||
"Returns a pair, `[table alias]` for querying the source's fulltext values")
|
||||
(source->constraints [source alias])
|
||||
(pattern->schema-value-type [source pattern])
|
||||
(attribute-in-source [source attribute])
|
||||
(constant-in-source [source constant]))
|
||||
|
||||
(defrecord
|
||||
DatomsSource
|
||||
[table ; Typically :datoms.
|
||||
fulltext-table ; Typically :fulltext_datoms
|
||||
fulltext-view ; Typically :all_datoms
|
||||
fulltext-values ; Typically :fulltext_values
|
||||
columns ; e.g., [:e :a :v :tx]
|
||||
schema ; An ISchema instance.
|
||||
|
||||
;; `attribute-transform` is a function from attribute to constant value. Used to
|
||||
;; turn, e.g., :p/attribute into an interned integer.
|
||||
;; `constant-transform` is a function from constant value to constant value. Used to
|
||||
;; turn, e.g., the literal 'true' into 1.
|
||||
attribute-transform
|
||||
constant-transform
|
||||
|
||||
;; `table-alias` is a function from table to alias, e.g., :datoms => :datoms1234.
|
||||
table-alias
|
||||
|
||||
;; Not currently used.
|
||||
make-constraints ; ?fn [source alias] => [where-clauses]
|
||||
]
|
||||
Source
|
||||
|
||||
(source->from [source attribute]
|
||||
(let [schema (:schema source)
|
||||
int->table (fn [a]
|
||||
(if (schema/fulltext? schema a)
|
||||
(:fulltext-table source)
|
||||
(:table source)))
|
||||
table
|
||||
(cond
|
||||
(integer? attribute)
|
||||
(int->table attribute)
|
||||
|
||||
(instance? Constant attribute)
|
||||
(let [a (:value attribute)
|
||||
id (if (keyword? a)
|
||||
(attribute-in-source source a)
|
||||
a)]
|
||||
(int->table id))
|
||||
|
||||
;; TODO: perhaps we know an external binding already?
|
||||
(or (instance? Variable attribute)
|
||||
(instance? Placeholder attribute))
|
||||
;; It's a variable. We must act as if it could be a fulltext datom.
|
||||
(:fulltext-view source)
|
||||
|
||||
true
|
||||
(raise "Unknown source->from attribute " attribute {:attribute attribute}))]
|
||||
[table ((:table-alias source) table)]))
|
||||
|
||||
(source->non-fulltext-from [source]
|
||||
(let [table (:table source)]
|
||||
[table ((:table-alias source) table)]))
|
||||
|
||||
(source->fulltext-from [source]
|
||||
(let [table (:fulltext-table source)]
|
||||
[table ((:table-alias source) table)]))
|
||||
|
||||
(source->fulltext-values [source]
|
||||
(let [table (:fulltext-values source)]
|
||||
[table ((:table-alias source) table)]))
|
||||
|
||||
(source->constraints [source alias]
|
||||
(when-let [f (:make-constraints source)]
|
||||
(f alias)))
|
||||
|
||||
(pattern->schema-value-type [source pattern]
|
||||
(let [[_ a v _] pattern
|
||||
schema (:schema (:schema source))]
|
||||
(when (instance? Constant a)
|
||||
(let [val (:value a)]
|
||||
(if (keyword? val)
|
||||
;; We need to find the entid for the keyword attribute,
|
||||
;; because the schema stores attributes by ID.
|
||||
(let [id (attribute-in-source source val)]
|
||||
(get-in schema [id :db/valueType]))
|
||||
(when (integer? val)
|
||||
(get-in schema [val :db/valueType])))))))
|
||||
|
||||
(attribute-in-source [source attribute]
|
||||
((:attribute-transform source) attribute))
|
||||
|
||||
(constant-in-source [source constant]
|
||||
((:constant-transform source) constant)))
|
|
@@ -1,27 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.query.transforms)
|
||||
|
||||
#?(:clj
|
||||
(defn boolean? [x]
|
||||
(instance? Boolean x)))
|
||||
|
||||
(defn attribute-transform-string
|
||||
"Turns :p/foo into \"p/foo\". Adequate for testing, but this depends on the storage schema."
|
||||
[x]
|
||||
(str (namespace x) "/" (name x)))
|
||||
|
||||
(defn constant-transform-default [x]
|
||||
(if (boolean? x)
|
||||
(if x 1 0)
|
||||
(if (keyword? x)
|
||||
(attribute-transform-string x)
|
||||
x)))
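;; For example:
(comment
  (constant-transform-default true)   ;=> 1
  (constant-transform-default false)  ;=> 0
  (constant-transform-default :p/foo) ;=> "p/foo"
  (constant-transform-default 42))    ;=> 42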
|
|
@@ -1,217 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
;; Purloined from DataScript.
|
||||
|
||||
(ns datomish.schema
|
||||
(:require
|
||||
[datomish.sqlite-schema :as sqlite-schema]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise]]))
|
||||
|
||||
(defn entid? [x]
|
||||
(and (integer? x) (pos? x)))
|
||||
|
||||
(defprotocol ISchema
|
||||
(attrs-by
|
||||
[schema property]
|
||||
"TODO: document this, think more about making this part of the schema."))
|
||||
|
||||
(defn- #?@(:clj [^Boolean is-attr?]
|
||||
:cljs [^boolean is-attr?]) [schema attr property]
|
||||
(contains? (attrs-by schema property) attr))
|
||||
|
||||
(defn #?@(:clj [^Boolean multival?]
|
||||
:cljs [^boolean multival?]) [schema attr]
|
||||
(is-attr? schema attr :db.cardinality/many))
|
||||
|
||||
(defn #?@(:clj [^Boolean ref?]
|
||||
:cljs [^boolean ref?]) [schema attr]
|
||||
(is-attr? schema attr :db.type/ref))
|
||||
|
||||
(defn #?@(:clj [^Boolean kw?]
|
||||
:cljs [^boolean kw?]) [schema attr]
|
||||
(is-attr? schema attr :db.type/keyword))
|
||||
|
||||
(defn #?@(:clj [^Boolean component?]
|
||||
:cljs [^boolean component?]) [schema attr]
|
||||
(is-attr? schema attr :db/isComponent))
|
||||
|
||||
(defn #?@(:clj [^Boolean indexing?]
|
||||
:cljs [^boolean indexing?]) [schema attr]
|
||||
(is-attr? schema attr :db/index))
|
||||
|
||||
(defn #?@(:clj [^Boolean fulltext?]
|
||||
:cljs [^boolean fulltext?]) [schema attr]
|
||||
(is-attr? schema attr :db/fulltext))
|
||||
|
||||
(defn #?@(:clj [^Boolean unique?]
|
||||
:cljs [^boolean unique?]) [schema attr]
|
||||
(is-attr? schema attr :db/unique))
|
||||
|
||||
(defn #?@(:clj [^Boolean unique-identity?]
|
||||
:cljs [^boolean unique-identity?]) [schema attr]
|
||||
(is-attr? schema attr :db.unique/identity))
|
||||
|
||||
(defn #?@(:clj [^Boolean unique-value?]
|
||||
:cljs [^boolean unique-value?]) [schema attr]
|
||||
(is-attr? schema attr :db.unique/value))
|
||||
|
||||
(defn doc [schema attr]
|
||||
(get-in (.-schema schema) [attr :db/doc]))
|
||||
|
||||
(defn valueType [schema attr]
|
||||
(let [schema (.-schema schema)]
|
||||
(get-in schema [attr :db/valueType])))
|
||||
|
||||
(defn schema? [x]
|
||||
(satisfies? ISchema x))
|
||||
|
||||
(defrecord Schema [schema rschema]
|
||||
ISchema
|
||||
(attrs-by [schema property]
|
||||
((.-rschema schema) property)))
|
||||
|
||||
(defn- attr->properties [k v]
|
||||
(cond
|
||||
(= [k v] [:db/isComponent true]) [:db/isComponent]
|
||||
(= v :db.type/ref) [:db.type/ref :db/index]
|
||||
(= v :db.cardinality/many) [:db.cardinality/many]
|
||||
(= v :db.unique/identity) [:db/unique :db.unique/identity :db/index]
|
||||
(= v :db.unique/value) [:db/unique :db.unique/value :db/index]
|
||||
(= [k v] [:db/index true]) [:db/index]
|
||||
(= [k v] [:db/fulltext true]) [:db/fulltext :db/index]
|
||||
(= k :db/valueType) [v]))
|
||||
|
||||
(defn- multimap [e m]
|
||||
(reduce
|
||||
(fn [acc [k v]]
|
||||
(update-in acc [k] (fnil conj e) v))
|
||||
{} m))
|
||||
|
||||
(defn rschema [schema]
|
||||
(->>
|
||||
(for [[a kv] schema
|
||||
[k v] kv
|
||||
prop (attr->properties k v)]
|
||||
[prop a])
|
||||
(multimap #{})))
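
;; Editor's illustrative sketch (not part of the original file): the reverse
;; schema computed for a minimal attribute map. The attribute names are
;; hypothetical; result keys are shown in no particular order.
(comment
  (rschema {:page/url   {:db/valueType :db.type/string
                         :db/unique    :db.unique/identity}
            :page/likes {:db/valueType   :db.type/ref
                         :db/cardinality :db.cardinality/many}})
  ;; => {:db.type/string      #{:page/url}
  ;;     :db/unique           #{:page/url}
  ;;     :db.unique/identity  #{:page/url}
  ;;     :db/index            #{:page/url :page/likes}
  ;;     :db.type/ref         #{:page/likes}
  ;;     :db.cardinality/many #{:page/likes}}
  )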
|
||||
|
||||
(defn- validate-schema-key [a k v expected]
|
||||
(when-not (or (nil? v)
|
||||
(contains? expected v))
|
||||
(throw (ex-info (str "Bad attribute specification for " (pr-str {a {k v}}) ", expected one of " expected)
|
||||
{:error :schema/validation
|
||||
:attribute a
|
||||
:key k
|
||||
:value v}))))
|
||||
|
||||
#?(:clj
|
||||
(defn uuidish? [x]
|
||||
(instance? java.util.UUID x)))
|
||||
#?(:cljs
|
||||
(let [uuid-re (js/RegExp. "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" "i")]
|
||||
(defn uuidish? [x]
|
||||
(and (string? x)
|
||||
(re-find uuid-re x)))))
|
||||
|
||||
(def value-type-map
|
||||
{:db.type/ref { :valid? entid? }
|
||||
:db.type/keyword { :valid? keyword? }
|
||||
:db.type/string { :valid? string? }
|
||||
:db.type/boolean { :valid? #?(:clj #(instance? Boolean %) :cljs #(= js/Boolean (type %))) }
|
||||
:db.type/long { :valid? integer? }
|
||||
:db.type/uuid { :valid? uuidish? }
|
||||
:db.type/instant { :valid? #?(:clj #(instance? java.util.Date %) :cljs #(= js/Date (type %))) }
|
||||
:db.type/uri { :valid? #?(:clj #(instance? java.net.URI %) :cljs string?) }
|
||||
:db.type/double { :valid? #?(:clj float? :cljs number?) }
|
||||
})
|
||||
|
||||
(defn #?@(:clj [^Boolean ensure-value-matches-type]
|
||||
:cljs [^boolean ensure-value-matches-type]) [type value]
|
||||
(if-let [valid? (get-in value-type-map [type :valid?])]
|
||||
(when-not (valid? value)
|
||||
(raise "Invalid value for type " type "; got " value
|
||||
{:error :schema/valueType, :type type, :value value}))
|
||||
(raise "Unknown valueType " type ", expected one of " (sorted-set (keys value-type-map))
|
||||
{:error :schema/valueType, :type type})))
|
||||
|
||||
;; There's some duplication here so we get better error messages.
|
||||
(defn #?@(:clj [^Boolean ensure-valid-value]
|
||||
:cljs [^boolean ensure-valid-value]) [schema attr value]
|
||||
{:pre [(schema? schema)
|
||||
(integer? attr)]}
|
||||
(let [schema (.-schema schema)]
|
||||
(if-let [valueType (get-in schema [attr :db/valueType])]
|
||||
(if-let [valid? (get-in value-type-map [valueType :valid?])]
|
||||
(when-not (valid? value)
|
||||
(raise "Invalid value for attribute " attr ", expected " valueType " but got " value
|
||||
{:error :schema/valueType, :attribute attr, :value value}))
|
||||
(raise "Unknown valueType for attribute " attr ", expected one of " (sorted-set (keys value-type-map))
|
||||
{:error :schema/valueType, :attribute attr}))
|
||||
(raise "Unknown attribute " attr ", expected one of " (sorted-set (keys schema))
|
||||
{:error :schema/valueType, :attribute attr}))))
|
||||
|
||||
(defn ->SQLite [schema attr value]
|
||||
{:pre [(schema? schema)
|
||||
(integer? attr)]}
|
||||
(let [schema (.-schema schema)]
|
||||
(if-let [valueType (get-in schema [attr :db/valueType])]
|
||||
(if-let [valid? (get-in value-type-map [valueType :valid?])]
|
||||
(if (valid? value)
|
||||
[(sqlite-schema/->SQLite value) (sqlite-schema/->tag valueType)]
|
||||
(raise "Invalid value for attribute " attr ", expected " valueType " but got " value
|
||||
{:error :schema/valueType, :attribute attr, :value value}))
|
||||
(raise "Unknown valueType for attribute " attr ", expected one of " (sorted-set (keys value-type-map))
|
||||
{:error :schema/valueType, :attribute attr}))
|
||||
(raise "Unknown attribute " attr ", expected one of " (sorted-set (keys schema))
|
||||
{:error :schema/valueType, :attribute attr}))))
|
||||
|
||||
(defn <-SQLite [schema attr value]
|
||||
{:pre [(schema? schema)]}
|
||||
(let [schema (.-schema schema)]
|
||||
(if-let [valueType (get-in schema [attr :db/valueType])]
|
||||
(if (contains? value-type-map valueType)
|
||||
(sqlite-schema/<-SQLite valueType value)
|
||||
(raise "Unknown valueType for attribute " attr ", expected one of " (sorted-set (keys value-type-map))
|
||||
{:error :schema/valueType, :attribute attr}))
|
||||
(raise "Unknown attribute " attr ", expected one of " (sorted-set (keys schema))
|
||||
{:error :schema/valueType, :attribute attr}))))
|
||||
|
||||
(defn validate-schema [schema]
|
||||
(doseq [[a kv] schema]
|
||||
(when-not (:db/valueType kv)
|
||||
(throw (ex-info (str "Bad attribute specification for " a ": should have {:db/valueType ...}")
|
||||
{:error :schema/validation
|
||||
:attribute a
|
||||
:key :db/valueType})))
|
||||
(let [comp? (:db/isComponent kv false)]
|
||||
(validate-schema-key a :db/isComponent (:db/isComponent kv) #{true false})
|
||||
(when (and comp? (not= (:db/valueType kv) :db.type/ref))
|
||||
(throw (ex-info (str "Bad attribute specification for " a ": {:db/isComponent true} should also have {:db/valueType :db.type/ref}")
|
||||
{:error :schema/validation
|
||||
:attribute a
|
||||
:key :db/isComponent}))))
|
||||
(let [fulltext? (:db/fulltext kv false)]
|
||||
(validate-schema-key a :db/fulltext (:db/fulltext kv) #{true false})
|
||||
(when (and fulltext? (not= (:db/valueType kv) :db.type/string))
|
||||
(throw (ex-info (str "Bad attribute specification for " a ": {:db/fulltext true} should also have {:db/valueType :db.type/string}")
|
||||
{:error :schema/validation
|
||||
:attribute a
|
||||
:key :db/fulltext}))))
|
||||
(validate-schema-key a :db/unique (:db/unique kv) #{:db.unique/value :db.unique/identity})
|
||||
(validate-schema-key a :db/valueType (:db/valueType kv) (set (keys value-type-map)))
|
||||
(validate-schema-key a :db/cardinality (:db/cardinality kv) #{:db.cardinality/one :db.cardinality/many}))
|
||||
schema)
|
||||
|
||||
(defn schema [schema]
|
||||
{:pre [(or (nil? schema) (map? schema))]}
|
||||
(map->Schema {:schema (validate-schema schema)
|
||||
:rschema (rschema schema)}))
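
;; Editor's illustrative sketch (not part of the original file): building a
;; Schema and querying it with the predicates above. Attributes are keyed by
;; entid in the real store; the small integers here are hypothetical.
(comment
  (let [s (schema {65537 {:db/valueType   :db.type/string
                          :db/cardinality :db.cardinality/one
                          :db/fulltext    true}
                   65538 {:db/valueType   :db.type/ref
                          :db/cardinality :db.cardinality/many}})]
    [(fulltext? s 65537)    ;; => true
     (multival? s 65538)    ;; => true
     (ref? s 65538)         ;; => true
     (valueType s 65537)])  ;; => :db.type/string
  )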
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.schema-changes
|
||||
(:require
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]))
|
||||
|
||||
(defn datoms->schema-fragment
  "Turn [[:db.part/db :db.install/attribute e] [e :db/ident :attr]] into {:attr {:db/* v}}.

  From http://docs.datomic.com/schema.html, :db/ident, :db/valueType,
  and :db/cardinality are required. For us, enforce that valueType and
  cardinality are present at schema validation time.

  This code is not pretty, so here's what it does:

  Input: a sequence of datoms, like [e :keyword-attr v _ added].
  1. Select [:db.part/db :db.install/attribute ... ].
  2. Fail if any are not (= added true)
  3. For each [ :db.part/db :db.install/attribute e ], collect
     {e {:db/* v}}, dropping the inner :db/ident key.
  4. Map e -> ident; fail if not possible.
  5. Return the map, with ident keys.

  This would be more pleasant with `q` and pull expressions.

  Note that this function takes as input an existing map of {entid ident}.
  That's because it's possible for an ident to be established in a separate
  set of datoms -- we can't re-insert it without uniqueness constraint
  violations, so we just provide it here."

  [datoms existing-idents]
  {:pre [(sequential? datoms)]}
|
||||
|
||||
(let [db-install? (fn [datom]
|
||||
(= [:db.part/db :db.install/attribute] ((juxt :e :a) datom)))
|
||||
db-installs (filter db-install? datoms)]
|
||||
(if (empty? db-installs)
|
||||
{}
|
||||
(if-let [retracted (first (filter (comp not :added) db-installs))]
|
||||
(raise "Retracting a :db.install/attribute is not yet supported, got " retracted
|
||||
{:error :schema/db-install :op retracted})
|
||||
(let [by-e (group-by :e datoms)
|
||||
|
||||
;; TODO: pull entity from database, rather than expecting entire attribute to be in single transaction.
|
||||
installed-es (select-keys by-e (map :v db-installs))
|
||||
;; select-keys ignores missing keys. We don't want that.
|
||||
installed-es (merge (into {} (map (juxt :v (constantly {})) db-installs)) installed-es)
|
||||
|
||||
db-*? (fn [datom]
|
||||
(= "db" (namespace (:a datom))))]
|
||||
|
||||
;; Just the :db/* attribute-value pairs.
|
||||
(into {} (for [[e datoms] installed-es]
|
||||
(let [->av (juxt :a :v)
|
||||
;; TODO: transduce!
|
||||
db-avs (into {} (map ->av (filter db-*? datoms)))]
|
||||
(if-let [ident (or (:db/ident db-avs)
|
||||
;; The schema table wants a keyword, not an entid, and
|
||||
;; we need to check the existing idents…
|
||||
(when (contains? existing-idents e)
|
||||
(if (keyword? e)
|
||||
e
|
||||
(get existing-idents e))))]
|
||||
[ident (dissoc db-avs :db/ident)]
|
||||
(raise ":db.install/attribute requires :db/ident, got " db-avs " for " e
|
||||
{:error :schema/db-install :op db-avs}))))))))))
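
;; Editor's illustrative sketch (not part of the original file): the input and
;; output shapes for datoms->schema-fragment. Plain maps stand in for Datom
;; records here, and the entid 65536 is hypothetical.
(comment
  (datoms->schema-fragment
    [{:e :db.part/db :a :db.install/attribute :v 65536               :added true}
     {:e 65536       :a :db/ident             :v :test/attr          :added true}
     {:e 65536       :a :db/valueType         :v :db.type/string     :added true}
     {:e 65536       :a :db/cardinality       :v :db.cardinality/one :added true}]
    {})
  ;; => {:test/attr {:db/valueType   :db.type/string
  ;;                 :db/cardinality :db.cardinality/one}}
  )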
|
|
@ -1,361 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.schema-management
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair <?? <?]]))
|
||||
(:require
|
||||
[clojure.data :refer [diff]]
|
||||
#?@(:clj [[datomish.pair-chan :refer [go-pair <?? <?]]])
|
||||
[clojure.set]
|
||||
[datomish.api :as d]
|
||||
[datomish.schema] ; For validation.
|
||||
[datomish.util :as util
|
||||
#?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]))
|
||||
|
||||
;; For testing.
|
||||
(def log (fn [& args]) #_ println)
|
||||
|
||||
;; This code implements the concept described in
;; https://github.com/mozilla/datomish/wiki/Proposal:-application-schema-coordination-and-versioning
;;
;; This is a high-level API: it's built on top of the schema alteration
;; primitives and plain ol' storage layer that Datomish exposes.
;;
;; Schema fragments are described by name and version number.
;; The existing schema in the store is diffed against incoming fragments.
;;
;; Validation:
;; - No attribute should be mentioned in a different fragment in the
;;   store and the input. Attributes cannot move between fragments.
;; - Fragments with the same name and version should be congruent,
;;   with the only acceptable changes being to add new attributes.
;; - Fragments with an increased version number can make changes to
;;   attributes:
;;   - Adding new attributes.
;;   - Renaming existing attributes.
;;   - Altering cardinality, uniqueness, or indexing properties.
;;
;; The inputs to the diffing process are:
;; - The set of schema fragments.
;; - The existing database (and implicitly its active schema).
;; - A collection of attribute renames.
;; - A set of app- and fragment-scoped pre/post functions that will
;;   be run before and after schema changes are applied.
;;
;; The output of the diffing process, if validation succeeds, is a
;; set of operations to perform on the knowledge base. If there are
;; no version changes, no pre/post functions will be included.
;;
;; Potential outcomes:
;; - An attribute is mentioned in two incoming fragments: error.
;; - An attribute is in fragment A in the store and fragment B in input: error.
;; - An attribute changed between the store and input, but the version number is the same: error.
;; - An attribute is present in input, but not the store, and the version number is the same: add the attribute.
;; - An attribute is present in the store, but not input, and the version number is the same: do nothing.
;; - A fragment's version number is higher in the store than in the input:
;;   - If the input is a subset of the fragment in the store, then do nothing.
;;   - If the input differs, then error.
;; - A fragment's version number is higher in the input than in the store:
;;   - Run app 'pre' and 'post'.
;;   - Run this fragment's 'pre' and 'post'.
;;   - Alter the store to match. If altering fails due to a consistency error, roll back and error out.
;;
;; The core data format here, which we call a "managed schema fragment", is:
;;
;; {:name :org.mozilla.foo
;;  :version 4
;;  :attributes {:foo/bar {:db/valueType ...}}}
;;
;; This can be trivially expanded from the 'simple schema' format used by
;; JS callers:
;;
;; {"name": "org.mozilla.foo",
;;  "version": 4,
;;  "attributes": [
;;    {"name": "foo/bar",
;;     "type": ...}]}
;;
;; and it can be trivially collapsed into the format understood by the
;; transactor, which we call "schema datoms":
;;
;; [{:db/ident :org.mozilla.foo
;;   :db.schema/version 4}
;;  {:db/name :foo/bar
;;   :db.schema/_attribute :org.mozilla.foo
;;   :db/valueType ...}]
|
||||
|
||||
(defn- attribute->datoms [schema-fragment-id [attribute-name attribute-pairs]]
|
||||
(let [attribute-id (d/id-literal :db.part/user)]
|
||||
[(assoc
|
||||
attribute-pairs
|
||||
:db.install/_attribute :db.part/db
|
||||
;; Point back to the fragment.
|
||||
:db.schema/_attribute schema-fragment-id
|
||||
:db/id attribute-id
|
||||
:db/ident attribute-name)]))
|
||||
|
||||
(defn managed-schema-fragment->datoms [{:keys [name version attributes]}]
|
||||
(let [fragment-id (d/id-literal :db.part/db)]
|
||||
(conj
|
||||
(mapcat (partial attribute->datoms fragment-id) attributes)
|
||||
{:db/id fragment-id
|
||||
:db/ident name
|
||||
:db.schema/version version})))
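
;; Editor's illustrative sketch (not part of the original file): expanding a
;; managed schema fragment into transactable entities. The fragment name and
;; attribute are hypothetical; <tempid ...> stands for the opaque id-literal
;; placeholders that d/id-literal returns. The fragment entity comes first
;; because it is conj'ed onto the front of the attribute seq.
(comment
  (managed-schema-fragment->datoms
    {:name       :org.mozilla.foo
     :version    4
     :attributes {:foo/bar {:db/valueType   :db.type/string
                            :db/cardinality :db.cardinality/one}}})
  ;; => ({:db/id <tempid :db.part/db>
  ;;      :db/ident :org.mozilla.foo
  ;;      :db.schema/version 4}
  ;;     {:db/id <tempid :db.part/user>
  ;;      :db/ident :foo/bar
  ;;      :db/valueType :db.type/string
  ;;      :db/cardinality :db.cardinality/one
  ;;      :db.install/_attribute :db.part/db
  ;;      :db.schema/_attribute <tempid :db.part/db>})
  )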
|
||||
|
||||
(defn <collect-schema-fragment-versions
|
||||
"Return a map, like {:org.mozilla.foo 5, :org.mozilla.core 2}."
|
||||
[db]
|
||||
(let [ident (partial d/ident db)]
|
||||
(go-pair
|
||||
(into {}
|
||||
(map
|
||||
(fn [[s v]] [(ident s) v])
|
||||
(<?
|
||||
(d/<q db '[:find ?s ?v
|
||||
:in $
|
||||
:where [?s :db.schema/version ?v]])))))))
|
||||
|
||||
(defn <collect-schema-fragment-attributes
|
||||
"Return a map, like {:foo/name :org.mozilla.foo}.
|
||||
Attributes that are not linked to a fragment will not be returned."
|
||||
[db]
|
||||
(let [ident (partial d/ident db)]
|
||||
(go-pair
|
||||
(into {}
|
||||
(map
|
||||
(fn [[a f]] [(ident a) (ident f)])
|
||||
(<?
|
||||
(d/<q db '[:find ?a ?f
|
||||
:in $
|
||||
:where [?f :db.schema/attribute ?a]])))))))
|
||||
|
||||
(defn db->symbolic-schema [db]
|
||||
(:symbolic-schema db))
|
||||
|
||||
(defn changed-attribute->datoms [schema-fragment-id attribute-name existing new-values]
|
||||
(let [differences (first (diff new-values existing))]
|
||||
(when-not (empty? differences)
|
||||
[(merge
|
||||
{:db/id (d/id-literal :db.part/user)
|
||||
:db/ident attribute-name
|
||||
;; Point back to the fragment.
|
||||
:db.schema/_attribute schema-fragment-id
|
||||
:db.alter/_attribute :db.part/db}
|
||||
differences)])))
|
||||
|
||||
(defn changed-schema-fragment->datoms [schema-fragment-id existing-schema name attributes version]
|
||||
(conj
|
||||
(mapcat (fn [[attribute-name new-values]]
|
||||
(let [existing (get existing-schema attribute-name)]
|
||||
(if existing
|
||||
(changed-attribute->datoms
|
||||
schema-fragment-id
|
||||
attribute-name
|
||||
existing
|
||||
new-values)
|
||||
(attribute->datoms
|
||||
schema-fragment-id [attribute-name new-values]))))
|
||||
attributes)
|
||||
{:db.schema/version version
|
||||
:db/ident name
|
||||
:db/id (d/id-literal :db.part/db)}))
|
||||
|
||||
(defn- prepare-schema-application-for-fragments
|
||||
"Given a non-empty collection of fragments known to be new or outdated,
|
||||
yield a migration sequence containing the necessary pre/post ops and
|
||||
transact bodies."
|
||||
[db
|
||||
symbolic-schema
|
||||
schema-fragment-versions
|
||||
schema-fragment-attributes
|
||||
{:keys [fragments pre post fragment-pre fragment-post] :as args}]
|
||||
|
||||
(when-let
|
||||
[body
|
||||
(mapcat
|
||||
(fn [{:keys [name version attributes] :as fragment}]
|
||||
(let [existing-version (get schema-fragment-versions name)
|
||||
|
||||
datoms
|
||||
[[:transact
|
||||
(if existing-version
|
||||
;; It's a change.
|
||||
;; Transact the datoms to effect the change and
|
||||
;; bump the schema fragment version.
|
||||
(changed-schema-fragment->datoms
|
||||
(d/entid db name)
|
||||
symbolic-schema
|
||||
name
|
||||
attributes
|
||||
version)
|
||||
|
||||
;; It's new! Just do it.
|
||||
(managed-schema-fragment->datoms fragment))]]]
|
||||
|
||||
;; We optionally allow you to provide a `:none` migration here, which
|
||||
;; is useful in the case where a vocabulary might have been added
|
||||
;; outside of the schema management system.
|
||||
(concat
|
||||
(when-let [fragment-pre-for-this
|
||||
(get-in fragment-pre [name (or existing-version :none)])]
|
||||
[[:call fragment-pre-for-this]])
|
||||
datoms
|
||||
(when-let [fragment-post-for-this
|
||||
(get-in fragment-post [name (or existing-version :none)])]
|
||||
[[:call fragment-post-for-this]]))))
|
||||
|
||||
fragments)]
|
||||
|
||||
(concat
|
||||
(when pre [[:call pre]])
|
||||
body
|
||||
(when post [[:call post]]))))
|
||||
|
||||
(defn- <prepare-schema-application*
|
||||
[db {:keys [fragments pre post fragment-pre fragment-post] :as args}]
|
||||
{:pre [(map? (first fragments))]}
|
||||
(go-pair
|
||||
(let [symbolic-schema (db->symbolic-schema db)
|
||||
schema-fragment-versions (<? (<collect-schema-fragment-versions db))
|
||||
schema-fragment-attributes (<? (<collect-schema-fragment-attributes db))]
|
||||
|
||||
;; Filter out any incoming fragments that are already present with
|
||||
;; the correct version. Err if any of the fragments are outdated,
|
||||
;; or contain attributes that are already present elsewhere.
|
||||
(let [to-apply
|
||||
(filter
|
||||
(fn [{:keys [name version attributes]}]
|
||||
{:pre [(not (nil? name))
|
||||
(integer? version)
|
||||
(not (empty? attributes))]}
|
||||
|
||||
;; Make sure that every attribute in this fragment is either
|
||||
        ;; already associated with its ident, or not associated with
|
||||
;; anything. We do this before we even check the version.
|
||||
(doseq [attribute-name (keys attributes)]
|
||||
(when-let [existing-fragment (get schema-fragment-attributes attribute-name)]
|
||||
(when-not (= existing-fragment name)
|
||||
(raise "Attribute " attribute-name
|
||||
" already belongs to schema fragment "
|
||||
existing-fragment ", not " name "."
|
||||
{:error :schema/different-fragment
|
||||
:existing existing-fragment
|
||||
:fragment name}))))
|
||||
|
||||
;; Now we know that every attribute is either this fragment's or
|
||||
;; not assigned to one.
|
||||
;; Check our fragment version against the store.
|
||||
(let [existing-version (get schema-fragment-versions name)]
|
||||
(log "Schema" name "at existing version" existing-version)
|
||||
(or (nil? existing-version)
|
||||
(> version existing-version)
|
||||
(when (< version existing-version)
|
||||
(raise "Existing version of " name " is " existing-version
|
||||
", which is later than requested " version "."
|
||||
{:error :schema/outdated-version
|
||||
:name name
|
||||
:version version
|
||||
:existing existing-version})))))
|
||||
fragments)]
|
||||
|
||||
(if (empty? to-apply)
|
||||
(do
|
||||
(log "No fragments left to apply.")
|
||||
nil)
|
||||
(prepare-schema-application-for-fragments
|
||||
db
|
||||
symbolic-schema
|
||||
schema-fragment-versions
|
||||
schema-fragment-attributes
|
||||
(assoc args :fragments to-apply)))))))
|
||||
|
||||
(defn <prepare-schema-application
|
||||
"Take a database and a sequence of managed schema fragments,
|
||||
along with migration tools, and return a migration operation."
|
||||
[db {:keys [fragments pre post fragment-pre fragment-post] :as args}]
|
||||
(when-not (contains? args :fragments)
|
||||
(raise-str "Missing :fragments argument to <prepare-schema-application."))
|
||||
(if (empty? fragments)
|
||||
(go-pair nil)
|
||||
(do
|
||||
;; Validate each fragment.
|
||||
(doseq [fragment fragments]
|
||||
(datomish.schema/validate-schema (:attributes fragment)))
|
||||
(let [repeated-attributes (util/repeated-keys (map :attributes fragments))]
|
||||
(when-not (empty? repeated-attributes)
|
||||
(raise "Attributes appear in more than one fragment: " repeated-attributes
|
||||
{:error :schema/repeated-attributes
|
||||
:repeated repeated-attributes}))
|
||||
|
||||
;; At this point we know we have schema fragments to apply,
|
||||
;; and that they don't overlap. They might still cross fragment
|
||||
;; boundaries when compared to the store, and they might still
|
||||
;; be inconsistent, but we can proceed to the next step.
|
||||
(<prepare-schema-application* db args)))))
|
||||
|
||||
(defn- <schema-fragment-versions-match?
|
||||
"Quickly return true if every provided fragment matches the
|
||||
version in the store."
|
||||
[db fragments]
|
||||
(go-pair
|
||||
(let [schema-fragment-versions (<? (<collect-schema-fragment-versions db))]
|
||||
(every?
|
||||
(fn [{:keys [name version]}]
|
||||
(= (get schema-fragment-versions name)
|
||||
version))
|
||||
fragments))))
|
||||
|
||||
(defn <apply-schema-alteration
|
||||
"Take a database and a sequence of managed schema fragments,
|
||||
along with migration tools, and transact a migration operation.
|
||||
Throws and rolls back if any step of the operation fails. Returns
|
||||
nil if no work was done, or the last db-report otherwise."
|
||||
[conn args]
|
||||
(go-pair
|
||||
(if (or (empty? (:fragments args))
|
||||
(<? (<schema-fragment-versions-match?
|
||||
(d/db conn)
|
||||
(:fragments args))))
|
||||
(log "No schema work to do.")
|
||||
|
||||
;; Do the real fragment op computation inside the transaction.
|
||||
;; This avoids a check-then-write race.
|
||||
(<?
|
||||
(d/<transact!
|
||||
conn
|
||||
(fn [db do-transact]
|
||||
(go-pair
|
||||
(let [last-report (atom {:db-after db})
|
||||
ops (<? (<prepare-schema-application db args))]
|
||||
(doseq [[op op-arg] ops]
|
||||
(case op
|
||||
:transact
|
||||
(reset! last-report
|
||||
(<? (do-transact
|
||||
(:db-after @last-report)
|
||||
op-arg)))
|
||||
|
||||
:call
|
||||
;; We use <?? so that callers don't accidentally
|
||||
;; break us if they pass a function that returns
|
||||
;; nil rather than a *channel* that returns nil.
|
||||
(when-let [new-report
|
||||
(<?? (op-arg (:db-after @last-report)
|
||||
do-transact))]
|
||||
(when-not (:db-after new-report)
|
||||
(raise "Function didn't return a valid report."
|
||||
{:error :schema/invalid-report
|
||||
:function op-arg
|
||||
:returned new-report}))
|
||||
|
||||
(reset! last-report new-report))))
|
||||
@last-report))))))))
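
;; Editor's illustrative sketch (not part of the original file): the rough
;; calling convention for <apply-schema-alteration. `conn` stands for an open
;; Datomish connection; the fragment name, attribute, and hook are hypothetical.
;; Hooks are called with [db do-transact] and should return a pair channel
;; yielding a transaction report (see the :call case above).
(comment
  (<apply-schema-alteration
    conn
    {:fragments [{:name       :org.mozilla.foo
                  :version    4
                  :attributes {:foo/bar {:db/valueType   :db.type/string
                                         :db/cardinality :db.cardinality/one}}}]
     :pre           nil
     :post          nil
     :fragment-pre  {:org.mozilla.foo
                     {:none (fn [db do-transact] (go-pair {:db-after db}))}}
     :fragment-post {}})
  )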
|
|
@ -1,72 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.simple-schema
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair <?]]
|
||||
[cljs.core.async.macros :refer [go]]))
|
||||
(:require
|
||||
[clojure.set]
|
||||
[datomish.util :as util
|
||||
#?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datomish.db :as db]
|
||||
[datomish.schema :as ds]
|
||||
[datomish.sqlite :as s]
|
||||
[datomish.sqlite-schema :as sqlite-schema]
|
||||
#?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
|
||||
[clojure.core.async :as a :refer [chan go <! >!]]])
|
||||
#?@(:cljs [[datomish.pair-chan]
|
||||
[cljs.core.async :as a :refer [chan <! >!]]])))
|
||||
|
||||
(defn- name->ident [name]
|
||||
(when-not (and (string? name)
|
||||
(not (empty? name)))
|
||||
(raise "Invalid name " name {:error :invalid-name :name name}))
|
||||
(keyword name))
|
||||
|
||||
(defn simple-schema-attributes->schema-parts [attrs]
|
||||
(let [{:keys [cardinality type name unique doc fulltext]} attrs
|
||||
value-type (when type (keyword (str "db.type/" type)))]
|
||||
|
||||
(when-not (and value-type
|
||||
(contains? ds/value-type-map value-type))
|
||||
(raise "Invalid type " type {:error :invalid-type :type type}))
|
||||
|
||||
(let [unique
|
||||
(case unique
|
||||
"identity" :db.unique/identity
|
||||
"value" :db.unique/value
|
||||
nil nil
|
||||
(raise "Invalid unique " unique
|
||||
{:error :invalid-unique :unique unique}))
|
||||
|
||||
cardinality
|
||||
(case cardinality
|
||||
"one" :db.cardinality/one
|
||||
"many" :db.cardinality/many
|
||||
nil nil
|
||||
(raise "Invalid cardinality " cardinality
|
||||
{:error :invalid-cardinality :cardinality cardinality}))]
|
||||
|
||||
(util/assoc-if
|
||||
{:db/valueType value-type
|
||||
:db/ident (name->ident name)
|
||||
:db/id (db/id-literal :db.part/user)
|
||||
:db.install/_attribute :db.part/db}
|
||||
:db/doc doc
|
||||
:db/unique unique
|
||||
:db/fulltext fulltext
|
||||
:db/cardinality cardinality))))
|
||||
|
||||
(defn simple-schema->schema [simple-schema]
|
||||
(let [{:keys [name attributes]} simple-schema]
|
||||
(map simple-schema-attributes->schema-parts attributes)))
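
;; Editor's illustrative sketch (not part of the original file): converting the
;; JS-facing 'simple schema' format into schema parts. The names are
;; hypothetical; the :db/id value is an opaque id-literal placeholder.
(comment
  (simple-schema->schema
    {:name       "org.mozilla.example"
     :attributes [{:name        "page/url"
                   :type        "string"
                   :cardinality "one"
                   :unique      "identity"}]})
  ;; => ({:db/valueType :db.type/string
  ;;      :db/ident :page/url
  ;;      :db/id <id-literal :db.part/user>
  ;;      :db.install/_attribute :db.part/db
  ;;      :db/unique :db.unique/identity
  ;;      :db/cardinality :db.cardinality/one})
  )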
|
||||
|
|
@ -1,119 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.sqlite
|
||||
(:refer-clojure :exclude [format])
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair go-safely <?]]
|
||||
[cljs.core.async.macros :refer [go]]))
|
||||
#?(:clj
|
||||
(:require
|
||||
[honeysql.core]
|
||||
[datomish.pair-chan :refer [go-pair go-safely <?]]
|
||||
[clojure.core.async :refer [go <! >! chan put! take! close!]])
|
||||
:cljs
|
||||
(:require
|
||||
[honeysql.core]
|
||||
[datomish.pair-chan]
|
||||
[cljs.core.async :as a :refer [<! >! chan put! take! close!]])))
|
||||
|
||||
;; Setting this to something else will make your output more readable,
|
||||
;; but not automatically safe for use.
|
||||
(def sql-quoting-style :ansi)
|
||||
|
||||
(def log-sql? false)
|
||||
|
||||
(defn format [args]
|
||||
(honeysql.core/format args :quoting sql-quoting-style))
|
||||
|
||||
(defprotocol ISQLiteConnection
|
||||
(-execute!
|
||||
[db sql bindings]
|
||||
"Execute the given SQL string with the specified bindings. Returns a pair channel resolving
|
||||
to a query dependent `[result error]` pair.")
|
||||
|
||||
(-each
|
||||
[db sql bindings row-cb]
|
||||
"Execute the given SQL string with the specified bindings, invoking the given `row-cb` callback
|
||||
function (if provided) with each returned row. Each row will be presented to `row-cb` as a
|
||||
map-like object, such that `(:column-name row)` succeeds. Returns a pair channel of `[result
|
||||
  error]`, where `result` is the number of rows returned.")
|
||||
|
||||
(close
|
||||
[db]
|
||||
"Close this SQLite connection. Returns a pair channel of [nil error]."))
|
||||
|
||||
(defprotocol ISQLiteConnectionFactory
|
||||
(<sqlite-connection
|
||||
[path]
|
||||
"Open an ISQLiteConnection to `path`. Returns a pair channel of `[sqlite-connection error]`"))
|
||||
|
||||
(defn execute!
|
||||
[db [sql & bindings]]
|
||||
(when log-sql?
|
||||
(println "Running SQL:" sql (pr-str bindings)))
|
||||
(-execute! db sql bindings))
|
||||
|
||||
(defn each-row
|
||||
[db [sql & bindings] row-cb]
|
||||
(-each db sql bindings row-cb))
|
||||
|
||||
(defn reduce-rows
|
||||
[db [sql & bindings] initial f]
|
||||
(let [acc (atom initial)]
|
||||
(go-pair
|
||||
(<? (-each db sql bindings #(swap! acc f %)))
|
||||
@acc)))
|
||||
|
||||
(defn <?all-rows
|
||||
"Takes a new channel, put!ing rows as [row err] pairs
|
||||
into it as they arrive from storage. Closes the channel
|
||||
when no more results exist. Consume with <?."
|
||||
[db [sql & bindings :as rest] chan]
|
||||
(go-safely [c chan]
|
||||
(let [result (<! (-each db sql bindings
|
||||
(fn [row]
|
||||
(put! c [row nil]))))]
|
||||
;; We assume that a failure will result in the promise
|
||||
;; channel being rejected and no further row callbacks
|
||||
;; being called.
|
||||
(when (second result)
|
||||
(put! c result))
|
||||
(close! c))))
|
||||
|
||||
(defn all-rows
|
||||
[db [sql & bindings :as rest]]
|
||||
(reduce-rows db rest [] conj))
|
||||
|
||||
(defn in-transaction! [db chan-fn]
|
||||
(go
|
||||
(try
|
||||
(<? (execute! db ["BEGIN EXCLUSIVE TRANSACTION"]))
|
||||
(let [[v e] (<! (chan-fn))]
|
||||
(if v
|
||||
(do
|
||||
(<? (execute! db ["COMMIT"]))
|
||||
[v nil])
|
||||
(do
|
||||
(<? (execute! db ["ROLLBACK TRANSACTION"]))
|
||||
[nil e])))
|
||||
(catch #?(:clj Throwable :cljs js/Error) e
|
||||
[nil e]))))
|
||||
|
||||
(defn get-user-version [db]
|
||||
(go-pair
|
||||
(let [row (first (<? (all-rows db ["PRAGMA user_version"])))]
|
||||
(or
|
||||
(:user_version row)
|
||||
0))))
|
||||
|
||||
(defn set-user-version [db version]
|
||||
(execute! db [(str "PRAGMA user_version = " version)]))
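
;; Editor's illustrative sketch (not part of the original file): composing the
;; helpers above. `conn` must satisfy ISQLiteConnection; the table and column
;; names are hypothetical. Each helper returns a pair channel, so <? (inside
;; go-pair) unwraps the [result error] pair.
(defn example-kv-roundtrip
  [conn]
  (go-pair
    (<? (execute! conn ["CREATE TABLE IF NOT EXISTS kv (k TEXT NOT NULL, v TEXT NOT NULL)"]))
    (<? (execute! conn ["INSERT INTO kv (k, v) VALUES (?, ?)" "a" "1"]))
    ;; all-rows yields a vector of map-like rows, e.g. [{:k "a", :v "1"}].
    (<? (all-rows conn ["SELECT k, v FROM kv WHERE k = ?" "a"]))))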
|
|
@ -1,318 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.sqlite-schema
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair <?]]
|
||||
[cljs.core.async.macros :refer [go]]))
|
||||
(:require
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datomish.sqlite :as s]
|
||||
#?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
|
||||
[clojure.core.async :refer [go <! >!]]])
|
||||
#?@(:cljs [[datomish.pair-chan]
|
||||
[cljs.core.async :as a :refer [<! >!]]])))
|
||||
|
||||
;; Version history:
;; 1: initial schema.
;; 2: added :db.schema/version and /attribute in bootstrap; assigned
;;    idents 36 and 37, so we bump the part range here; tie bootstrapping
;;    to the SQLite user_version.

(def current-version 2)
|
||||
|
||||
(def v1-statements
|
||||
["CREATE TABLE datoms (e INTEGER NOT NULL, a SMALLINT NOT NULL, v BLOB NOT NULL, tx INTEGER NOT NULL,
|
||||
value_type_tag SMALLINT NOT NULL,
|
||||
index_avet TINYINT NOT NULL DEFAULT 0, index_vaet TINYINT NOT NULL DEFAULT 0,
|
||||
index_fulltext TINYINT NOT NULL DEFAULT 0,
|
||||
unique_value TINYINT NOT NULL DEFAULT 0)"
|
||||
"CREATE UNIQUE INDEX idx_datoms_eavt ON datoms (e, a, value_type_tag, v)"
|
||||
"CREATE UNIQUE INDEX idx_datoms_aevt ON datoms (a, e, value_type_tag, v)"
|
||||
|
||||
;; Opt-in index: only if a has :db/index true.
|
||||
"CREATE UNIQUE INDEX idx_datoms_avet ON datoms (a, value_type_tag, v, e) WHERE index_avet IS NOT 0"
|
||||
|
||||
;; Opt-in index: only if a has :db/valueType :db.type/ref. No need for tag here since all
|
||||
;; indexed elements are refs.
|
||||
"CREATE UNIQUE INDEX idx_datoms_vaet ON datoms (v, a, e) WHERE index_vaet IS NOT 0"
|
||||
|
||||
;; Opt-in index: only if a has :db/fulltext true; thus, it has :db/valueType :db.type/string,
|
||||
;; which is not :db/valueType :db.type/ref. That is, index_vaet and index_fulltext are mutually
|
||||
;; exclusive.
|
||||
"CREATE INDEX idx_datoms_fulltext ON datoms (value_type_tag, v, a, e) WHERE index_fulltext IS NOT 0"
|
||||
|
||||
;; TODO: possibly remove this index. :db.unique/{value,identity} should be asserted by the
|
||||
;; transactor in all cases, but the index may speed up some of SQLite's query planning. For now,
|
||||
;; it serves to validate the transactor implementation. Note that tag is needed here to
|
||||
;; differentiate, e.g., keywords and strings.
|
||||
"CREATE UNIQUE INDEX idx_datoms_unique_value ON datoms (a, value_type_tag, v) WHERE unique_value IS NOT 0"
|
||||
|
||||
"CREATE TABLE transactions (e INTEGER NOT NULL, a SMALLINT NOT NULL, v BLOB NOT NULL, tx INTEGER NOT NULL, added TINYINT NOT NULL DEFAULT 1, value_type_tag SMALLINT NOT NULL)"
|
||||
"CREATE INDEX idx_transactions_tx ON transactions (tx, added)"
|
||||
|
||||
;; Fulltext indexing.
|
||||
;; A fulltext indexed value v is an integer rowid referencing fulltext_values.
|
||||
|
||||
;; Optional settings:
|
||||
;; tokenize="porter"
|
||||
;; prefix='2,3'
|
||||
;; By default we use Unicode-aware tokenizing (particularly for case folding), but preserve
|
||||
;; diacritics.
|
||||
"CREATE VIRTUAL TABLE fulltext_values
|
||||
USING FTS4 (text NOT NULL, searchid INT, tokenize=unicode61 \"remove_diacritics=0\")"
|
||||
|
||||
;; This combination of view and triggers allows you to transparently
|
||||
;; update-or-insert into FTS. Just INSERT INTO fulltext_values_view (text, searchid).
|
||||
"CREATE VIEW fulltext_values_view AS SELECT * FROM fulltext_values"
|
||||
"CREATE TRIGGER replace_fulltext_searchid
|
||||
INSTEAD OF INSERT ON fulltext_values_view
|
||||
WHEN EXISTS (SELECT 1 FROM fulltext_values WHERE text = new.text)
|
||||
BEGIN
|
||||
UPDATE fulltext_values SET searchid = new.searchid WHERE text = new.text;
|
||||
END"
|
||||
"CREATE TRIGGER insert_fulltext_searchid
|
||||
INSTEAD OF INSERT ON fulltext_values_view
|
||||
WHEN NOT EXISTS (SELECT 1 FROM fulltext_values WHERE text = new.text)
|
||||
BEGIN
|
||||
INSERT INTO fulltext_values (text, searchid) VALUES (new.text, new.searchid);
|
||||
END"
|
||||
|
||||
;; A view transparently interpolating fulltext indexed values into the datom structure.
|
||||
"CREATE VIEW fulltext_datoms AS
|
||||
SELECT e, a, fulltext_values.text AS v, tx, value_type_tag, index_avet, index_vaet, index_fulltext, unique_value
|
||||
FROM datoms, fulltext_values
|
||||
WHERE datoms.index_fulltext IS NOT 0 AND datoms.v = fulltext_values.rowid"
|
||||
|
||||
;; A view transparently interpolating all entities (fulltext and non-fulltext) into the datom structure.
|
||||
"CREATE VIEW all_datoms AS
|
||||
SELECT e, a, v, tx, value_type_tag, index_avet, index_vaet, index_fulltext, unique_value
|
||||
FROM datoms
|
||||
WHERE index_fulltext IS 0
|
||||
UNION ALL
|
||||
SELECT e, a, v, tx, value_type_tag, index_avet, index_vaet, index_fulltext, unique_value
|
||||
FROM fulltext_datoms"
|
||||
|
||||
;; Materialized views of the schema.
|
||||
"CREATE TABLE idents (ident TEXT NOT NULL PRIMARY KEY, entid INTEGER UNIQUE NOT NULL)"
|
||||
"CREATE TABLE schema (ident TEXT NOT NULL, attr TEXT NOT NULL, value BLOB NOT NULL, value_type_tag SMALLINT NOT NULL,
|
||||
FOREIGN KEY (ident) REFERENCES idents (ident))"
|
||||
"CREATE INDEX idx_schema_unique ON schema (ident, attr, value, value_type_tag)"
|
||||
"CREATE TABLE parts (part TEXT NOT NULL PRIMARY KEY, start INTEGER NOT NULL, idx INTEGER NOT NULL)"
|
||||
])
|
||||
|
||||
(def v2-statements v1-statements)
|
||||
|
||||
(defn create-temp-tx-lookup-statement [table-name]
|
||||
;; n.b., v0/value_type_tag0 can be NULL, in which case we look up v from datoms;
|
||||
  ;; and the datom columns are NULL until the LEFT JOIN fills them in.
|
||||
;; The table-name is not escaped in any way, in order to allow "temp.dotted" names.
|
||||
;; TODO: update comment about sv.
|
||||
[(str "CREATE TABLE IF NOT EXISTS " table-name
|
||||
" (e0 INTEGER NOT NULL, a0 SMALLINT NOT NULL, v0 BLOB NOT NULL, tx0 INTEGER NOT NULL, added0 TINYINT NOT NULL,
|
||||
value_type_tag0 SMALLINT NOT NULL,
|
||||
index_avet0 TINYINT, index_vaet0 TINYINT,
|
||||
index_fulltext0 TINYINT,
|
||||
unique_value0 TINYINT,
|
||||
sv BLOB,
|
||||
svalue_type_tag SMALLINT,
|
||||
rid INTEGER,
|
||||
e INTEGER, a SMALLINT, v BLOB, tx INTEGER, value_type_tag SMALLINT)")])
|
||||
|
||||
(defn create-temp-tx-lookup-eavt-statement [idx-name table-name]
|
||||
;; Note that the consuming code creates and drops the indexes
|
||||
;; manually, which makes insertion slightly faster.
|
||||
;; This index prevents overlapping transactions.
|
||||
;; The idx-name and table-name are not escaped in any way, in order
|
||||
;; to allow "temp.dotted" names.
|
||||
;; TODO: drop added0?
|
||||
[(str "CREATE UNIQUE INDEX IF NOT EXISTS "
|
||||
idx-name
|
||||
" ON "
|
||||
table-name
|
||||
" (e0, a0, v0, added0, value_type_tag0) WHERE sv IS NOT NULL")])
|
||||
|
||||
(defn <create-current-version
|
||||
[db bootstrapper]
|
||||
(println "Creating database at" current-version)
|
||||
(s/in-transaction!
|
||||
db
|
||||
#(go-pair
|
||||
(doseq [statement v2-statements]
|
||||
(try
|
||||
(<? (s/execute! db [statement]))
|
||||
(catch #?(:clj Throwable :cljs js/Error) e
|
||||
(throw (ex-info "Failed to execute statement" {:statement statement} e)))))
|
||||
(<? (bootstrapper db 0))
|
||||
(<? (s/set-user-version db current-version))
|
||||
[0 (<? (s/get-user-version db))])))
|
||||
|
||||
(defn <update-from-version
|
||||
[db from-version bootstrapper]
|
||||
{:pre [(> from-version 0)]} ;; Or we'd create-current-version instead.
|
||||
{:pre [(< from-version current-version)]} ;; Or we wouldn't need to update-from-version.
|
||||
(println "Upgrading database from" from-version "to" current-version)
|
||||
(s/in-transaction!
|
||||
db
|
||||
#(go-pair
|
||||
;; We must only be migrating from v1 to v2.
|
||||
(let [statement "UPDATE parts SET idx = idx + 2 WHERE part = ?"]
|
||||
(try
|
||||
(<? (s/execute!
|
||||
db
|
||||
[statement :db.part/db]))
|
||||
(catch #?(:clj Throwable :cljs js/Error) e
|
||||
(throw (ex-info "Failed to execute statement" {:statement statement} e)))))
|
||||
(<? (bootstrapper db from-version))
|
||||
(<? (s/set-user-version db current-version))
|
||||
[from-version (<? (s/get-user-version db))])))
|
||||
|
||||
(defn <ensure-current-version
|
||||
"Returns a pair: [previous-version current-version]."
|
||||
[db bootstrapper]
|
||||
(go-pair
|
||||
(let [v (<? (s/get-user-version db))]
|
||||
(cond
|
||||
(= v current-version)
|
||||
[v v]
|
||||
|
||||
(= v 0)
|
||||
(<? (<create-current-version db bootstrapper))
|
||||
|
||||
(< v current-version)
|
||||
(<? (<update-from-version db v bootstrapper))))))
|
||||
|
||||
;; This is close to the SQLite schema since it may impact the value tag bit.
|
||||
(defprotocol IEncodeSQLite
|
||||
(->SQLite [x] "Transforms Clojure{Script} values to SQLite."))
|
||||
|
||||
(extend-protocol IEncodeSQLite
|
||||
#?@(:clj
|
||||
[String
|
||||
(->SQLite [x] x)
|
||||
|
||||
clojure.lang.Keyword
|
||||
(->SQLite [x] (str x))
|
||||
|
||||
Boolean
|
||||
(->SQLite [x] (if x 1 0))
|
||||
|
||||
Integer
|
||||
(->SQLite [x] x)
|
||||
|
||||
Long
|
||||
(->SQLite [x] x)
|
||||
|
||||
java.util.Date
|
||||
(->SQLite [x] (.getTime x))
|
||||
|
||||
java.util.UUID
|
||||
(->SQLite [x] (.toString x)) ; TODO: BLOB storage. Issue #44.
|
||||
|
||||
Float
|
||||
(->SQLite [x] x)
|
||||
|
||||
Double
|
||||
(->SQLite [x] x)]
|
||||
|
||||
:cljs
|
||||
[string
|
||||
(->SQLite [x] x)
|
||||
|
||||
Keyword
|
||||
(->SQLite [x] (str x))
|
||||
|
||||
boolean
|
||||
(->SQLite [x] (if x 1 0))
|
||||
|
||||
js/Date
|
||||
(->SQLite [x] (.getTime x))
|
||||
|
||||
number
|
||||
(->SQLite [x] x)]))
|
||||
|
||||
;; Datomish rows are tagged with a numeric representation of :db/valueType:
|
||||
;; The tag is used to limit queries, and therefore is placed carefully in the relevant indices to
|
||||
;; allow searching numeric longs and doubles quickly. The tag is also used to convert SQLite values
|
||||
;; to the correct Datomish value type on query egress.
|
||||
(def value-type-tag-map
|
||||
{:db.type/ref 0
|
||||
:db.type/boolean 1
|
||||
:db.type/instant 4
|
||||
:db.type/long 5 ;; SQLite distinguishes integral from decimal types, allowing long and double to share a tag.
|
||||
:db.type/double 5 ;; SQLite distinguishes integral from decimal types, allowing long and double to share a tag.
|
||||
:db.type/string 10
|
||||
:db.type/uuid 11
|
||||
:db.type/uri 12
|
||||
:db.type/keyword 13})
|
||||
|
||||
(defn ->tag [valueType]
|
||||
(or
|
||||
(valueType value-type-tag-map)
|
||||
(raise "Unknown valueType " valueType ", expected one of " (sorted-set (keys value-type-tag-map))
|
||||
{:error :SQLite/tag, :valueType valueType})))
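
;; Editor's illustrative sketch (not part of the original file): a few tags.
(comment
  (->tag :db.type/ref)      ;; => 0
  (->tag :db.type/long)     ;; => 5
  (->tag :db.type/double)   ;; => 5 (longs and doubles share a tag)
  (->tag :db.type/keyword)  ;; => 13
  )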
|
||||
|
||||
#?(:clj
|
||||
(defn <-tagged-SQLite
|
||||
"Transforms SQLite values to Clojure with tag awareness."
|
||||
[tag value]
|
||||
(case tag
|
||||
;; In approximate commonality order.
|
||||
0 value ; ref.
|
||||
1 (= value 1) ; boolean
|
||||
4 (java.util.Date. value) ; instant
|
||||
13 (keyword (subs value 1)) ; keyword
|
||||
12 (java.net.URI. value) ; URI
|
||||
11 (java.util.UUID/fromString value) ; UUID
|
||||
; 5 value ; numeric
|
||||
; 10 value ; string
|
||||
value
|
||||
)))
|
||||
|
||||
#?(:cljs
|
||||
(defn <-tagged-SQLite
|
||||
"Transforms SQLite values to ClojureScript with tag awareness."
|
||||
[tag value]
|
||||
;; In approximate commonality order.
|
||||
(case tag
|
||||
0 value ; ref.
|
||||
1 (= value 1) ; boolean
|
||||
4 (js/Date. value) ; instant
|
||||
13 (keyword (subs value 1)) ; keyword
|
||||
; 12 value ; URI
|
||||
; 11 value ; UUID
|
||||
; 5 value ; numeric
|
||||
; 10 value ; string
|
||||
value
|
||||
)))
|
||||
|
||||
(defn tagged-SQLite-to-JS
|
||||
"Transforms SQLite values to JavaScript-compatible values."
|
||||
[tag value]
|
||||
(case tag
|
||||
1 (= value 1) ; boolean.
|
||||
; 0 value ; No point trying to ident.
|
||||
; 4 value ; JS doesn't have a Date representation.
|
||||
; 13 value ; Return the keyword string from the DB: ":foobar".
|
||||
value))
|
||||
|
||||
(defn <-SQLite
|
||||
"Transforms SQLite values to Clojure{Script}."
|
||||
[valueType value]
|
||||
(case valueType
|
||||
:db.type/ref value
|
||||
:db.type/keyword (keyword (subs value 1))
|
||||
:db.type/string value
|
||||
:db.type/boolean (not= value 0)
|
||||
:db.type/long value
|
||||
:db.type/instant (<-tagged-SQLite 4 value)
|
||||
:db.type/uuid (<-tagged-SQLite 11 value)
|
||||
:db.type/double value))
|
|
@ -1,931 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.transact
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair <?]]
|
||||
[cljs.core.async.macros :refer [go go-loop]]))
|
||||
(:require
|
||||
[datomish.query.context :as context]
|
||||
[datomish.query.projection :as projection]
|
||||
[datomish.query.source :as source]
|
||||
[datomish.query :as query]
|
||||
[datomish.db :as db :refer [id-literal id-literal?]]
|
||||
[datomish.db.debug :as debug]
|
||||
[datomish.datom :as dd :refer [datom datom? #?@(:cljs [Datom])]]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datomish.schema :as ds]
|
||||
[datomish.schema-changes]
|
||||
[datomish.sqlite :as s]
|
||||
[datomish.sqlite-schema :as sqlite-schema]
|
||||
[datomish.transact.bootstrap :as bootstrap]
|
||||
[datomish.transact.explode :as explode]
|
||||
[datomish.tufte-stub :as tufte
|
||||
#?(:cljs :refer-macros :clj :refer) [p]]
|
||||
#?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
|
||||
[clojure.core.async :as a :refer [chan go go-loop <! >!]]])
|
||||
#?@(:cljs [[datomish.pair-chan]
|
||||
[cljs.core.async :as a :refer [chan <! >!]]]))
|
||||
#?(:clj
|
||||
(:import
|
||||
[datomish.datom Datom])))
|
||||
|
||||
(defprotocol IConnection
|
||||
(close
|
||||
[conn]
|
||||
"Close this connection. Returns a pair channel of [nil error].
|
||||
|
||||
Closing a closed connection is a no-op.")
|
||||
|
||||
(db
|
||||
[conn]
|
||||
"Get the current DB associated with this connection.")
|
||||
|
||||
(history
|
||||
[conn]
|
||||
"Get the full transaction history DB associated with this connection."))
|
||||
|
||||
(defrecord Connection [closed? current-db transact-chan]
|
||||
IConnection
|
||||
(close [conn]
|
||||
(go-pair ;; Always want to return a pair-chan.
|
||||
(when (compare-and-set! (:closed? conn) false true)
|
||||
(let [result (a/chan 1)]
|
||||
;; Ask for the underlying database to be closed while (usually, after) draining the queue.
|
||||
;; Invariant: we see :sentinel-close in the transactor queue at most once.
|
||||
(a/put! (:transact-chan conn) [:sentinel-close nil result true])
|
||||
;; This immediately stops <transact! enqueueing new transactions.
|
||||
(a/close! (:transact-chan conn))
|
||||
;; The transactor will close the underlying DB after draining the queue; by waiting for
|
||||
;; result, we can raise any error from closing the DB and ensure that the DB is really
|
||||
;; closed after waiting for the connection to close.
|
||||
(<? result)))))
|
||||
|
||||
(db [conn] @(:current-db conn))
|
||||
|
||||
(history [conn]
|
||||
(raise "Datomic's history is not yet supported." {})))
|
||||
|
||||
(defn conn? [x]
|
||||
(and (satisfies? IConnection x)))
|
||||
|
||||
(defrecord TxReport [db-before ;; The DB before the transaction.
|
||||
db-after ;; The DB after the transaction.
|
||||
tx ;; The tx ID represented by the transaction in this report; refer :db/tx.
|
||||
                     txInstant ;; The timestamp instant when the transaction was processed/committed in this report; refer :db/txInstant.
|
||||
entities ;; The set of entities (like [:db/add e a v]) processed.
|
||||
tx-data ;; The set of datoms applied to the database, like (Datom. e a v tx added).
|
||||
tempids ;; The map from id-literal -> numeric entid.
|
||||
part-map ;; Map {:db.part/user {:start 0x10000 :idx 0x10000}, ...}.
|
||||
added-parts ;; The set of parts added during the transaction via :db.part/db :db.install/part.
|
||||
added-idents ;; The map of idents -> entid added during the transaction, via e :db/ident ident.
|
||||
retracted-idents ;; The map of idents -> entid removed during the transaction.
|
||||
added-attributes ;; The map of schema attributes (ident -> schema fragment) added during the transaction, via :db.part/db :db.install/attribute.
|
||||
altered-attributes ;; TODO
|
||||
])
|
||||
|
||||
(defn- report? [x]
|
||||
(and (instance? TxReport x)))
|
||||
|
||||
(defn- -next-eid!
  "Advance {:db.part/user {:start 0x10 :idx 0x11}, ...} to {:db.part/user {:start 0x10 :idx 0x12}, ...} and return 0x12."
  [part-map-atom tempid]
  {:pre [(id-literal? tempid)]}
|
||||
(let [part (:part tempid)
|
||||
next (fn [part-map]
|
||||
(let [idx (get-in part-map [part :idx])]
|
||||
(when-not idx
|
||||
(raise "Cannot allocate entid for id-literal " tempid " because part " part " is not known"
|
||||
{:error :db/bad-part
|
||||
:parts (sorted-set (keys part-map))
|
||||
:part part}))
|
||||
(update-in part-map [part :idx] inc)))]
|
||||
(get-in (swap! part-map-atom next) [part :idx])))
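
;; Editor's illustrative sketch (not part of the original file): advancing the
;; per-partition counter. The part map values are hypothetical, and we assume
;; (id-literal :db.part/user) yields a tempid in :db.part/user, as it is used
;; elsewhere in this codebase.
(comment
  (let [parts (atom {:db.part/user {:start 0x10000 :idx 0x10000}})]
    [(-next-eid! parts (id-literal :db.part/user))     ;; => 65537
     (-next-eid! parts (id-literal :db.part/user))]))  ;; => 65538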
|
||||
|
||||
(defn- allocate-eid
|
||||
[report id-literal eid]
|
||||
{:pre [(report? report) (id-literal? id-literal) (and (integer? eid) (not (neg? eid)))]}
|
||||
|
||||
(assoc-in report [:tempids id-literal] eid))
|
||||
|
||||
;; (def data-readers {'db/id id-literal})
|
||||
|
||||
;; #?(:cljs
|
||||
;; (doseq [[tag cb] data-readers] (cljs.reader/register-tag-parser! tag cb)))
|
||||
|
||||
(declare start-transactor)
|
||||
|
||||
(defn connection-with-db [db]
|
||||
;; Puts to listener-source may park if listener-mult can't distribute them fast enough. Since the
|
||||
  ;; underlying taps are asserted to be unblocking, the parking time should be very short.
|
||||
(let [listener-source
|
||||
(a/chan 1)
|
||||
|
||||
listener-mult
|
||||
(a/mult listener-source) ;; Just for tapping.
|
||||
|
||||
connection
|
||||
(map->Connection {:closed? (atom false)
|
||||
:current-db (atom db)
|
||||
:listener-source listener-source
|
||||
:listener-mult listener-mult
|
||||
:transact-chan (a/chan (util/unlimited-buffer))
|
||||
})]
|
||||
(start-transactor connection)
|
||||
connection))
|
||||
|
||||
(defn maybe-datom->entity [entity]
|
||||
(cond
|
||||
(datom? entity)
|
||||
(->
|
||||
(let [[e a v tx added] entity]
|
||||
(if added
|
||||
[:db/add [e a v tx]]
|
||||
[:db/retract [e a v tx]]))
|
||||
(with-meta (get (meta entity) :source)))
|
||||
|
||||
true
|
||||
entity))
|
||||
|
||||
(defn maybe-ident->entid [db [op e a v :as orig]]
|
||||
;; We have to handle all ops, including those when a or v are not defined.
|
||||
(let [e (db/entid db e)
|
||||
a (db/entid db a)
|
||||
v (if (and a (ds/kw? (db/schema db) a)) ;; TODO: decide if this is best. We could also check for ref and numeric types.
|
||||
v
|
||||
(db/entid db v))]
|
||||
(when (and a (not (integer? a)))
|
||||
(raise "Unknown attribute " a
|
||||
{:form orig :attribute a :entity orig}))
|
||||
[op e a v]))
|
||||
|
||||
(defrecord Transaction [db tempids entities])
|
||||
|
||||
(defn- tx-entity [db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
(let [tx (:tx report)
|
||||
txInstant (:txInstant report)]
|
||||
;; n.b., this must not be symbolic -- it's inserted after we map idents -> entids.
|
||||
[:db/add tx (db/entid db :db/txInstant) txInstant]))
|
||||
|
||||
(defn ensure-entity-form [entity]
|
||||
(when-not (sequential? entity)
|
||||
(raise "Bad entity " entity ", should be sequential at this point"
|
||||
{:error :transact/bad-entity, :entity entity}))
|
||||
|
||||
(let [[op] entity]
|
||||
(case op
|
||||
(:db/add :db/retract)
|
||||
(let [[_ e a v & rest] entity]
|
||||
(cond
|
||||
(nil? e)
|
||||
(raise "Bad entity: nil e in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(nil? a)
|
||||
(raise "Bad entity: nil a in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(nil? v)
|
||||
(raise "Bad entity: nil v in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(some? rest)
|
||||
(raise "Bad entity: too long " entity
|
||||
{:error :transact/bad-entity :entity entity })))
|
||||
|
||||
:db.fn/retractAttribute
|
||||
(let [[_ e a & rest] entity]
|
||||
(cond
|
||||
(nil? e)
|
||||
(raise "Bad entity: nil e in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(nil? a)
|
||||
(raise "Bad entity: nil a in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(some? rest)
|
||||
(raise "Bad entity: too long " entity
|
||||
{:error :transact/bad-entity :entity entity })))
|
||||
|
||||
:db.fn/retractEntity
|
||||
(let [[_ e & rest] entity]
|
||||
(cond
|
||||
(nil? e)
|
||||
(raise "Bad entity: nil e in " entity
|
||||
{:error :transact/bad-entity :entity entity })
|
||||
|
||||
(some? rest)
|
||||
(raise "Bad entity: too long " entity
|
||||
{:error :transact/bad-entity :entity entity })))
|
||||
|
||||
;; Default
|
||||
(raise "Unrecognized operation " op " expected one of :db/add :db/retract :db/fn.retractAttribute :db/fn.retractEntity at this point"
|
||||
{:error :transact/bad-operation :entity entity })))
|
||||
|
||||
entity)
|
||||
|
||||
(defn- tx-instant? [db [op e a & _]]
|
||||
(and (= op :db/add)
|
||||
(= (db/entid db e) (db/entid db :db/tx))
|
||||
(= (db/entid db a) (db/entid db :db/txInstant))))
|
||||
|
||||
(defn- update-txInstant
  "Extract [:db/add :db/tx :db/txInstant ...], and update :txInstant with that value."
  [db report]
  {:pre [(db/db? db) (report? report)]}
|
||||
|
||||
;; TODO: be more efficient here: don't iterate all entities.
|
||||
(if-let [[_ _ _ txInstant] (first (filter (partial tx-instant? db) (:entities report)))]
|
||||
(assoc report :txInstant txInstant)
|
||||
report))
|
||||
|
||||
(defn preprocess [db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
(let [initial-es (or (:entities report) [])
|
||||
;; :db/tx is a "dynamic enum ident" that maps to the current transaction ID. This approach
|
||||
;; mimics DataScript's :db/current-tx. (We don't follow DataScript because
|
||||
;; current-txInstant is awkward.) It's much simpler than Datomic's approach, which appears
|
||||
;; to unify all id-literals in :db.part/tx to the current transaction value, but also seems
|
||||
;; inconsistent.
|
||||
tx (:tx report)
|
||||
db* (db/with-ident db :db/tx tx)]
|
||||
(when-not (sequential? initial-es)
|
||||
(raise "Bad transaction data " initial-es ", expected sequential collection"
|
||||
{:error :transact/syntax, :tx-data initial-es}))
|
||||
|
||||
;; TODO: find an approach that generates less garbage.
|
||||
(->
|
||||
report
|
||||
|
||||
;; Normalize Datoms into :db/add or :db/retract vectors.
|
||||
(update :entities (partial map maybe-datom->entity))
|
||||
|
||||
(update :entities (partial explode/explode-entities db))
|
||||
|
||||
(update :entities (partial map ensure-entity-form))
|
||||
|
||||
;; Replace idents with entids where possible, using db* to capture :db/tx.
|
||||
(update :entities (partial map (partial maybe-ident->entid db*)))
|
||||
|
||||
;; If an explicit [:db/add :db/tx :db/txInstant] is not given, add one. Use db* to
|
||||
;; capture :db/tx.
|
||||
(update :entities (fn [entities]
|
||||
(if (first (filter (partial tx-instant? db*) entities))
|
||||
entities
|
||||
(conj entities (tx-entity db report)))))
|
||||
|
||||
;; Extract the current txInstant for the report.
|
||||
(->> (update-txInstant db*)))))
|
||||
|
||||
(defn <resolve-lookup-refs [db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
(let [unique-identity? (memoize (partial ds/unique-identity? (db/schema db)))
|
||||
;; Map lookup-ref -> entities containing lookup-ref, like {[:a :v] [[(lookup-ref :a :v) :b :w] ...], ...}.
|
||||
groups (group-by (partial keep db/lookup-ref?) (:entities report))
|
||||
;; Entities with no lookup-ref are grouped under the key (lazy-seq).
|
||||
entities (get groups (lazy-seq)) ;; No lookup-refs? Pass through.
|
||||
to-resolve (dissoc groups (lazy-seq)) ;; The ones with lookup-refs.
|
||||
;; List [[:a :v] ...] to lookup.
|
||||
avs (set (map (juxt :a :v) (apply concat (keys to-resolve))))
|
||||
->av (fn [r] ;; Conditional (juxt :a :v) that passes through nil.
|
||||
(when r [(:a r) (:v r)]))]
|
||||
(go-pair
|
||||
(let [av->e (<? (db/<avs db avs))
|
||||
resolve1 (fn [field]
|
||||
(if-let [[a v] (->av (db/lookup-ref? field))]
|
||||
(if-not (unique-identity? (db/entid db a))
|
||||
(raise "Lookup-ref found with non-unique-identity attribute " a " and value " v
|
||||
{:error :transact/lookup-ref-with-non-unique-identity-attribute
|
||||
:a a
|
||||
:v v})
|
||||
(or
|
||||
(get av->e [a v])
|
||||
(raise "No entity found for lookup-ref with attribute " a " and value " v
|
||||
{:error :transact/lookup-ref-not-found
|
||||
:a a
|
||||
:v v})))
|
||||
field))
|
||||
resolve (fn [entity]
|
||||
(mapv resolve1 entity))]
|
||||
(assoc
|
||||
report
|
||||
:entities
|
||||
(concat
|
||||
entities
|
||||
(map resolve (apply concat (vals to-resolve)))))))))
|
||||
|
||||
(declare <resolve-id-literals)
|
||||
|
||||
(defn <retry-with-tempid [db report es tempid upserted-eid]
|
||||
(if (contains? (:tempids report) tempid)
|
||||
(go-pair
|
||||
(raise "Conflicting upsert: " tempid " resolves"
|
||||
" both to " upserted-eid " and " (get (:tempids report) tempid)
|
||||
{ :error :transact/upsert }))
|
||||
;; try to re-run from the beginning
|
||||
;; but remembering that `old-eid` will resolve to `upserted-eid`
|
||||
(<resolve-id-literals db
|
||||
(->
|
||||
report
|
||||
(assoc-in [:tempids tempid] upserted-eid)
|
||||
(assoc-in [:entities] es)))))
|
||||
|
||||
(defn- transact-entity [report entity]
|
||||
(update-in report [:entities] conj entity))
|
||||
|
||||
(defn id-literal-generation [unique-identity? entity]
|
||||
"Group entities possibly containing id-literals into 'generations'.
|
||||
|
||||
Entities are grouped into one of the following generations:
|
||||
|
||||
:upserts-ev - 'complex upserts' that look like [:db/add -1 a -2] where a is :db.unique/identity;
|
||||
|
||||
:upserts-e - 'simple upserts' that look like [:db/add -1 a v] where a is :db.unique/identity;
|
||||
|
||||
:allocations-{e,v,ev} - things like [:db/add -1 b v], [:db/add e b -2], or [:db/add -3 b -4] where
|
||||
b is *not* :db.unique/identity, or like [:db/add e a -5] where a is :db.unique/identity;
|
||||
|
||||
:entities - not :db/add, or no id-literals."
|
||||
{:pre [(sequential? entity)]}
|
||||
(let [[op e a v] entity
|
||||
v? (id-literal? v)]
|
||||
(when (id-literal? a)
|
||||
(raise "id-literal attributes are not yet supported: " entity
|
||||
{:error :transact/no-id-literal-attributes
|
||||
:entity entity }))
|
||||
(cond
|
||||
(not= op :db/add) ;; TODO: verify no id-literals appear.
|
||||
:entities
|
||||
|
||||
(id-literal? e)
|
||||
(if (unique-identity? a)
|
||||
(if v?
|
||||
:upserts-ev
|
||||
:upserts-e)
|
||||
(if v?
|
||||
:allocations-ev
|
||||
:allocations-e))
|
||||
|
||||
v?
|
||||
:allocations-v
|
||||
|
||||
true
|
||||
:entities)))
|
||||
|
||||
(defn <resolve-upserts-e [db upserts-e]
|
||||
"Given a sequence of :upserts-e, query the database to try to map them to existing entities.
|
||||
|
||||
Returns a map of id-literals to integer entids, with keys only those id-literals that mapped to
|
||||
existing entities."
|
||||
(go-pair
|
||||
(when (seq upserts-e)
|
||||
(let [->id-av (fn [[op id-literal a v]] [id-literal [a v]])
|
||||
;; Like {id-literal [[:a1 :v1] [:a2 :v2] ...], ...}.
|
||||
id->avs (util/group-by-kv ->id-av upserts-e)
|
||||
;; Like [[:a1 :v1] [:a2 v2] ...].
|
||||
avs (apply concat (vals id->avs))
|
||||
;; Like {[:a1 :v1] e1, ...}.
|
||||
av->e (<? (db/<avs db avs))
|
||||
|
||||
avs->es (fn [avs] (set (keep (partial get av->e) avs)))
|
||||
|
||||
id->es (util/mapvals avs->es id->avs)]
|
||||
|
||||
(into {}
|
||||
;; nil is dropped.
|
||||
(map (fn [[id es]]
|
||||
(when-let [e (first es)]
|
||||
(when (second es)
|
||||
(raise "Conflicting upsert: " id " resolves"
|
||||
" to more than one entid " es
|
||||
{:error :transact/upsert :tempid id :entids es}))
|
||||
[id e])))
|
||||
id->es)))))
|
||||
|
||||
(defn evolve-upserts-e [id->e upserts-e]
|
||||
(let [evolve1
|
||||
(fn [[op id-e a v :as entity]]
|
||||
(if-let [e* (get id->e id-e)]
|
||||
[:upserted [op e* a v]]
|
||||
[:allocations-e entity]))]
|
||||
(util/group-by-kv evolve1 upserts-e)))
|
||||
|
||||
(defn evolve-upserts-ev [id->e upserts-ev]
|
||||
"Given a map id->e of id-literals to integer entids, evolve the given entities. Returns a map
|
||||
whose keys are generations and whose values are vectors of entities in those generations."
|
||||
(let [evolve1
|
||||
(fn [[op id-e a id-v :as entity]]
|
||||
(let [e* (get id->e id-e)
|
||||
v* (get id->e id-v)]
|
||||
(if e*
|
||||
(if v*
|
||||
[:resolved [op e* a v*]]
|
||||
[:allocations-v [op e* a id-v]])
|
||||
(if v*
|
||||
[:upserts-e [op id-e a v*]]
|
||||
[:upserts-ev entity]))))]
|
||||
(util/group-by-kv evolve1 upserts-ev)))
|
||||
|
||||
(defn evolve-allocations-e [id->e allocations-e]
|
||||
"Given a map id->e of id-literals to integer entids, evolve the given entities. Returns a map
|
||||
whose keys are generations and whose values are vectors of entities in those generations."
|
||||
(let [evolve1
|
||||
(fn [[op id-e a v :as entity]]
|
||||
(if-let [e* (get id->e id-e)]
|
||||
[:resolved [op e* a v]]
|
||||
[:allocations-e entity]))]
|
||||
(util/group-by-kv evolve1 allocations-e)))
|
||||
|
||||
(defn evolve-allocations-v [id->e allocations-v]
|
||||
"Given a map id->e of id-literals to integer entids, evolve the given entities. Returns a map
|
||||
whose keys are generations and whose values are vectors of entities in those generations."
|
||||
(let [evolve1
|
||||
(fn [[op e a id-v :as entity]]
|
||||
(if-let [v* (get id->e id-v)]
|
||||
[:resolved [op e a v*]]
|
||||
[:allocations-v entity]))]
|
||||
(util/group-by-kv evolve1 allocations-v)))
|
||||
|
||||
(defn evolve-allocations-ev [id->e allocations-ev]
|
||||
"Given a map id->e of id-literals to integer entids, evolve the entities in allocations-ev. Returns a
|
||||
map whose keys are generations and whose values are vectors of entities in those generations."
|
||||
(let [evolve1
|
||||
(fn [[op id-e a id-v :as entity]]
|
||||
(let [e* (get id->e id-e)
|
||||
v* (get id->e id-v)]
|
||||
(if e*
|
||||
(if v*
|
||||
[:resolved [op e* a v*]]
|
||||
[:allocations-v [op e* a id-v]])
|
||||
(if v*
|
||||
[:allocations-e [op id-e a v*]]
|
||||
[:allocations-ev entity]))))]
|
||||
(util/group-by-kv evolve1 allocations-ev)))
|
||||
|
||||
(defn <evolve [db evolution]
|
||||
"Evolve a map of generations {:upserts-e [...], :upserts-ev [...], ...} as much as possible.
|
||||
|
||||
The algorithm is as follows.
|
||||
|
||||
First, resolve :upserts-e against the database. Some [a v] -> e will upsert; some will not.
|
||||
Some :upserts-e evolve to become actual :upserts (they upserted!); any other :upserts-e evolve to
|
||||
become :allocations-e (they did not upsert, and will not upsert this transaction). All :upserts-e
|
||||
will evolve out of the :upserts-e generation: each one upserts or does not.
|
||||
|
||||
Using the newly upserted id-literals, some :upserts-ev evolve to become :resolved;
|
||||
some :upserts-ev evolve to become :upserts-e; and some :upserts-ev remain :upserts-ev.
|
||||
|
||||
Likewise, some :allocations-ev evolve to become :allocations-e, :allocations-v, or :resolved; some
|
||||
:allocations-e evolve to become :resolved; and some :allocations-v evolve to become :resolved.
|
||||
|
||||
If we have *new* :upserts-e (i.e., some :upserts-ev become :upserts-e), then we may be able to
|
||||
make more progress. We recurse, trying to resolve these new :upserts-e.
|
||||
|
||||
Eventually we will have no :upserts-e. At this point, :upserts-ev become :allocations-ev, and now
|
||||
we have :entities, :upserted, :resolved, and various :allocations-*.
|
||||
|
||||
As a future optimization, :upserts do not need to be inserted; they upserted, so they already
|
||||
exist in the DB. (We still need to verify uniqueness and ensure no overlapping can occur.)
|
||||
Similarly, :allocations-* do not need to be checked for existence, so they can be written to the DB
|
||||
faster."
|
||||
(go-pair
|
||||
(let [upserts-e (seq (:upserts-e evolution))
|
||||
id->e (and upserts-e
|
||||
(<? (<resolve-upserts-e db upserts-e)))]
|
||||
(if-not id->e
|
||||
;; No more progress to be made. Any upserts-ev must just be allocations.
|
||||
(update
|
||||
(dissoc evolution :upserts-ev :upserts-e)
|
||||
:allocations-ev concat (:upserts-ev evolution))
|
||||
;; Progress can be made. Try to evolve further.
|
||||
(let [{:keys [upserted resolved upserts-ev allocations-ev allocations-e allocations-v entities]} evolution]
|
||||
(merge-with
|
||||
concat
|
||||
{:upserted upserted
|
||||
:resolved resolved
|
||||
:entities entities
|
||||
;; The keys of the id->e map are unique between generation steps, so we can simply
|
||||
;; concat tempids. Suppose that id->e and id->e* are two such mappings, resolved on
|
||||
;; subsequent evolutionary steps, and that id is a key in the intersection of the two
|
||||
;; key sets. This can't happen: if id maps to e via id->e, all instances of id have
|
||||
;; been evolved forward (replaced with e) before we try to resolve the next set of
|
||||
;; :upserts-e. That is, we'll never successfully upsert the same id-literal in more
|
||||
;; than one generation step. (We might upsert the same id-literal to multiple entids
|
||||
;; via distinct [a v] pairs in a single generation step; in this case,
|
||||
;; <resolve-upserts-e will throw.)
|
||||
:tempids id->e}
|
||||
(evolve-upserts-ev id->e upserts-ev)
|
||||
(evolve-upserts-e id->e upserts-e)
|
||||
(evolve-allocations-ev id->e allocations-ev)
|
||||
(evolve-allocations-e id->e allocations-e)
|
||||
(evolve-allocations-v id->e allocations-v)))))))
|
||||
|
||||
;; TODO: do this in one step, rather than iterating.
|
||||
(defn allocate [report evolution]
|
||||
"Given a maximally evolved map of generations, allocate entids for all id-literals that did not
|
||||
get upserted."
|
||||
(let [{:keys [tempids upserted resolved allocations-ev allocations-e allocations-v entities]} evolution
|
||||
initial-report (assoc report :tempids tempids)]
|
||||
(loop [report
|
||||
(assoc initial-report
|
||||
;; TODO: drop :upserted, they already exist in the DB; and don't search for
|
||||
;; :allocations-*, they definitely don't already exist in the DB.
|
||||
:entities (concat upserted resolved entities))
|
||||
|
||||
es
|
||||
(concat allocations-ev allocations-e allocations-v)]
|
||||
(let [[[op e a v :as entity] & entities] es]
|
||||
(cond
|
||||
(nil? entity)
|
||||
report
|
||||
|
||||
(id-literal? e)
|
||||
(let [eid (or (get-in report [:tempids e]) (-next-eid! (:part-map-atom report) e))]
|
||||
(recur (allocate-eid report e eid) (cons [op eid a v] entities)))
|
||||
|
||||
(id-literal? v)
|
||||
(let [eid (or (get-in report [:tempids v]) (-next-eid! (:part-map-atom report) v))]
|
||||
(recur (allocate-eid report v eid) (cons [op e a eid] entities)))
|
||||
|
||||
true
|
||||
(recur (transact-entity report entity) entities)
|
||||
)))))
|
||||
|
||||
(defn <resolve-id-literals
|
||||
"Upsert uniquely identified literals when possible and allocate new entids for all other id literals.
|
||||
|
||||
It's worth noting that some amount of trial and error is probably
|
||||
necessary here, since [[-1 :ref -2] [-2 :ref -1]] is a valid input.
|
||||
It's my belief that no graph algorithm can correctly order the
|
||||
id-literals in quasi-linear time, since that algorithm will need to
|
||||
accept all permutations of the id-literals. Therefore, we simplify
|
||||
by accepting that we may process the input multiple times, and we
|
||||
regain some efficiency by sorting so that upserts happen earlier and
|
||||
we are most likely to find a successful entid allocation without
|
||||
multiple trials.
|
||||
|
||||
Concretely, we sort [-1 a v] < [-1 a -2] < [e a -1] < [e a v]. This
|
||||
means simple upserts will be processed early, followed by entities
|
||||
with multiple id-literals that we hope will reduce to simple upserts
|
||||
based on the earlier upserts. After that, we handle what should be
|
||||
simple allocations."
|
||||
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
(go-pair
|
||||
(let [schema (db/schema db)
|
||||
unique-identity? (memoize (partial ds/unique-identity? schema))
|
||||
|
||||
generations
|
||||
(group-by (partial id-literal-generation unique-identity?) (:entities report))
|
||||
|
||||
evolution
|
||||
(<? (<evolve db generations))
|
||||
]
|
||||
(allocate report evolution))))
|
||||
|
||||
(defn- transact-report [report datom]
|
||||
(update-in report [:tx-data] conj datom))
|
||||
|
||||
(defn- <ensure-schema-constraints
|
||||
"Throw unless all entities in :entities obey the schema constraints."
|
||||
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
;; TODO: consider accumulating errors to show more meaningful error reports.
|
||||
;; TODO: constrain entities; constrain attributes.
|
||||
|
||||
(go-pair
|
||||
(let [schema (db/schema db)]
|
||||
(doseq [[op e a v] (:entities report)]
|
||||
(if (and e a v)
|
||||
(ds/ensure-valid-value schema a v))))
|
||||
report))
|
||||
|
||||
(defn <transact-tx-data
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
(let [<apply-entities (fn [db report]
|
||||
(go-pair
|
||||
(let [tx-data (<? (db/<apply-entities db (:tx report) (:entities report)))]
|
||||
(assoc report :tx-data tx-data))))]
|
||||
(go-pair
|
||||
(->>
|
||||
report
|
||||
(preprocess db)
|
||||
|
||||
(<resolve-lookup-refs db)
|
||||
(<?)
|
||||
(p :resolve-lookup-refs)
|
||||
|
||||
(<resolve-id-literals db)
|
||||
(<?)
|
||||
(p :resolve-id-literals)
|
||||
|
||||
(<ensure-schema-constraints db)
|
||||
(<?)
|
||||
(p :ensure-schema-constraints)
|
||||
|
||||
(<apply-entities db)
|
||||
(<?)
|
||||
(p :apply-entities)
|
||||
))))
|
||||
|
||||
(defn- is-ident? [db [_ a & _]]
|
||||
(= a (db/entid db :db/ident)))
|
||||
|
||||
(defn collect-db-ident-assertions
|
||||
"Transactions may add idents, install new partitions, and install new schema attributes.
|
||||
Collect :db/ident assertions into :added-idents and :retracted-idents here."
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
;; TODO: use q to filter the report!
|
||||
(let [original-report report
|
||||
tx-data (:tx-data report)
|
||||
original-ident-assertions (filter (partial is-ident? db) tx-data)]
|
||||
(loop [report original-report
|
||||
ident-assertions original-ident-assertions]
|
||||
(let [[ia & ias] ident-assertions]
|
||||
(cond
|
||||
(nil? ia)
|
||||
report
|
||||
|
||||
:else
|
||||
(let [ident (:v ia)]
|
||||
(if (keyword? ident)
|
||||
(recur (assoc-in report [(if (:added ia)
|
||||
:added-idents
|
||||
:retracted-idents)
|
||||
ident] (:e ia)) ias)
|
||||
(raise "Cannot add or retract a :db/ident with a non-keyword value, got " ia
|
||||
{:error :schema/idents
|
||||
:op ia }))))))))
|
||||
|
||||
(defn collect-db-install-assertions
|
||||
"Transactions may add idents, install new partitions, and install new schema attributes.
|
||||
Collect [:db.part/db :db.install/attribute] assertions here."
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
;; Symbolicating is not expensive.
|
||||
(let [symbolicate-install-datom
|
||||
(fn [[e a v tx added]]
|
||||
(datom
|
||||
(db/ident db e)
|
||||
(db/ident db a)
|
||||
(db/ident db v)
|
||||
tx
|
||||
added))
|
||||
datoms (map symbolicate-install-datom (:tx-data report))
|
||||
schema-fragment (datomish.schema-changes/datoms->schema-fragment
|
||||
datoms
|
||||
(:ident-map db))]
|
||||
(assoc-in report [:added-attributes] schema-fragment)))
|
||||
|
||||
(defn collect-db-alter-assertions
|
||||
"Transactions may alter existing attributes."
|
||||
[db report]
|
||||
{:pre [(db/db? db) (report? report)]}
|
||||
|
||||
;; We walk the tx-data once to find any altered attributes.
|
||||
;; We walk it again to collect the new properties of those
|
||||
;; attributes.
|
||||
(let [tx-data (:tx-data report)
|
||||
|
||||
;; This is what we're looking for.
|
||||
alter-attribute (db/entid db :db.alter/attribute)
|
||||
|
||||
altered-attributes (reduce (fn [acc [_ a v & _]]
|
||||
(if (= a alter-attribute)
|
||||
(conj acc v)
|
||||
acc))
|
||||
#{}
|
||||
tx-data)]
|
||||
(if (empty? altered-attributes)
|
||||
report
|
||||
|
||||
(assoc report
|
||||
:altered-attributes
|
||||
(reduce
|
||||
(fn [acc [e a v _ added? :as datom]]
|
||||
;; We ignore the retraction of the old value.
|
||||
;; We already have it in our in-memory schema!
|
||||
(if (and added?
|
||||
(contains? altered-attributes e))
|
||||
(conj acc [e a v])
|
||||
acc))
|
||||
[]
|
||||
tx-data)))))
|
||||
|
||||
;; TODO: expose this in a more appropriate way.
|
||||
(defn <with-internal [db tx-data merge-attr]
|
||||
(go-pair
|
||||
(let [part-map-atom
|
||||
(atom (db/part-map db))
|
||||
|
||||
tx
|
||||
(-next-eid! part-map-atom (id-literal :db.part/tx))
|
||||
|
||||
report (->>
|
||||
(map->TxReport
|
||||
{:db-before db
|
||||
:db-after db
|
||||
;; This mimics DataScript. It's convenient to be able to extract the
|
||||
;; transaction ID and transaction timestamp directly from the report; Datomic
|
||||
;; makes this surprisingly difficult: one needs a :db.part/tx temporary and an
|
||||
;; explicit upsert of that temporary.
|
||||
:part-map-atom part-map-atom
|
||||
:tx tx
|
||||
:txInstant (db/now db)
|
||||
:entities tx-data
|
||||
:tx-data []
|
||||
:tempids {}
|
||||
:added-parts {}
|
||||
:added-idents {}
|
||||
:retracted-idents {}
|
||||
:added-attributes {}
|
||||
:altered-attributes {}
|
||||
})
|
||||
|
||||
(<transact-tx-data db)
|
||||
(<?)
|
||||
(p :transact-tx-data)
|
||||
|
||||
(collect-db-ident-assertions db)
|
||||
(p :collect-db-ident-assertions)
|
||||
|
||||
(collect-db-install-assertions db)
|
||||
(p :collect-db-install-assertions)
|
||||
|
||||
(collect-db-alter-assertions db)
|
||||
(p :collect-db-alter-assertions)
|
||||
)
|
||||
|
||||
db-after (->
|
||||
db
|
||||
|
||||
(db/<apply-db-part-map @(:part-map-atom report))
|
||||
(<?)
|
||||
(->> (p :apply-db-part-changes))
|
||||
|
||||
(db/<apply-db-ident-assertions (:added-idents report)
|
||||
(:retracted-idents report))
|
||||
(<?)
|
||||
(->> (p :apply-db-ident-assertions))
|
||||
|
||||
(db/<apply-db-install-assertions (:added-attributes report) merge-attr)
|
||||
(<?)
|
||||
(->> (p :apply-db-install-assertions))
|
||||
|
||||
(db/<apply-db-alter-assertions (:altered-attributes report))
|
||||
(<?)
|
||||
(->> (p :apply-db-alter-assertions))
|
||||
)
|
||||
]
|
||||
(-> report
|
||||
(assoc-in [:db-after] db-after)))))
|
||||
|
||||
(defn <transact-tx-data-in-transaction! [db tx-data]
|
||||
(let [fail-touch-attr (fn [old new] (raise "Altering schema attributes is not yet supported, got " new " altering existing schema attribute " old
|
||||
{:error :schema/alter-schema :old old :new new}))]
|
||||
(<with-internal db tx-data fail-touch-attr)))
|
||||
|
||||
(defn <db-transact-tx-data! [db tx-data]
|
||||
(go-pair
|
||||
(:db-after (<? (<transact-tx-data-in-transaction! db tx-data)))))
|
||||
|
||||
(defn <transact!
|
||||
"Submits a transaction to the database for writing.
|
||||
|
||||
Returns a pair-chan resolving to `[result error]`."
|
||||
([conn tx-data]
|
||||
(<transact! conn tx-data (a/chan 1) true))
|
||||
([conn tx-data-or-fn result close?]
|
||||
{:pre [(conn? conn)]}
|
||||
;; Any race to put! is a real race between callers of <transact!. We can't just park on put!,
|
||||
;; because the parked putter that is woken is non-deterministic.
|
||||
(let [op (if (fn? tx-data-or-fn)
|
||||
:sentinel-fn
|
||||
:sentinel-transact)
|
||||
closed? (not (a/put! (:transact-chan conn)
|
||||
[op tx-data-or-fn result close?]))]
|
||||
(go-pair
|
||||
;; We want to return a pair-chan, no matter what kind of channel result is.
|
||||
(if closed?
|
||||
(raise "Connection is closed" {:error :transact/connection-closed})
|
||||
(<? result))))))
|
||||
|
||||
(defn- start-transactor [conn]
|
||||
(let [token-chan (a/chan 1)]
|
||||
(go
|
||||
(>! token-chan (gensym "transactor-token"))
|
||||
(loop []
|
||||
(when-let [token (<! token-chan)]
|
||||
(when-let [[sentinel tx-data-or-fn result close?] (<! (:transact-chan conn))]
|
||||
(let
|
||||
[pair
|
||||
(<!
|
||||
(go-pair ;; Catch exceptions, return the pair.
|
||||
(case sentinel
|
||||
:sentinel-close
|
||||
;; Time to close the underlying DB.
|
||||
(<? (db/close-db @(:current-db conn)))
|
||||
|
||||
;; Default: process the transaction.
|
||||
(do
|
||||
(when @(:closed? conn)
|
||||
;; Drain enqueued transactions.
|
||||
(raise "Connection is closed" {:error :transact/connection-closed}))
|
||||
|
||||
(let [db (db conn)
|
||||
in-transaction-fn
|
||||
(case sentinel
|
||||
:sentinel-fn
|
||||
;; This is a function that we'd like to run
|
||||
;; within a database transaction. See
|
||||
;; db/in-transaction! for details.
|
||||
;; The function is invoked with two arguments:
|
||||
;; the db and a function that takes (db,
|
||||
;; tx-data) and transacts it to return a
|
||||
;; TxReport.
|
||||
;; The function must return a TxReport.
|
||||
;; The function must not itself call
|
||||
;; `in-transaction!` or `<transact!`.
|
||||
(partial tx-data-or-fn db <transact-tx-data-in-transaction!)
|
||||
|
||||
:sentinel-transact
|
||||
;; This is data. Apply it with `<transact-tx-data-in-transaction!`.
|
||||
(partial <transact-tx-data-in-transaction! db tx-data-or-fn))
|
||||
|
||||
report (<? (db/in-transaction! db in-transaction-fn))]
|
||||
(when report
|
||||
;; <r-t-t-d! returns non-nil or throws, but we still check report just in
|
||||
;; case. Here, in-transaction! function completed and returned non-nil,
|
||||
;; so the transaction has committed.
|
||||
(reset! (:current-db conn) (:db-after report))
|
||||
(>! (:listener-source conn) report))
|
||||
report)))))]
|
||||
;; Even when report is nil (transaction not committed), pair is non-nil.
|
||||
(>! result pair))
|
||||
(>! token-chan token)
|
||||
(when close?
|
||||
(a/close! result))
|
||||
(recur)))))))
|
||||
|
||||
(defn listen-chan!
|
||||
"Put reports successfully transacted against the given connection onto the given channel.
|
||||
|
||||
The listener sink channel must be unblocking.
|
||||
|
||||
Returns the channel listened to, for future unlistening."
|
||||
[conn listener-sink]
|
||||
{:pre [(conn? conn)]}
|
||||
(when-not (util/unblocking-chan? listener-sink)
|
||||
(raise "Listener sinks must be channels backed by unblocking buffers"
|
||||
{:error :transact/bad-listener :listener-sink listener-sink}))
|
||||
;; Tapping an already registered sink is a no-op.
|
||||
(a/tap (:listener-mult conn) listener-sink)
|
||||
listener-sink)
|
||||
|
||||
(defn- -listen-chan
|
||||
[f n]
|
||||
(let [c (a/chan (a/dropping-buffer n))]
|
||||
(go-loop []
|
||||
(when-let [v (<! c)]
|
||||
(do
|
||||
(f v)
|
||||
(recur))))
|
||||
c))
|
||||
|
||||
(defn listen!
|
||||
"Evaluate the given function with reports successfully transacted against the given connection.
|
||||
|
||||
`f` should be a function of one argument, the transaction report.
|
||||
|
||||
Returns the channel listened to, for future calls to `unlisten-chan!`."
|
||||
([conn f]
|
||||
;; Decently large buffer before dropping, for JS consumers.
|
||||
(listen! conn f 1024))
|
||||
([conn f n]
|
||||
{:pre [(fn? f) (pos? n)]}
|
||||
(listen-chan! conn (-listen-chan f n))))
|
||||
|
||||
(defn unlisten-chan! [conn listener-sink]
|
||||
"Stop putting reports successfully transacted against the given connection onto the given channel."
|
||||
{:pre [(conn? conn)]}
|
||||
;; Untapping an un-registered sink is a no-op.
|
||||
(a/untap (:listener-mult conn) listener-sink))
|
|
@ -1,116 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.transact.bootstrap)
|
||||
|
||||
(def v1-symbolic-schema
|
||||
{:db/ident {:db/valueType :db.type/keyword
|
||||
:db/cardinality :db.cardinality/one
|
||||
:db/unique :db.unique/identity}
|
||||
:db.install/partition {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/many}
|
||||
:db.install/valueType {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/many}
|
||||
:db.install/attribute {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/many}
|
||||
;; TODO: support user-specified functions in the future.
|
||||
;; :db.install/function {:db/valueType :db.type/ref
|
||||
;; :db/cardinality :db.cardinality/many}
|
||||
:db/txInstant {:db/valueType :db.type/long
|
||||
:db/cardinality :db.cardinality/one
|
||||
:db/index true}
|
||||
:db/valueType {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/cardinality {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/doc {:db/valueType :db.type/string
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/unique {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/isComponent {:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/index {:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/fulltext {:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one}
|
||||
:db/noHistory {:db/valueType :db.type/boolean
|
||||
:db/cardinality :db.cardinality/one}})
|
||||
|
||||
(def v2-symbolic-schema
|
||||
{:db.alter/attribute {:db/valueType :db.type/ref
|
||||
:db/cardinality :db.cardinality/many}
|
||||
:db.schema/version {:db/valueType :db.type/long
|
||||
:db/cardinality :db.cardinality/one}
|
||||
|
||||
;; unique-value because an attribute can only belong to a single
|
||||
;; schema fragment.
|
||||
:db.schema/attribute {:db/valueType :db.type/ref
|
||||
:db/unique :db.unique/value
|
||||
:db/cardinality :db.cardinality/many}})
|
||||
|
||||
(def symbolic-schema (merge v1-symbolic-schema v2-symbolic-schema))
|
||||
|
||||
(def v1-idents
|
||||
{:db/ident 1
|
||||
:db.part/db 2
|
||||
:db/txInstant 3
|
||||
:db.install/partition 4
|
||||
:db.install/valueType 5
|
||||
:db.install/attribute 6
|
||||
:db/valueType 7
|
||||
:db/cardinality 8
|
||||
:db/unique 9
|
||||
:db/isComponent 10
|
||||
:db/index 11
|
||||
:db/fulltext 12
|
||||
:db/noHistory 13
|
||||
:db/add 14
|
||||
:db/retract 15
|
||||
:db.part/user 16
|
||||
:db.part/tx 17
|
||||
:db/excise 18
|
||||
:db.excise/attrs 19
|
||||
:db.excise/beforeT 20
|
||||
:db.excise/before 21
|
||||
:db.alter/attribute 22
|
||||
:db.type/ref 23
|
||||
:db.type/keyword 24
|
||||
:db.type/long 25
|
||||
:db.type/double 26
|
||||
:db.type/string 27
|
||||
:db.type/boolean 28
|
||||
:db.type/instant 29
|
||||
:db.type/bytes 30
|
||||
:db.cardinality/one 31
|
||||
:db.cardinality/many 32
|
||||
:db.unique/value 33
|
||||
:db.unique/identity 34
|
||||
:db/doc 35})
|
||||
|
||||
(def v2-idents
|
||||
{:db.schema/version 36 ; Fragment -> version.
|
||||
:db.schema/attribute 37 ; Fragment -> attribute.
|
||||
})
|
||||
|
||||
(def idents (merge v1-idents v2-idents))
|
||||
|
||||
(def parts
|
||||
{:db.part/db {:start 0 :idx (inc (apply max (vals idents)))}
|
||||
:db.part/user {:start 0x10000 :idx 0x10000}
|
||||
:db.part/tx {:start 0x10000000 :idx 0x10000000}
|
||||
})
|
||||
|
||||
(defn tx-data [new-idents new-symbolic-schema]
|
||||
(concat
|
||||
(map (fn [[ident entid]] [:db/add entid :db/ident ident]) new-idents)
|
||||
;; TODO: install partitions as well, like (map (fn [[ident entid]] [:db/add :db.part/db :db.install/partition ident])).
|
||||
(map (fn [[ident attrs]] (assoc attrs :db/id ident)) new-symbolic-schema)
|
||||
(map (fn [[ident attrs]] [:db/add :db.part/db :db.install/attribute (get idents ident)]) new-symbolic-schema) ;; TODO: fail if nil.
|
||||
))
|
|
@ -1,99 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.transact.explode
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.pair-chan :refer [go-pair <?]]
|
||||
[cljs.core.async.macros :refer [go]]))
|
||||
(:require
|
||||
[datomish.db :as db]
|
||||
[datomish.util :as util #?(:cljs :refer-macros :clj :refer) [raise raise-str cond-let]]
|
||||
[datomish.schema :as ds]
|
||||
#?@(:clj [[datomish.pair-chan :refer [go-pair <?]]
|
||||
[clojure.core.async :as a :refer [chan go <! >!]]])
|
||||
#?@(:cljs [[datomish.pair-chan]
|
||||
[cljs.core.async :as a :refer [chan <! >!]]])))
|
||||
|
||||
(defn- #?@(:clj [^Boolean reverse-ref?]
|
||||
:cljs [^boolean reverse-ref?]) [attr]
|
||||
(if (keyword? attr)
|
||||
(= \_ (nth (name attr) 0))
|
||||
(raise "Bad attribute type: " attr ", expected keyword"
|
||||
{:error :transact/syntax, :attribute attr})))
|
||||
|
||||
(defn- reverse-ref [attr]
|
||||
(if (keyword? attr)
|
||||
(if (reverse-ref? attr)
|
||||
(keyword (namespace attr) (subs (name attr) 1))
|
||||
(keyword (namespace attr) (str "_" (name attr))))
|
||||
(raise "Bad attribute type: " attr ", expected keyword"
|
||||
{:error :transact/syntax, :attribute attr})))
|
||||
|
||||
(declare explode-entity)
|
||||
|
||||
(defn- explode-entity-a-v [db entity eid a v]
|
||||
(let [a (db/ident db a) ;; We expect a to be an ident, but it's legal to provide an entid.
|
||||
a* (db/entid db a)
|
||||
reverse? (reverse-ref? a)
|
||||
straight-a (if reverse? (reverse-ref a) a)
|
||||
straight-a* (db/entid db straight-a)
|
||||
_ (when (and reverse? (not (ds/ref? (db/schema db) straight-a*)))
|
||||
(raise "Bad attribute " a ": reverse attribute name requires {:db/valueType :db.type/ref} in schema"
|
||||
{:error :transact/syntax, :attribute a, :op entity}))]
|
||||
(cond
|
||||
reverse?
|
||||
(explode-entity-a-v db entity v straight-a eid)
|
||||
|
||||
(and (map? v)
|
||||
(not (db/lookup-ref? v))
|
||||
(not (db/id-literal? v)))
|
||||
;; Another entity is given as a nested map.
|
||||
(if (ds/ref? (db/schema db) straight-a*)
|
||||
(let [other (assoc v (reverse-ref a) eid
|
||||
;; TODO: make the new ID have the same part as the original eid.
|
||||
;; TODO: make the new ID not show up in the tempids map. (Does Datomic exposed the new ID this way?)
|
||||
:db/id (db/id-literal :db.part/user))]
|
||||
(explode-entity db other))
|
||||
(raise "Bad attribute " a ": nested map " v " given but attribute name requires {:db/valueType :db.type/ref} in schema"
|
||||
{:error :transact/entity-map-type-ref
|
||||
:op entity }))
|
||||
|
||||
(sequential? v)
|
||||
(if (some nil? v)
|
||||
;; This is a hard one to track down, with a steep stack back in `transact/ensure-entity-form`, so
|
||||
;; we error specifically here rather than expanding further.
|
||||
(raise "Sequential attribute value for " a " contains nil."
|
||||
{:error :transact/sequence-contains-nil
|
||||
:op entity
|
||||
:attribute a
|
||||
:value v})
|
||||
(if (ds/multival? (db/schema db) a*) ;; dm/schema
|
||||
(mapcat (partial explode-entity-a-v db entity eid a) v) ;; Allow sequences of nested maps, etc. This does mean [[1]] will work.
|
||||
(raise "Sequential values " v " but attribute " a " is :db.cardinality/one"
|
||||
{:error :transact/entity-sequential-cardinality-one
|
||||
:op entity })))
|
||||
|
||||
true
|
||||
[[:db/add eid a* v]])))
|
||||
|
||||
(defn- explode-entity [db entity]
|
||||
(if (map? entity)
|
||||
(if-let [eid (:db/id entity)]
|
||||
(mapcat (partial apply explode-entity-a-v db entity eid) (dissoc entity :db/id))
|
||||
(raise "Map entity missing :db/id, got " entity
|
||||
{:error :transact/entity-missing-db-id
|
||||
:op entity }))
|
||||
[entity]))
|
||||
|
||||
(defn explode-entities [db entities]
|
||||
"Explode map shorthand, such as {:db/id e :attr value :_reverse ref}, to a list of vectors,
|
||||
like [[:db/add e :attr value] [:db/add ref :reverse e]]."
|
||||
(mapcat (partial explode-entity db) entities))
|
|
@ -1,22 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.tufte-stub)
|
||||
|
||||
;;; The real version of Tufte pulls in cljs.test, which pulls in
|
||||
;;; pprint, which breaks the build in Firefox.
|
||||
|
||||
(defmacro p [name & forms]
|
||||
`(do ~@forms))
|
||||
|
||||
(defmacro profile [options & forms]
|
||||
`(do ~@forms))
|
||||
|
||||
(defn add-basic-println-handler! [args])
|
|
@ -1,201 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.util
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.util]
|
||||
[cljs.core.async.macros :refer [go go-loop]]))
|
||||
(:require
|
||||
[clojure.string :as str]
|
||||
#?@(:clj [[clojure.core.async :as a :refer [go go-loop <! >!]]
|
||||
[clojure.core.async.impl.protocols]])
|
||||
#?@(:cljs [[cljs.core.async :as a :refer [<! >!]]
|
||||
[cljs.core.async.impl.protocols]])))
|
||||
|
||||
#?(:clj
|
||||
(defmacro raise-str
|
||||
"Like `raise`, but doesn't require a data argument."
|
||||
[& fragments]
|
||||
`(throw (ex-info (str ~@(map (fn [m#] (if (string? m#) m# (list 'pr-str m#))) fragments)) {}))))
|
||||
|
||||
#?(:clj
|
||||
(defmacro raise
|
||||
"The last argument must be a map."
|
||||
[& fragments]
|
||||
(let [msgs (butlast fragments)
|
||||
data (last fragments)]
|
||||
`(throw
|
||||
(ex-info
|
||||
(str ~@(map (fn [m#] (if (string? m#) m# (list 'pr-str m#))) msgs)) ~data)))))
|
||||
|
||||
#?(:clj
|
||||
(defmacro cond-let [& clauses]
|
||||
(when-let [[test expr & rest] clauses]
|
||||
`(~(if (vector? test) 'if-let 'if) ~test
|
||||
~expr
|
||||
(cond-let ~@rest)))))
|
||||
|
||||
(defn ensure-datalog-var [x]
|
||||
(or (and (symbol? x)
|
||||
(nil? (namespace x))
|
||||
(str/starts-with? (name x) "?"))
|
||||
(throw (ex-info (str x " is not a Datalog var.") {}))))
|
||||
|
||||
(defn var->sql-type-var
|
||||
"Turns '?xyz into :_xyz_type_tag."
|
||||
[x]
|
||||
(and
|
||||
(ensure-datalog-var x)
|
||||
(keyword (str "_" (subs (name x) 1) "_type_tag"))))
|
||||
|
||||
(defn var->sql-var
|
||||
"Turns '?xyz into :xyz."
|
||||
[x]
|
||||
(and
|
||||
(ensure-datalog-var x)
|
||||
(keyword (subs (name x) 1))))
|
||||
|
||||
(defn aggregate->sql-var
|
||||
"Turns (:max 'column) into :%max.column."
|
||||
[fn-kw x]
|
||||
(keyword (str "%" (name fn-kw) "." (name x))))
|
||||
|
||||
(defn dissoc-from
|
||||
"Given a map `m` and a key `k`, find the sub-map named by `k`
|
||||
and remove all of its keys in `vs`."
|
||||
[m k vs]
|
||||
(assoc m k (apply dissoc (get m k) vs)))
|
||||
|
||||
(defn concat-in
|
||||
{:static true}
|
||||
[m [k & ks] vs]
|
||||
(if ks
|
||||
(assoc m k (concat-in (get m k) ks vs))
|
||||
(assoc m k (concat (get m k) vs))))
|
||||
|
||||
(defn append-in
|
||||
"Associates a value into a sequence in a nested associative structure, where
|
||||
ks is a sequence of keys and v is the new value, and returns a new nested
|
||||
structure.
|
||||
Always puts the value last.
|
||||
If any levels do not exist, hash-maps will be created. If the destination
|
||||
sequence does not exist, a new one is created."
|
||||
{:static true}
|
||||
[m path v]
|
||||
(concat-in m path [v]))
|
||||
|
||||
(defn assoc-if
|
||||
([m k v]
|
||||
(if v
|
||||
(assoc m k v)
|
||||
m))
|
||||
([m k v & kvs]
|
||||
(if kvs
|
||||
(let [[kk vv & remainder] kvs]
|
||||
(apply assoc-if
|
||||
(assoc-if m k v)
|
||||
kk vv remainder))
|
||||
(assoc-if m k v))))
|
||||
|
||||
(defmacro while-let [binding & forms]
|
||||
`(loop []
|
||||
(when-let ~binding
|
||||
~@forms
|
||||
(recur))))
|
||||
|
||||
(defn every-pair? [f xs ys]
|
||||
(or (and (empty? xs) (empty? ys))
|
||||
(and (not (empty? xs))
|
||||
(not (empty? ys))
|
||||
(f (first xs) (first ys))
|
||||
(recur f (rest xs) (rest ys)))))
|
||||
|
||||
(defn mapvals [f m]
|
||||
(into (empty m) (map #(vector (first %) (f (second %))) m)))
|
||||
|
||||
(defn unblocking-chan?
|
||||
"Returns true if the channel will never block. That is to say, puts
|
||||
into this channel will never cause the buffer to be full."
|
||||
[chan]
|
||||
(a/unblocking-buffer?
|
||||
;; See http://dev.clojure.org/jira/browse/ASYNC-181.
|
||||
(#?(:cljs .-buf :clj .buf) chan)))
|
||||
|
||||
;; Modified from http://dev.clojure.org/jira/browse/ASYNC-23.
|
||||
#?(:cljs
|
||||
(deftype UnlimitedBuffer [buf]
|
||||
cljs.core.async.impl.protocols/UnblockingBuffer
|
||||
|
||||
cljs.core.async.impl.protocols/Buffer
|
||||
(full? [this]
|
||||
false)
|
||||
(remove! [this]
|
||||
(.pop buf))
|
||||
(add!* [this itm]
|
||||
(.unshift buf itm))
|
||||
(close-buf! [this])
|
||||
|
||||
cljs.core/ICounted
|
||||
(-count [this]
|
||||
(.-length buf))))
|
||||
|
||||
#?(:clj
|
||||
(deftype UnlimitedBuffer [^java.util.LinkedList buf]
|
||||
clojure.core.async.impl.protocols/UnblockingBuffer
|
||||
|
||||
clojure.core.async.impl.protocols/Buffer
|
||||
(full? [this]
|
||||
false)
|
||||
(remove! [this]
|
||||
(.removeLast buf))
|
||||
(add!* [this itm]
|
||||
(.addFirst buf itm))
|
||||
(close-buf! [this])
|
||||
|
||||
clojure.lang.Counted
|
||||
(count [this]
|
||||
(.size buf))))
|
||||
|
||||
(defn unlimited-buffer []
|
||||
(UnlimitedBuffer. #?(:cljs (array) :clj (java.util.LinkedList.))))
|
||||
|
||||
(defn group-by-kv
|
||||
"Returns a map of the elements of coll keyed by the first element of
|
||||
the result of f on each element. The value at each key will be a
|
||||
vector of the second element of the result of f on the corresponding
|
||||
elements, in the order they appeared in coll."
|
||||
{:static true}
|
||||
[f coll]
|
||||
(persistent!
|
||||
(reduce
|
||||
(fn [ret x]
|
||||
(let [[k v] (f x)]
|
||||
(assoc! ret k (conj (get ret k []) v))))
|
||||
(transient {}) coll)))
|
||||
|
||||
(defn repeated-keys
|
||||
"Takes a seq of maps.
|
||||
Returns the set of keys that appear in more than one map."
|
||||
[maps]
|
||||
(if (not (seq (rest maps)))
|
||||
#{}
|
||||
;; This is a perfect use case for transients, except that
|
||||
;; you can't use them for intersection due to CLJ-700.
|
||||
;; http://dev.clojure.org/jira/browse/CLJ-700
|
||||
(loop [overlapping #{}
|
||||
seen #{}
|
||||
key-sets (map (comp set keys) maps)]
|
||||
(if-let [ks (first key-sets)]
|
||||
(let [overlap (clojure.set/intersection seen ks)]
|
||||
(recur (clojure.set/union overlapping overlap)
|
||||
(clojure.set/union seen ks)
|
||||
(rest key-sets)))
|
||||
overlapping))))
|
|
@ -1,56 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.test-macros
|
||||
#?(:cljs
|
||||
(:require-macros
|
||||
[datomish.test-macros]))
|
||||
(:require
|
||||
[datomish.pair-chan]))
|
||||
|
||||
;; From https://github.com/plumatic/schema/blob/bf469889b730feb09448fd085be5828f28425b41/src/clj/schema/macros.clj#L10-L19.
|
||||
(defn cljs-env?
|
||||
"Take the &env from a macro, and tell whether we are expanding into cljs."
|
||||
[env]
|
||||
(boolean (:ns env)))
|
||||
|
||||
(defmacro if-cljs
|
||||
"Return then if we are generating cljs code and else for Clojure code.
|
||||
https://groups.google.com/d/msg/clojurescript/iBY5HaQda4A/w1lAQi9_AwsJ"
|
||||
[then else]
|
||||
(if (cljs-env? &env) then else))
|
||||
|
||||
;; It's a huge pain to declare cross-environment macros. This is awful, but making the namespace a
|
||||
;; parameter appears to be *even worse*.
|
||||
(defmacro deftest-async
|
||||
[name & body]
|
||||
`(if-cljs
|
||||
(cljs.test/deftest
|
||||
~(with-meta name {:async true})
|
||||
(cljs.test/async done#
|
||||
(->
|
||||
(datomish.pair-chan/go-pair ~@body)
|
||||
(cljs.core.async/take! (fn [[v# e#]]
|
||||
(cljs.test/is (= e# nil)) ;; Can't synchronously fail.
|
||||
(done#))))))
|
||||
(clojure.test/deftest
|
||||
~(with-meta name {:async true})
|
||||
(let [[v# e#] (clojure.core.async/<!! (datomish.pair-chan/go-pair ~@body))]
|
||||
(when e# (throw e#)) ;; Assert nil just to be safe, even though we should always throw first.
|
||||
(clojure.test/is (= e# nil))))))
|
||||
|
||||
(defmacro deftest-db
|
||||
[n conn-var & body]
|
||||
`(deftest-async ~n
|
||||
(let [~conn-var (datomish.pair-chan/<? (datomish.api/<connect ""))]
|
||||
(try
|
||||
~@body
|
||||
(finally
|
||||
(datomish.pair-chan/<? (datomish.api/<close ~conn-var)))))))
|
|
@ -1,40 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.cljify)
|
||||
|
||||
(defn cljify
|
||||
"In node, equivalent to `(js->clj o :keywordize-keys true),
|
||||
but successfully passes Clojure Records through. This allows JS API
|
||||
callers to round-trip values they receive from ClojureScript APIs."
|
||||
[x]
|
||||
;; This implementation is almost identical to js->clj, but it allows
|
||||
;; us to hook into the recursion into sequences and objects, and it
|
||||
;; passes through records.
|
||||
(if (record? x)
|
||||
x
|
||||
(cond
|
||||
(satisfies? IEncodeClojure x)
|
||||
(-js->clj x (apply array-map {:keywordize-keys true}))
|
||||
|
||||
(seq? x)
|
||||
(doall (map cljify x))
|
||||
|
||||
(coll? x)
|
||||
(into (empty x) (map cljify x))
|
||||
|
||||
(array? x)
|
||||
(vec (map cljify x))
|
||||
|
||||
(identical? (type x) js/Object)
|
||||
(into {} (for [k (js-keys x)]
|
||||
[(keyword k) (cljify (aget x k))]))
|
||||
|
||||
:else x)))
|
|
@ -1,16 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.core
|
||||
(:require [cljs.nodejs :as nodejs]))
|
||||
|
||||
(defn -main [& args])
|
||||
(set! *main-cli-fn* -main)
|
||||
(nodejs/enable-util-print!)
|
|
@ -1,26 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.js-sqlite
|
||||
(:require
|
||||
[datomish.sqlite :as s]
|
||||
[datomish.js-util :refer [is-node?]]
|
||||
[datomish.promise-sqlite :as promise-sqlite]))
|
||||
|
||||
(def open promise-sqlite/open)
|
||||
|
||||
(extend-protocol s/ISQLiteConnectionFactory
|
||||
string
|
||||
(<sqlite-connection [path]
|
||||
(open path))
|
||||
|
||||
object ;; TODO: narrow this to the result of node-tempfile/tempfile.
|
||||
(<sqlite-connection [tempfile]
|
||||
(open (.-name tempfile))))
|
|
@ -1,18 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.node-tempfile
|
||||
(:require
|
||||
[cljs.nodejs :as nodejs]))
|
||||
|
||||
(def tmp (nodejs/require "tmp"))
|
||||
|
||||
(defn tempfile []
|
||||
(.fileSync tmp)) ;; Cleaned up on process exit.
|
|
@ -1,24 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.node-tempfile-macros)
|
||||
|
||||
(defmacro with-tempfile
|
||||
"Uses a tempfile for some content and delete it immediately"
|
||||
[bindings & body]
|
||||
(cond
|
||||
(= (count bindings) 0) `(do ~@body)
|
||||
(symbol? (bindings 0)) `(let ~(subvec bindings 0 2)
|
||||
(try
|
||||
(with-tempfile ~(subvec bindings 2) ~@body)
|
||||
(finally
|
||||
(.removeCallback ~(bindings 0))))) ;; See Node.js tmp module.
|
||||
:else (throw (java.lang.IllegalArgumentException.
|
||||
"with-tempfile only allows Symbols in bindings"))))
|
|
@ -1,44 +0,0 @@
|
|||
;; Copyright 2016 Mozilla
|
||||
;;
|
||||
;; Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
;; this file except in compliance with the License. You may obtain a copy of the
|
||||
;; License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
;; Unless required by applicable law or agreed to in writing, software distributed
|
||||
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
;; specific language governing permissions and limitations under the License.
|
||||
|
||||
(ns datomish.promise-sqlite
|
||||
(:require
|
||||
[datomish.sqlite :as s]
|
||||
[datomish.cljify :refer [cljify]]
|
||||
[cljs-promises.async]
|
||||
[cljs.nodejs :as nodejs]))
|
||||
|
||||
(def sqlite (nodejs/require "promise-sqlite"))
|
||||
|
||||
(defrecord SQLite3Connection [db]
|
||||
s/ISQLiteConnection
|
||||
(-execute!
|
||||
[db sql bindings]
|
||||
(cljs-promises.async/pair-port
|
||||
(.run (.-db db) sql (or (clj->js bindings) #js []))))
|
||||
|
||||
(-each
|
||||
[db sql bindings row-cb]
|
||||
(let [cb (fn [row]
|
||||
(row-cb (cljify row)))]
|
||||
(cljs-promises.async/pair-port
|
||||
(.each (.-db db) sql (or (clj->js bindings) #js []) (when row-cb cb)))))
|
||||
|
||||
(close
|
||||
[db]
|
||||
(cljs-promises.async/pair-port
|
||||
(.close (.-db db)))))
|
||||
|
||||
(defn open
|
||||
[path & {:keys [mode] :or {mode 6}}]
|
||||
(cljs-promises.async/pair-port
|
||||
(->
|
||||
(.open sqlite.DB path (clj->js {:mode mode}))
|
||||
(.then ->SQLite3Connection))))
|
|
@ -1 +0,0 @@
|
|||
{:externs ["externs/datomish.js"]}
|
|
@ -1,13 +0,0 @@
|
|||
var sqlite = {};
|
||||
|
||||
sqlite.DB = {};
|
||||
|
||||
/**
|
||||
* @return {Promise}
|
||||
*/
|
||||
sqlite.DB.open = function (path, options) {};
|
||||
|
||||
var DBVal = {};
|
||||
DBVal.run = function (sql, bindings) {};
|
||||
DBVal.close = function () {};
|
||||
DBVal.each = function (sql, bindings, cb) {};
|
Loading…
Reference in a new issue