-rw-r--r--  caching-tests/test.db        bin  0 -> 3072 bytes
-rw-r--r--  caching-tests/test.sql        11
-rw-r--r--  caching-tests/test.ur         53
-rw-r--r--  caching-tests/test.urp         7
-rw-r--r--  caching-tests/test.urs         6
-rw-r--r--  include/urweb/urweb_cpp.h      4
-rw-r--r--  src/c/urweb.c                 18
-rw-r--r--  src/cjr_print.sml             66
-rw-r--r--  src/compiler.sig               8
-rw-r--r--  src/compiler.sml              21
-rw-r--r--  src/multimap_fn.sml           14
-rw-r--r--  src/sources                    5
-rw-r--r--  src/sql.sig                   91
-rw-r--r--  src/sql.sml                    8
-rw-r--r--  src/sql_cache.sml            186
15 files changed, 473 insertions, 25 deletions
diff --git a/caching-tests/test.db b/caching-tests/test.db
new file mode 100644
index 00000000..190d2868
--- /dev/null
+++ b/caching-tests/test.db
Binary files differ
diff --git a/caching-tests/test.sql b/caching-tests/test.sql
new file mode 100644
index 00000000..862245b7
--- /dev/null
+++ b/caching-tests/test.sql
@@ -0,0 +1,11 @@
+CREATE TABLE uw_Test_foo01(uw_id integer NOT NULL, uw_bar text NOT NULL,
+ PRIMARY KEY (uw_id)
+
+ );
+
+ CREATE TABLE uw_Test_foo10(uw_id integer NOT NULL, uw_bar text NOT NULL,
+ PRIMARY KEY (uw_id)
+
+ );
+
+ 
\ No newline at end of file
diff --git a/caching-tests/test.ur b/caching-tests/test.ur
new file mode 100644
index 00000000..d13379a8
--- /dev/null
+++ b/caching-tests/test.ur
@@ -0,0 +1,53 @@
+table foo01 : {Id : int, Bar : string} PRIMARY KEY Id
+table foo10 : {Id : int, Bar : string} PRIMARY KEY Id
+
+fun flush01 () : transaction page =
+ dml (INSERT INTO foo01 (Id, Bar) VALUES (42, "baz01"));
+ dml (UPDATE foo01 SET Bar = "baz01" WHERE Id = 42);
+ return <xml><body>
+ Flushed 1!
+ </body></xml>
+
+fun flush10 () : transaction page =
+ dml (UPDATE foo10 SET Bar = "baz10" WHERE Id = 42);
+ return <xml><body>
+ Flushed 2!
+ </body></xml>
+
+fun flush11 () : transaction page =
+ dml (UPDATE foo01 SET Bar = "baz11" WHERE Id = 42);
+ dml (UPDATE foo10 SET Bar = "baz11" WHERE Id = 42);
+ return <xml><body>
+ Flushed 1 and 2!
+ </body></xml>
+
+fun cache01 () : transaction page =
+ res <- oneOrNoRows (SELECT foo01.Bar FROM foo01 WHERE foo01.Id = 42);
+ return <xml><body>
+ Reading 1.
+ {case res of
+ None => <xml></xml>
+ | Some row => <xml>{[row.Foo01.Bar]}</xml>}
+ </body></xml>
+
+fun cache10 () : transaction page =
+ res <- oneOrNoRows (SELECT foo10.Bar FROM foo10 WHERE foo10.Id = 42);
+ return <xml><body>
+ Reading 2.
+ {case res of
+ None => <xml></xml>
+ | Some row => <xml>{[row.Foo10.Bar]}</xml>}
+ </body></xml>
+
+fun cache11 () : transaction page =
+ res <- oneOrNoRows (SELECT foo01.Bar FROM foo01 WHERE foo01.Id = 42);
+ bla <- oneOrNoRows (SELECT foo10.Bar FROM foo10 WHERE foo10.Id = 42);
+ return <xml><body>
+ Reading 1 and 2.
+ {case res of
+ None => <xml></xml>
+ | Some row => <xml>{[row.Foo01.Bar]}</xml>}
+ {case bla of
+ None => <xml></xml>
+ | Some row => <xml>{[row.Foo10.Bar]}</xml>}
+ </body></xml>
diff --git a/caching-tests/test.urp b/caching-tests/test.urp
new file mode 100644
index 00000000..123f58e5
--- /dev/null
+++ b/caching-tests/test.urp
@@ -0,0 +1,7 @@
+database test.db
+sql test.sql
+safeGet Test/flush01
+safeGet Test/flush10
+safeGet Test/flush11
+
+test
diff --git a/caching-tests/test.urs b/caching-tests/test.urs
new file mode 100644
index 00000000..ce7d0350
--- /dev/null
+++ b/caching-tests/test.urs
@@ -0,0 +1,6 @@
+val cache01 : unit -> transaction page
+val cache10 : unit -> transaction page
+val cache11 : unit -> transaction page
+val flush01 : unit -> transaction page
+val flush10 : unit -> transaction page
+val flush11 : unit -> transaction page
diff --git a/include/urweb/urweb_cpp.h b/include/urweb/urweb_cpp.h
index 5a4411e8..ea733c8c 100644
--- a/include/urweb/urweb_cpp.h
+++ b/include/urweb/urweb_cpp.h
@@ -77,6 +77,10 @@ int uw_next_entry(struct uw_context *);
void uw_write(struct uw_context *, const char*);
+// For caching.
+void uw_recordingStart(struct uw_context *);
+char *uw_recordingRead(struct uw_context *);
+
uw_Basis_source uw_Basis_new_client_source(struct uw_context *, uw_Basis_string);
uw_unit uw_Basis_set_client_source(struct uw_context *, uw_Basis_source, uw_Basis_string);
diff --git a/src/c/urweb.c b/src/c/urweb.c
index a1583f0c..78afcd05 100644
--- a/src/c/urweb.c
+++ b/src/c/urweb.c
@@ -476,6 +476,9 @@ struct uw_context {
char *output_buffer;
size_t output_buffer_size;
+
+ // For caching.
+ char *recording;
};
size_t uw_headers_max = SIZE_MAX;
@@ -558,6 +561,8 @@ uw_context uw_init(int id, uw_loggers *lg) {
ctx->output_buffer = malloc(1);
ctx->output_buffer_size = 1;
+ ctx->recording = 0;
+
return ctx;
}
@@ -1661,6 +1666,19 @@ void uw_write(uw_context ctx, const char* s) {
*ctx->page.front = 0;
}
+void uw_recordingStart(uw_context ctx) {
+ // TODO: remove following debug statement.
+ uw_write(ctx, "<!--Recording started here-->");
+ ctx->recording = ctx->page.front;
+}
+
+char *uw_recordingRead(uw_context ctx) {
+ char *recording = strdup(ctx->recording);
+ // TODO: remove following debug statement.
+ uw_write(ctx, "<!--Recording read here-->");
+ return recording;
+}
+
char *uw_Basis_attrifyInt(uw_context ctx, uw_Basis_int n) {
char *result;
int len;
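The pair of functions added to urweb.c implements output recording: uw_recordingStart remembers the current end of the page buffer, and uw_recordingRead returns a strdup'd copy of everything written after that mark. A minimal sketch of the primitive in isolation, assuming only the declarations from urweb_cpp.h above; record_example and snippet are illustrative names, not part of the patch, and the real callers are the generated uw_Cache_* helpers emitted by cjr_print.sml below.

    #include <stdlib.h>
    #include <urweb/urweb_cpp.h>

    /* Illustrative use of the recording primitive added above. */
    static void record_example(struct uw_context *ctx) {
      uw_write(ctx, "<p>not recorded</p>");
      uw_recordingStart(ctx);                 /* mark the current end of the page buffer */
      uw_write(ctx, "<p>recorded</p>");
      char *snippet = uw_recordingRead(ctx);  /* copy of everything after the mark */
      /* snippet now holds "<p>recorded</p>"; the debug HTML comments written by
         the TODO-marked uw_write calls land in the page, not in the copy. */
      free(snippet);
    }
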
diff --git a/src/cjr_print.sml b/src/cjr_print.sml
index 05dce35e..ecd29f71 100644
--- a/src/cjr_print.sml
+++ b/src/cjr_print.sml
@@ -16,7 +16,7 @@
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
@@ -734,7 +734,7 @@ fun unurlify fromClient env (t, loc) =
string (Int.toString (size has_arg)),
string ", ((*request)[0] == '/' ? ++*request : NULL), ",
newline,
-
+
if unboxable then
unurlify' "(*request)" (#1 t)
else
@@ -914,7 +914,7 @@ fun unurlify fromClient env (t, loc) =
space,
string "4, ((*request)[0] == '/' ? ++*request : NULL), ",
newline,
-
+
string "({",
newline,
p_typ env (t, loc),
@@ -1188,7 +1188,7 @@ fun urlify env t =
string "(ctx,",
space,
string "it",
- string (Int.toString level),
+ string (Int.toString level),
string ");",
newline]
else
@@ -1388,7 +1388,7 @@ fun urlify env t =
string (Int.toString level),
string ");",
newline])
-
+
| _ => (ErrorMsg.errorAt loc "Unable to choose a URL encoding function";
space)
in
@@ -1578,7 +1578,7 @@ and p_exp' par tail env (e, loc) =
newline],
string "tmp;",
newline,
- string "})"]
+ string "})"]
end
| ENone _ => string "NULL"
| ESome (t, e) =>
@@ -2078,7 +2078,7 @@ and p_exp' par tail env (e, loc) =
space,
p_exp' false false (E.pushERel
(E.pushERel env "r" (TRecord rnum, loc))
- "acc" state)
+ "acc" state)
body,
string ";",
newline]
@@ -2102,7 +2102,7 @@ and p_exp' par tail env (e, loc) =
newline,
string "uw_ensure_transaction(ctx);",
newline,
-
+
case prepared of
NONE =>
box [string "char *query = ",
@@ -2187,7 +2187,7 @@ and p_exp' par tail env (e, loc) =
string "uw_ensure_transaction(ctx);",
newline,
newline,
-
+
#dmlPrepared (Settings.currentDbms ()) {loc = loc,
id = id,
dml = dml',
@@ -3378,6 +3378,50 @@ fun p_file env (ds, ps) =
newline,
newline,
+ (* For caching. *)
+ box (List.map
+ (fn index =>
+ let val i = Int.toString index
+ in box [string "static char *cache",
+ string i,
+ string " = NULL;",
+ newline,
+ string "static uw_Basis_bool uw_Cache_check",
+ string i,
+ string "(uw_context ctx) { puts(\"Checked ",
+ string i,
+ string "\"); if (cache",
+ string i,
+ string " == NULL) { uw_recordingStart(ctx); return uw_Basis_False; } else { uw_write(ctx, cache",
+ string i,
+ string "); return uw_Basis_True; } };",
+ newline,
+ string "static uw_unit uw_Cache_store",
+ string i,
+ string "(uw_context ctx) { cache",
+ string i,
+ string " = uw_recordingRead(ctx); puts(\"Stored ",
+ string i,
+ string "\"); return uw_unit_v; };",
+ newline,
+ string "static uw_unit uw_Cache_flush",
+ string i,
+ string "(uw_context ctx) { free(cache",
+ string i,
+ string "); cache",
+ string i,
+ string " = NULL; puts(\"Flushed ",
+ string i,
+ string "\"); return uw_unit_v; };",
+ newline,
+ string "static uw_unit uw_Cache_ready",
+ string i,
+ string "(uw_context ctx) { return uw_unit_v; };",
+ newline,
+ newline]
+ end)
+ (!SqlCache.ffiIndices)),
+ newline,
p_list_sep newline (fn x => x) pds,
newline,
@@ -3433,7 +3477,7 @@ fun p_file env (ds, ps) =
makeChecker ("uw_check_envVar", Settings.getEnvVarRules ()),
newline,
-
+
string "extern void uw_sign(const char *in, char *out);",
newline,
string "extern int uw_hash_blocksize;",
@@ -3480,7 +3524,7 @@ fun p_file env (ds, ps) =
newline,
string ("uw_write_header(ctx, \"Last-modified: " ^ Date.fmt rfcFmt nowD ^ "\\r\\n\");"),
newline,
- string ("uw_write_header(ctx, \"Cache-Control: max-age=31536000, public\\r\\n\");"),
+ string ("uw_write_header(ctx, \"Cache-Control: max-age=31536000, public\\r\\n\");"),
newline,
string "uw_write(ctx, jslib);",
newline,
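Unfolded, the box of string fragments added to p_file above emits, for each cache index in SqlCache.ffiIndices (index 0 shown as an example), C along the following lines; line breaks are added here for readability, and the trailing semicolons after the function bodies are reproduced as generated.

    static char *cache0 = NULL;
    static uw_Basis_bool uw_Cache_check0(uw_context ctx) {
      puts("Checked 0");
      if (cache0 == NULL) { uw_recordingStart(ctx); return uw_Basis_False; }
      else { uw_write(ctx, cache0); return uw_Basis_True; }
    };
    static uw_unit uw_Cache_store0(uw_context ctx) {
      cache0 = uw_recordingRead(ctx);
      puts("Stored 0");
      return uw_unit_v;
    };
    static uw_unit uw_Cache_flush0(uw_context ctx) {
      free(cache0);
      cache0 = NULL;
      puts("Flushed 0");
      return uw_unit_v;
    };
    static uw_unit uw_Cache_ready0(uw_context ctx) { return uw_unit_v; };
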
diff --git a/src/compiler.sig b/src/compiler.sig
index fa131cf4..a0a653a7 100644
--- a/src/compiler.sig
+++ b/src/compiler.sig
@@ -16,7 +16,7 @@
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
@@ -122,6 +122,7 @@ signature COMPILER = sig
val pathcheck : (Mono.file, Mono.file) phase
val sidecheck : (Mono.file, Mono.file) phase
val sigcheck : (Mono.file, Mono.file) phase
+ val sqlCache : (Mono.file, Mono.file) phase
val cjrize : (Mono.file, Cjr.file) phase
val prepare : (Cjr.file, Cjr.file) phase
val checknest : (Cjr.file, Cjr.file) phase
@@ -137,12 +138,12 @@ signature COMPILER = sig
val toCorify : (string, Core.file) transform
val toCore_untangle : (string, Core.file) transform
val toShake1 : (string, Core.file) transform
- val toEspecialize1' : (string, Core.file) transform
+ val toEspecialize1' : (string, Core.file) transform
val toShake1' : (string, Core.file) transform
val toRpcify : (string, Core.file) transform
val toCore_untangle2 : (string, Core.file) transform
val toShake2 : (string, Core.file) transform
- val toEspecialize1 : (string, Core.file) transform
+ val toEspecialize1 : (string, Core.file) transform
val toCore_untangle3 : (string, Core.file) transform
val toShake3 : (string, Core.file) transform
val toTag : (string, Core.file) transform
@@ -186,6 +187,7 @@ signature COMPILER = sig
val toPathcheck : (string, Mono.file) transform
val toSidecheck : (string, Mono.file) transform
val toSigcheck : (string, Mono.file) transform
+ val toSqlCache : (string, Mono.file) transform
val toCjrize : (string, Cjr.file) transform
val toPrepare : (string, Cjr.file) transform
val toChecknest : (string, Cjr.file) transform
diff --git a/src/compiler.sml b/src/compiler.sml
index 269a7824..fd143485 100644
--- a/src/compiler.sml
+++ b/src/compiler.sml
@@ -16,7 +16,7 @@
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
@@ -25,7 +25,7 @@
* POSSIBILITY OF SUCH DAMAGE.
*)
-structure Compiler :> COMPILER = struct
+structure Compiler :> COMPILER = struct
structure UrwebLrVals = UrwebLrValsFn(structure Token = LrParser.Token)
structure Lex = UrwebLexFn(structure Tokens = UrwebLrVals.Tokens)
@@ -268,7 +268,7 @@ val parseUr = {
| _ => absyn
end
handle LrParser.ParseError => [],
- print = SourcePrint.p_file}
+ print = SourcePrint.p_file}
fun p_job ({prefix, database, exe, sql, sources, debug, profile,
timeout, ffi, link, headers, scripts,
@@ -606,7 +606,7 @@ fun parseUrp' accLibs fname =
filterEnv = rev (!env),
sources = sources,
protocol = !protocol,
- dbms = !dbms,
+ dbms = (*!dbms*) SOME "sqlite",
sigFile = !sigFile,
safeGets = rev (!safeGets),
onError = !onError,
@@ -1091,7 +1091,7 @@ val parse = {
ErrorMsg.error ("Rooted module " ^ full ^ " has multiple versions.")
else
();
-
+
makeD true "" pieces
before ignore (foldl (fn (new, path) =>
let
@@ -1439,12 +1439,19 @@ val sigcheck = {
val toSigcheck = transform sigcheck "sigcheck" o toSidecheck
+val sqlCache = {
+ func = SqlCache.go,
+ print = MonoPrint.p_file MonoEnv.empty
+}
+
+val toSqlCache = transform sqlCache "sqlCache" o toSigcheck
+
val cjrize = {
func = Cjrize.cjrize,
print = CjrPrint.p_file CjrEnv.empty
}
-val toCjrize = transform cjrize "cjrize" o toSigcheck
+val toCjrize = transform cjrize "cjrize" o toSqlCache
val prepare = {
func = Prepare.prepare,
@@ -1597,7 +1604,7 @@ fun compile job =
compileC {cname = cname, oname = oname, ename = ename, libs = libs,
profile = #profile job, debug = #debug job, linker = #linker job, link = #link job}
-
+
before cleanup ())
end
handle ex => (((cleanup ()) handle _ => ()); raise ex)
diff --git a/src/multimap_fn.sml b/src/multimap_fn.sml
new file mode 100644
index 00000000..585b741f
--- /dev/null
+++ b/src/multimap_fn.sml
@@ -0,0 +1,14 @@
+functor MultimapFn (structure KeyMap : ORD_MAP structure ValSet : ORD_SET) = struct
+ type key = KeyMap.Key.ord_key
+ type item = ValSet.item
+ type items = ValSet.set
+ type multimap = ValSet.set KeyMap.map
+ fun inserts (kToVs : multimap, k : key, vs : items) : multimap =
+ KeyMap.unionWith ValSet.union (kToVs, KeyMap.singleton (k, vs))
+ fun insert (kToVs : multimap, k : key, v : item) : multimap =
+ inserts (kToVs, k, ValSet.singleton v)
+ fun find (kToVs : multimap, k : key) =
+ case KeyMap.find (kToVs, k) of
+ SOME vs => vs
+ | NONE => ValSet.empty
+end
diff --git a/src/sources b/src/sources
index f75803a3..b468c9a5 100644
--- a/src/sources
+++ b/src/sources
@@ -186,8 +186,13 @@ $(SRC)/mono_shake.sml
$(SRC)/fuse.sig
$(SRC)/fuse.sml
+$(SRC)/sql.sig
$(SRC)/sql.sml
+$(SRC)/multimap_fn.sml
+
+$(SRC)/sql_cache.sml
+
$(SRC)/iflow.sig
$(SRC)/iflow.sml
diff --git a/src/sql.sig b/src/sql.sig
new file mode 100644
index 00000000..573a8baf
--- /dev/null
+++ b/src/sql.sig
@@ -0,0 +1,91 @@
+signature SQL = sig
+
+val debug : bool ref
+
+type lvar = int
+
+datatype func =
+ DtCon0 of string
+ | DtCon1 of string
+ | UnCon of string
+ | Other of string
+
+datatype exp =
+ Const of Prim.t
+ | Var of int
+ | Lvar of lvar
+ | Func of func * exp list
+ | Recd of (string * exp) list
+ | Proj of exp * string
+
+datatype reln =
+ Known
+ | Sql of string
+ | PCon0 of string
+ | PCon1 of string
+ | Eq
+ | Ne
+ | Lt
+ | Le
+ | Gt
+ | Ge
+
+datatype prop =
+ True
+ | False
+ | Unknown
+ | And of prop * prop
+ | Or of prop * prop
+ | Reln of reln * exp list
+ | Cond of exp * prop
+
+datatype chunk =
+ String of string
+ | Exp of Mono.exp
+
+type 'a parser = chunk list -> ('a * chunk list) option
+
+val parse : 'a parser -> Mono.exp -> 'a option
+
+datatype Rel =
+ Exps of exp * exp -> prop
+ | Props of prop * prop -> prop
+
+datatype sqexp =
+ SqConst of Prim.t
+ | SqTrue
+ | SqFalse
+ | SqNot of sqexp
+ | Field of string * string
+ | Computed of string
+ | Binop of Rel * sqexp * sqexp
+ | SqKnown of sqexp
+ | Inj of Mono.exp
+ | SqFunc of string * sqexp
+ | Unmodeled
+ | Null
+
+datatype ('a,'b) sum = inl of 'a | inr of 'b
+
+datatype sitem =
+ SqField of string * string
+ | SqExp of sqexp * string
+
+type query1 = {Select : sitem list,
+ From : (string * string) list,
+ Where : sqexp option}
+
+datatype query =
+ Query1 of query1
+ | Union of query * query
+
+val query : query parser
+
+datatype dml =
+ Insert of string * (string * sqexp) list
+ | Delete of string * sqexp
+ | Update of string * (string * sqexp) list * sqexp
+
+val dml : dml parser
+
+end
diff --git a/src/sql.sml b/src/sql.sml
index c314eb3d..8642c9d2 100644
--- a/src/sql.sml
+++ b/src/sql.sml
@@ -1,4 +1,4 @@
-structure Sql = struct
+structure Sql :> SQL = struct
open Mono
@@ -238,7 +238,7 @@ fun string chs =
end
else
NONE
- | _ => NONE
+ | _ => NONE
val prim =
altL [wrap (follow (wrapP (follow (keep Char.isDigit) (follow (const ".") (keep Char.isDigit)))
@@ -267,7 +267,7 @@ fun sqlify chs =
((PCon (_, PConFfi {mod = "Basis", con = "False", ...}, NONE), _),
(EPrim (Prim.String "FALSE"), _))], _), _) :: chs =>
SOME (e, chs)
-
+
| _ => NONE
fun constK s = wrap (const s) (fn () => s)
@@ -317,7 +317,7 @@ fun sqexp chs =
and known chs = wrap (follow known' (follow (const "(") (follow sqexp (const ")"))))
(fn ((), ((), (e, ()))) => e) chs
-
+
and func chs = wrap (follow funcName (follow (const "(") (follow sqexp (const ")"))))
(fn (f, ((), (e, ()))) => (f, e)) chs
diff --git a/src/sql_cache.sml b/src/sql_cache.sml
new file mode 100644
index 00000000..7f9d98d0
--- /dev/null
+++ b/src/sql_cache.sml
@@ -0,0 +1,186 @@
+structure SqlCache = struct
+
+open Sql
+open Mono
+
+structure IS = IntBinarySet
+structure IM = IntBinaryMap
+structure StringKey = struct type ord_key = string val compare = String.compare end
+structure SS = BinarySetFn (StringKey)
+structure SM = BinaryMapFn (StringKey)
+structure SIMM = MultimapFn (structure KeyMap = SM structure ValSet = IS)
+
+val ffiIndices : int list ref = ref []
+val rs : int list ref = ref []
+val ws : int list ref = ref []
+
+val rec tablesRead =
+ fn Query1 {From=tablePairs, ...} => SS.fromList (map #1 tablePairs)
+ | Union (q1,q2) => SS.union (tablesRead q1, tablesRead q2)
+
+val tableWritten =
+ fn Insert (tab, _) => tab
+ | Delete (tab, _) => tab
+ | Update (tab, _, _) => tab
+
+fun tablesInExp' exp' =
+ let
+ val nothing = {read = SS.empty, written = SS.empty}
+ in
+ case exp' of
+ EQuery {query=e, ...} =>
+ (case parse query e of
+ SOME q => {read = tablesRead q, written = SS.empty}
+ | NONE => nothing)
+ | EDml (e, _) =>
+ (case parse dml e of
+ SOME q => {read = SS.empty, written = SS.singleton (tableWritten q)}
+ | NONE => nothing)
+ | _ => nothing
+ end
+
+val tablesInExp =
+ let
+ fun addTables (exp', {read, written}) =
+ let val {read = r, written = w} = tablesInExp' exp'
+ in {read = SS.union (r, read), written = SS.union (w, written)} end
+ in
+ MonoUtil.Exp.fold {typ = #2, exp = addTables}
+ {read = SS.empty, written = SS.empty}
+ end
+
+fun intExp (n, loc) = (EPrim (Prim.Int (Int64.fromInt n)), loc)
+fun intTyp loc = (TFfi ("Basis", "int"), loc)
+fun boolPat (b, loc) = (PCon (Enum,
+ PConFfi {mod = "Basis", datatyp = "bool", arg = NONE,
+ con = if b then "True" else "False"},
+ NONE),
+ loc)
+fun boolTyp loc = (TFfi ("Basis", "bool"), loc)
+
+fun ffiAppExp (module, func, index, loc) =
+ (EFfiApp (module, func ^ Int.toString index, []), loc)
+
+fun sequence (befores, center, afters, loc) =
+ List.foldr (fn (exp, seq) => (ESeq (exp, seq), loc))
+ (List.foldl (fn (exp, seq) => (ESeq (seq, exp), loc))
+ center
+ afters)
+ befores
+
+fun antiguardUnit (cond, exp, loc) =
+ (ECase (cond,
+ [(boolPat (false, loc), exp),
+ (boolPat (true, loc), (ERecord [], loc))],
+ {disc = boolTyp loc, result = (TRecord [], loc)}),
+ loc)
+
+fun underAbs f (exp as (exp', loc)) =
+ case exp' of
+ EAbs (x, y, z, body) => (EAbs (x, y, z, underAbs f body), loc)
+ | _ => f exp
+
+fun addCacheCheck (index, exp) =
+ let
+ fun f (body as (_, loc)) =
+ let
+ val check = ffiAppExp ("Cache", "check", index, loc)
+ val store = ffiAppExp ("Cache", "store", index, loc)
+ in
+ antiguardUnit (check, sequence ([], body, [store], loc), loc)
+ end
+ in
+ underAbs f exp
+ end
+
+fun addCacheFlush (exp, tablesToIndices) =
+ let
+ fun addIndices (table, indices) = IS.union (indices, SIMM.find (tablesToIndices, table))
+ fun f (body as (_, loc)) =
+ let
+ fun mapFfi func = List.map (fn i => ffiAppExp ("Cache", func, i, loc))
+ val flushes =
+ IS.listItems (SS.foldr addIndices IS.empty (#written (tablesInExp body)))
+
+ in
+ sequence (mapFfi "flush" flushes, body, mapFfi "ready" flushes, loc)
+ end
+ in
+ underAbs f exp
+ end
+
+val handlerIndices =
+ let
+ val isUnit =
+ fn (TRecord [], _) => true
+ | _ => false
+ fun maybeAdd (d, soFar as {readers, writers}) =
+ case d of
+ DExport (Link ReadOnly, _, name, typs, typ, _) =>
+ if List.all isUnit (typ::typs)
+ then {readers = IS.add (readers, name), writers = writers}
+ else soFar
+ | DExport (_, _, name, _, _, _) => (* Not read only. *)
+ {readers = readers, writers = IS.add (writers, name)}
+ | _ => soFar
+ in
+ MonoUtil.File.fold {typ = #2, exp = #2, decl = maybeAdd}
+ {readers = IS.empty, writers = IS.empty}
+ end
+
+fun fileFoldMapiSelected f init (file, indices) =
+ let
+ fun doExp (original as ((a, index, b, exp, c), state)) =
+ if IS.member (indices, index)
+ then let val (newExp, newState) = f (index, exp, state)
+ in ((a, index, b, newExp, c), newState) end
+ else original
+ fun doDecl decl state =
+ let
+ val result =
+ case decl of
+ DVal x =>
+ let val (y, newState) = doExp (x, state)
+ in (DVal y, newState) end
+ | DValRec xs =>
+ let val (ys, newState) = ListUtil.foldlMap doExp state xs
+ in (DValRec ys, newState) end
+ | _ => (decl, state)
+ in
+ Search.Continue result
+ end
+ fun nada x y = Search.Continue (x, y)
+ in
+ case MonoUtil.File.mapfold {typ = nada, exp = nada, decl = doDecl} file init of
+ Search.Continue x => x
+ | _ => (file, init) (* Should never happen. *)
+ end
+
+fun fileMapSelected f = #1 o fileFoldMapiSelected (fn (_, x, _) => (f x, ())) ()
+
+val addCacheChecking =
+ let
+ fun f (index, exp, tablesToIndices) =
+ (addCacheCheck (index, exp),
+ SS.foldr (fn (table, tsToIs) => SIMM.insert (tsToIs, table, index))
+ tablesToIndices
+ (#read (tablesInExp exp)))
+ in
+ fileFoldMapiSelected f (SM.empty)
+ end
+
+fun addCacheFlushing (file, tablesToIndices, writers) =
+ fileMapSelected (fn exp => addCacheFlush (exp, tablesToIndices)) (file, writers)
+
+fun go file =
+ let
+ val {readers, writers} = handlerIndices file
+ val (fileWithChecks, tablesToIndices) = addCacheChecking (file, readers)
+ in
+ rs := IS.listItems readers;
+ ws := IS.listItems writers;
+ ffiIndices := IS.listItems readers;
+ addCacheFlushing (fileWithChecks, tablesToIndices, writers)
+ end
+
+end
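
Putting the pieces together: handlerIndices classifies each export as a reader (Link ReadOnly with unit-typed argument and result types) or a writer; addCacheCheck wraps each reader so its body runs only on a cache miss and is followed by a store, while addCacheFlush brackets each writer with flush calls for every cache index that reads a table the writer's DML touches (the trailing ready calls are currently no-ops). In the compiled C, the resulting control flow looks roughly like the sketch below; the handler and body names and the choice of index 0 are illustrative, reader_body and writer_body stand in for the original handler bodies, and the real transformation rewrites Mono-level ECase/ESeq terms rather than C.

    /* Reader (e.g. Test/cache01) after addCacheCheck: on a miss, check has
       already started recording, so run the body and store the result; on a
       hit, check has already replayed the cached markup. */
    static void reader_handler(uw_context ctx) {
      if (uw_Cache_check0(ctx) == uw_Basis_False) {
        reader_body(ctx);      /* runs the SELECT and writes the page */
        uw_Cache_store0(ctx);
      }
    }

    /* Writer (e.g. Test/flush01) after addCacheFlush: invalidate every cache
       index that reads a table written by the DML, then run the body. */
    static void writer_handler(uw_context ctx) {
      uw_Cache_flush0(ctx);    /* index 0 reads a table this handler updates */
      writer_body(ctx);        /* runs the INSERT/UPDATE */
      uw_Cache_ready0(ctx);    /* currently a no-op */
    }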