From b1a6440a3fb285cdfd5301510b96b1ef3b96c050 Mon Sep 17 00:00:00 2001 From: Adam Chlipala Date: Sun, 1 Oct 2017 17:13:17 -0400 Subject: New .urp directives: mimeTypes and long form of file --- src/compiler.sig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'src/compiler.sig') diff --git a/src/compiler.sig b/src/compiler.sig index 952c7070..0ff84f1c 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -62,7 +62,8 @@ signature COMPILER = sig sigFile : string option, safeGets : string list, onError : (string * string list * string) option, - minHeap : int + minHeap : int, + mimeTypes : string option } val compile : string -> bool val compiler : string -> unit -- cgit v1.2.3 From f3373fd5809689bece7fd390f2d737aa0b43f594 Mon Sep 17 00:00:00 2001 From: Adam Chlipala Date: Sun, 3 Jun 2018 15:01:24 -0400 Subject: 'filecache' .urp directive, fixing a longstanding MonoUtil bug in the process --- doc/manual.tex | 1 + include/urweb/types_cpp.h | 1 + include/urweb/urweb_cpp.h | 4 + src/c/urweb.c | 126 ++++++++++++++++++++++++- src/cjr_print.sml | 24 ++++- src/compiler.sig | 3 + src/compiler.sml | 19 +++- src/demo.sml | 1 + src/filecache.sig | 35 +++++++ src/filecache.sml | 230 ++++++++++++++++++++++++++++++++++++++++++++++ src/mono_util.sml | 6 +- src/mysql.sml | 3 +- src/postgres.sml | 3 +- src/settings.sig | 6 +- src/settings.sml | 10 +- src/sources | 3 + src/sqlite.sml | 3 +- tests/dbupload.urp | 1 + tests/dbuploadOpt.ur | 27 ++++++ tests/dbuploadOpt.urp | 7 ++ 20 files changed, 501 insertions(+), 12 deletions(-) create mode 100644 src/filecache.sig create mode 100644 src/filecache.sml create mode 100644 tests/dbuploadOpt.ur create mode 100644 tests/dbuploadOpt.urp (limited to 'src/compiler.sig') diff --git a/doc/manual.tex b/doc/manual.tex index 985dab5b..857539db 100644 --- a/doc/manual.tex +++ b/doc/manual.tex @@ -151,6 +151,7 @@ Here is the complete list of directive forms. ``FFI'' stands for ``foreign func \item \texttt{exe FILENAME} sets the filename to which to write the output executable. The default for file \texttt{P.urp} is \texttt{P.exe}. \item \texttt{file URI FILENAME} asks for the application executable to respond to requests for \texttt{URI} by serving a snapshot of the contents of \texttt{FILENAME} as of compile time. That is, the file contents are baked into the executable. System file \texttt{/etc/mime.types} is consulted (again, at compile time) to figure out the right MIME type to suggest in the HTTP response. \item \texttt{file URI FILENAME MIME-TYPE} works like the simpler form of \texttt{file}, but the proper MIME type for the file is given directly. +\item \texttt{filecache PATH} sets a path to a directory to use for caching files stored in the SQL database. It can be expensive to schlep files back and forth between the database and an Ur/Web application, since database engines don't tend to be optimized for transferring large files. Ur/Web will still store the files in the database, as the ``version of record'' for your whole, consistent data set, but the application will try to query the database only in terms of cryptographic hashes, from which files can be retrieved from the cache. (This feature is currently only available for PostgreSQL, with the module \texttt{pgcrypto} installed, to drive SHA512 hashing. It would defeat the purpose to run the hashing operation in the application rather than the database engine!) \item \texttt{ffi FILENAME} reads the file \texttt{FILENAME.urs} to determine the interface to a new FFI module. 
The name of the module is calculated from \texttt{FILENAME} in the same way as for normal source files. See the files \texttt{include/urweb/urweb\_cpp.h} and \texttt{src/c/urweb.c} for examples of C headers and implementations for FFI modules. In general, every type or value \texttt{Module.ident} becomes \texttt{uw\_Module\_ident} in C. \item \texttt{html5} asks to generate HTML5 code, which primarily affects the first few lines of the output documents, like the \texttt{DOCTYPE}. This option is on by default. \item \texttt{include FILENAME} adds \texttt{FILENAME} to the list of files to be \texttt{\#include}d in C sources. This is most useful for interfacing with new FFI modules. diff --git a/include/urweb/types_cpp.h b/include/urweb/types_cpp.h index 2fa473ac..0c546d1c 100644 --- a/include/urweb/types_cpp.h +++ b/include/urweb/types_cpp.h @@ -105,6 +105,7 @@ typedef struct { uw_Basis_string time_format; int is_html5; + char *file_cache; } uw_app; typedef struct { diff --git a/include/urweb/urweb_cpp.h b/include/urweb/urweb_cpp.h index 2c60a781..5f1144b8 100644 --- a/include/urweb/urweb_cpp.h +++ b/include/urweb/urweb_cpp.h @@ -430,4 +430,8 @@ void *uw_Sqlcache_flush(struct uw_context *, uw_Sqlcache_Cache *, char **); int strcmp_nullsafe(const char *, const char *); +uw_unit uw_Basis_cache_file(struct uw_context *, uw_Basis_blob contents); +uw_Basis_blob uw_Basis_check_filecache(struct uw_context *, uw_Basis_string hash); +uw_Basis_bool uw_Basis_filecache_missed(struct uw_context *); + #endif diff --git a/src/c/urweb.c b/src/c/urweb.c index 283efcdd..e7efae38 100644 --- a/src/c/urweb.c +++ b/src/c/urweb.c @@ -13,8 +13,8 @@ #include #include #include -#include #include +#include #include #include @@ -514,6 +514,11 @@ struct uw_context { uw_Sqlcache_Unlock *cacheUnlock; int remoteSock; + + int file_cache_missed; + // Set if we are recovering from a miss in the file cache in handling an SQL + // query that only returns hashes of files. If so, this time around we will + // run queries to return actual file contents instead. 
}; size_t uw_headers_max = SIZE_MAX; @@ -608,6 +613,8 @@ uw_context uw_init(int id, uw_loggers *lg) { ctx->cacheUnlock = NULL; + ctx->file_cache_missed = 0; + return ctx; } @@ -3643,6 +3650,8 @@ int uw_commit(uw_context ctx) { } } + ctx->file_cache_missed = 0; + return 0; } @@ -5058,3 +5067,118 @@ int strcmp_nullsafe(const char *str1, const char *str2) { else return 1; } + +static int is_valid_hash(uw_Basis_string hash) { + for (; *hash; ++hash) + if (!isxdigit(*hash)) + return 0; + + return 1; +} + +uw_unit uw_Basis_cache_file(uw_context ctx, uw_Basis_blob contents) { + char *dir = ctx->app->file_cache, path[1024], tempfile[1024]; + unsigned char *res, *hash; + char *hash_encoded; + int fd, len, i; + ssize_t written_so_far = 0; + + if (!dir) + return uw_unit_v; + + hash = uw_malloc(ctx, SHA512_DIGEST_LENGTH); + res = SHA512((unsigned char *)contents.data, contents.size, hash); + if (!res) + uw_error(ctx, FATAL, "Can't hash file contents"); + + hash_encoded = uw_malloc(ctx, SHA512_DIGEST_LENGTH * 2 + 1); + for (i = 0; i < SHA512_DIGEST_LENGTH; ++i) + sprintf(hash_encoded + 2 * i, "%02x", (int)hash[i]); + hash_encoded[SHA512_DIGEST_LENGTH * 2] = 0; + + len = snprintf(tempfile, sizeof tempfile, "%s/tmpXXXXXX", dir); + if (len < 0 || len >= sizeof tempfile) + uw_error(ctx, FATAL, "Error assembling file path for cache (temporary)"); + + fd = mkstemp(tempfile); + if (fd < 0) + uw_error(ctx, FATAL, "Error creating temporary file for cache"); + + while (written_so_far < contents.size) { + ssize_t written_just_now = write(fd, contents.data + written_so_far, contents.size - written_so_far); + if (written_just_now <= 0) { + close(fd); + uw_error(ctx, FATAL, "Error writing all bytes to cached file"); + } + written_so_far += written_just_now; + } + + close(fd); + + len = snprintf(path, sizeof path, "%s/%s", dir, hash_encoded); + if (len < 0 || len >= sizeof path) + uw_error(ctx, FATAL, "Error assembling file path for cache"); + + if (rename(tempfile, path)) + uw_error(ctx, FATAL, "Error renaming temporary file into cache"); + + return uw_unit_v; +} + +uw_Basis_blob uw_Basis_check_filecache(uw_context ctx, uw_Basis_string hash) { + char path[1024], *dir = ctx->app->file_cache, *filedata; + int len; + long size, read_so_far = 0; + FILE *fp; + uw_Basis_blob res; + + // Hashes come formatted for printing by Postgres, which means they start with + // two extra characters. Let's remove them. 
+ if (!hash[0] || !hash[1]) + uw_error(ctx, FATAL, "Hash to check against file cache came in not in Postgres format: %s", hash); + hash += 2; + + if (!dir) + uw_error(ctx, FATAL, "Checking file cache when no directory is set"); + + if (!is_valid_hash(hash)) + uw_error(ctx, FATAL, "Checking file cache with invalid hash %s", hash); + + len = snprintf(path, sizeof path, "%s/%s", dir, hash); + if (len < 0 || len >= sizeof path) + uw_error(ctx, FATAL, "Error assembling file path for cache"); + + fp = fopen(path, "r"); + if (!fp) { + ctx->file_cache_missed = 1; + uw_error(ctx, UNLIMITED_RETRY, "Missed in the file cache for hash %s", hash); + } + uw_push_cleanup(ctx, (void (*)(void *))fclose, fp); + + if (fseek(fp, 0L, SEEK_END)) + uw_error(ctx, FATAL, "Error seeking to end of cached file"); + + size = ftell(fp); + if (size < 0) + uw_error(ctx, FATAL, "Error getting size of cached file"); + + rewind(fp); + filedata = uw_malloc(ctx, size); + + while (read_so_far < size) { + size_t just_read = fread(filedata + read_so_far, 1, size - read_so_far, fp); + if (just_read <= 0) + uw_error(ctx, FATAL, "Error reading all bytes of cached file"); + read_so_far += just_read; + } + + uw_pop_cleanup(ctx); + + res.size = size; + res.data = filedata; + return res; +} + +uw_Basis_bool uw_Basis_filecache_missed(uw_context ctx) { + return !!(ctx->file_cache_missed); +} diff --git a/src/cjr_print.sml b/src/cjr_print.sml index 43265fb8..c83da031 100644 --- a/src/cjr_print.sml +++ b/src/cjr_print.sml @@ -2189,6 +2189,25 @@ and p_exp' par tail env (e, loc) = string ";"]) inputs, newline, + case Settings.getFileCache () of + NONE => box [] + | SOME _ => + p_list_sepi newline + (fn i => fn (_, t) => + case t of + Settings.Blob => + box [string "uw_Basis_cache_file(ctx, arg", + string (Int.toString (i + 1)), + string ");"] + | Settings.Nullable Settings.Blob => + box [string "if (arg", + string (Int.toString (i + 1)), + string ") uw_Basis_cache_file(ctx, arg", + string (Int.toString (i + 1)), + string ");"] + | _ => box []) + inputs, + newline, string "uw_ensure_transaction(ctx);", newline, newline, @@ -3677,7 +3696,10 @@ fun p_file env (ds, ps) = "uw_input_num", "uw_cookie_sig", "uw_check_url", "uw_check_mime", "uw_check_requestHeader", "uw_check_responseHeader", "uw_check_envVar", "uw_check_meta", case onError of NONE => "NULL" | SOME _ => "uw_onError", "my_periodics", "\"" ^ Prim.toCString (Settings.getTimeFormat ()) ^ "\"", - if Settings.getIsHtml5 () then "1" else "0"], + if Settings.getIsHtml5 () then "1" else "0", + (case Settings.getFileCache () of + NONE => "NULL" + | SOME s => "\"" ^ Prim.toCString s ^ "\"")], string "};", newline] end diff --git a/src/compiler.sig b/src/compiler.sig index 0ff84f1c..bcf69fd4 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -60,6 +60,7 @@ signature COMPILER = sig protocol : string option, dbms : string option, sigFile : string option, + fileCache : string option, safeGets : string list, onError : (string * string list * string) option, minHeap : int, @@ -125,6 +126,7 @@ signature COMPILER = sig val pathcheck : (Mono.file, Mono.file) phase val sidecheck : (Mono.file, Mono.file) phase val sigcheck : (Mono.file, Mono.file) phase + val filecache : (Mono.file, Mono.file) phase val sqlcache : (Mono.file, Mono.file) phase val cjrize : (Mono.file, Cjr.file) phase val prepare : (Cjr.file, Cjr.file) phase @@ -191,6 +193,7 @@ signature COMPILER = sig val toPathcheck : (string, Mono.file) transform val toSidecheck : (string, Mono.file) transform val toSigcheck : (string, Mono.file) 
transform + val toFilecache : (string, Mono.file) transform val toSqlcache : (string, Mono.file) transform val toCjrize : (string, Cjr.file) transform val toPrepare : (string, Cjr.file) transform diff --git a/src/compiler.sml b/src/compiler.sml index 3fb0b767..f724bf56 100644 --- a/src/compiler.sml +++ b/src/compiler.sml @@ -64,6 +64,7 @@ type job = { protocol : string option, dbms : string option, sigFile : string option, + fileCache : string option, safeGets : string list, onError : (string * string list * string) option, minHeap : int, @@ -388,6 +389,7 @@ fun institutionalizeJob (job : job) = Settings.setOnError (#onError job); Settings.setMinHeap (#minHeap job); Settings.setSigFile (#sigFile job); + Settings.setFileCache (#fileCache job); Settings.setMimeFilePath (Option.getOpt (#mimeTypes job, "/etc/mime.types"))) datatype commentableLine = @@ -467,6 +469,7 @@ fun parseUrp' accLibs fname = protocol = NONE, dbms = NONE, sigFile = NONE, + fileCache = NONE, safeGets = [], onError = NONE, minHeap = 0, @@ -601,6 +604,7 @@ fun parseUrp' accLibs fname = val protocol = ref NONE val dbms = ref NONE val sigFile = ref (Settings.getSigFile ()) + val fileCache = ref (Settings.getFileCache ()) val safeGets = ref [] val onError = ref NONE val minHeap = ref 0 @@ -640,6 +644,7 @@ fun parseUrp' accLibs fname = protocol = !protocol, dbms = !dbms, sigFile = !sigFile, + fileCache = !fileCache, safeGets = rev (!safeGets), onError = !onError, minHeap = !minHeap, @@ -702,6 +707,7 @@ fun parseUrp' accLibs fname = protocol = mergeO #2 (#protocol old, #protocol new), dbms = mergeO #2 (#dbms old, #dbms new), sigFile = mergeO #2 (#sigFile old, #sigFile new), + fileCache = mergeO #2 (#fileCache old, #fileCache new), safeGets = #safeGets old @ #safeGets new, onError = mergeO #2 (#onError old, #onError new), minHeap = Int.max (#minHeap old, #minHeap new), @@ -790,6 +796,10 @@ fun parseUrp' accLibs fname = (case !sigFile of NONE => sigFile := SOME arg | SOME _ => ()) + | "filecache" => + (case !fileCache of + NONE => fileCache := SOME arg + | SOME _ => ()) | "exe" => (case !exe of NONE => exe := SOME (relify arg) @@ -1513,6 +1523,13 @@ val sigcheck = { val toSigcheck = transform sigcheck "sigcheck" o toSidecheck +val filecache = { + func = FileCache.instrument, + print = MonoPrint.p_file MonoEnv.empty +} + +val toFilecache = transform filecache "filecache" o toSigcheck + val sqlcache = { func = (fn file => if Settings.getSqlcache () @@ -1521,7 +1538,7 @@ val sqlcache = { print = MonoPrint.p_file MonoEnv.empty } -val toSqlcache = transform sqlcache "sqlcache" o toSigcheck +val toSqlcache = transform sqlcache "sqlcache" o toFilecache val cjrize = { func = Cjrize.cjrize, diff --git a/src/demo.sml b/src/demo.sml index a682d28d..1e58e2f8 100644 --- a/src/demo.sml +++ b/src/demo.sml @@ -123,6 +123,7 @@ fun make' {prefix, dirname, guided} = protocol = mergeWith #2 (#protocol combined, #protocol urp), dbms = mergeWith #2 (#dbms combined, #dbms urp), sigFile = mergeWith #2 (#sigFile combined, #sigFile urp), + fileCache = mergeWith #2 (#fileCache combined, #fileCache urp), safeGets = #safeGets combined @ #safeGets urp, onError = NONE, minHeap = 0, diff --git a/src/filecache.sig b/src/filecache.sig new file mode 100644 index 00000000..db57135f --- /dev/null +++ b/src/filecache.sig @@ -0,0 +1,35 @@ +(* Copyright (c) 2013, Adam Chlipala + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * - The names of contributors may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + *) + +(* Instrument to check a cache in the file system, to reconsitute blobs without + * silly shipping over an SQL connection. *) + +signature FILE_CACHE = sig + + val instrument : Mono.file -> Mono.file + +end diff --git a/src/filecache.sml b/src/filecache.sml new file mode 100644 index 00000000..e2291c10 --- /dev/null +++ b/src/filecache.sml @@ -0,0 +1,230 @@ +(* Copyright (c) 2013, Adam Chlipala + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * - The names of contributors may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ *) + +structure FileCache :> FILE_CACHE = struct + +open Mono + +structure SS = BinarySetFn(struct + type ord_key = string + val compare = String.compare + end) + +val hasBlob = + MonoUtil.Typ.exists (fn TFfi ("Basis", "blob") => true + | _ => false) + +val unBlob = + MonoUtil.Typ.map (fn TFfi ("Basis", "blob") => TFfi ("Basis", "string") + | t => t) + +fun nodups (exps : (string * typ) list, tables : (string * (string * typ) list) list) = + let + val cols = map #1 exps @ ListUtil.mapConcat (map #1 o #2) tables + + val (_, good) = + foldl (fn (name, (names, good)) => + if SS.member(names, name) then + (names, false) + else + (SS.add (names, name), good)) (SS.empty, true) cols + in + good + end + +fun instrument file = + let + fun exp e = + case e of + EQuery {exps, tables, state, query, body, initial} => + if (List.exists (hasBlob o #2) exps + orelse List.exists (List.exists (hasBlob o #2) o #2) tables) + andalso nodups (exps, tables) then + let + val exps = ListMergeSort.sort + (fn ((x, _), (y, _)) => String.compare (x, y) = GREATER) + exps + val tables = ListMergeSort.sort + (fn ((x, _), (y, _)) => String.compare (x, y) = GREATER) + tables + val tables = map (fn (x, xts) => + (x, ListMergeSort.sort + (fn ((x, _), (y, _)) => String.compare (x, y) = GREATER) + xts)) tables + + val loc = #2 query + + fun wrapCol (name, t) = + case #1 t of + TFfi ("Basis", "blob") => + "DIGEST(" ^ name ^ ", 'sha512')" + | TOption t' => wrapCol (name, t') + | _ => name + + val mangle = Settings.mangleSql + + val cols = map (fn (name, t) => (mangle name, t)) exps + @ ListUtil.mapConcat (fn (_, cols) => + map (fn (name, t) => + (mangle name, + t)) cols) tables + + val prequery = + "SELECT " + ^ String.concatWith ", " (map wrapCol cols) + ^ " FROM (" + + val postquery = + ") AS Wrap" + + val wrapped_query = + (EStrcat ((EPrim (Prim.String (Prim.Normal, prequery)), loc), + (EStrcat (query, + (EPrim (Prim.String (Prim.Normal, postquery)), loc)), loc)), loc) + val wrapped_query = MonoOpt.optExp wrapped_query + + val exps' = map (fn (name, t) => (name, unBlob t)) exps + val tables' = map (fn (name, cols) => + (name, + map (fn (cname, t) => (cname, unBlob t)) cols)) tables + + val blob = (TFfi ("Basis", "blob"), loc) + val string = (TFfi ("Basis", "string"), loc) + + fun trycache (name, e, t : typ) = + (name, + case #1 t of + TFfi ("Basis", "blob") => + (EFfiApp ("Basis", + "check_filecache", + [(e, string)]), loc) + | TOption (TFfi ("Basis", "blob"), _) => + (ECase (e, + [((PNone string, loc), + (ENone blob, loc)), + ((PSome (string, (PVar ("hash", string), loc)), loc), + (ESome (blob, + (EFfiApp ("Basis", + "check_filecache", + [((ERel 0, loc), string)]), loc)), loc))], + {disc = (TOption string, loc), + result = (TOption blob, loc)}), loc) + | _ => e, + t) + + val wrapped_body_trycache = + (ELet ("uncached", + (TRecord (exps @ map (fn (name, cols) => + (name, (TRecord cols, loc))) tables), + loc), + (ERecord (map (fn (name, t) => + trycache (name, + (EField ((ERel 1, loc), + name), loc), + t)) exps + @ map (fn (tname, cols) => + (tname, + (ERecord (map (fn (name, t) => + trycache (name, + (EField ((EField ((ERel 1, loc), tname), loc), name), loc), + t)) cols), loc), + (TRecord cols, loc))) tables), loc), + MonoEnv.subExpInExp (2, (ERel 0, loc)) + + + (MonoEnv.liftExpInExp 0 body)), loc) + + fun maybeadd (e, t, acc) = + case #1 t of + TFfi ("Basis", "blob") => + (ESeq ((EFfiApp ("Basis", + "cache_file", + [(e, blob)]), loc), + acc), loc) + | TOption (TFfi ("Basis", "blob"), _) => + (ESeq ((ECase (e, + [((PNone 
blob, loc), + (ERecord [], loc)), + ((PSome (blob, (PVar ("blob", blob), loc)), loc), + (EFfiApp ("Basis", + "cache_file", + [((ERel 0, loc), blob)]), loc))], + {disc = t, + result = (TRecord [], loc)}), loc), + acc), loc) + | _ => acc + + val wrapped_body_addtocache = + foldl (fn ((name, t), e) => + maybeadd ((EField ((ERel 1, loc), name), loc), + t, e)) + (foldl (fn ((tname, cols), e) => + foldl (fn ((name, t), e) => + maybeadd ((EField ((EField ((ERel 1, loc), tname), loc), name), loc), + t, e)) e cols) body tables) + exps + in + ECase ((EFfiApp ("Basis", "filecache_missed", []), loc), + [((PCon (Enum, + PConFfi {mod = "Basis", + datatyp = "bool", + con = "False", + arg = NONE}, + NONE), loc), + (EQuery {exps = exps', + tables = tables', + state = state, + query = wrapped_query, + body = wrapped_body_trycache, + initial = initial}, loc)), + ((PCon (Enum, + PConFfi {mod = "Basis", + datatyp = "bool", + con = "True", + arg = NONE}, + NONE), loc), + (EQuery {exps = exps, + tables = tables, + state = state, + query = query, + body = wrapped_body_addtocache, + initial = initial}, loc))], + {disc = (TFfi ("Basis", "bool"), loc), + result = state}) + end + else + e + | _ => e + in + case Settings.getFileCache () of + NONE => file + | SOME _ => MonoUtil.File.map {typ = fn t => t, + exp = exp, + decl = fn d => d} file + end + +end diff --git a/src/mono_util.sml b/src/mono_util.sml index fc1a2bcb..fdf48d20 100644 --- a/src/mono_util.sml +++ b/src/mono_util.sml @@ -107,16 +107,16 @@ fun mapfold fc = | TOption t => S.map2 (mft t, fn t' => - (TOption t, loc)) + (TOption t', loc)) | TList t => S.map2 (mft t, fn t' => - (TList t, loc)) + (TList t', loc)) | TSource => S.return2 cAll | TSignal t => S.map2 (mft t, fn t' => - (TSignal t, loc)) + (TSignal t', loc)) in mft end diff --git a/src/mysql.sml b/src/mysql.sml index 52e4921e..e7cad84e 100644 --- a/src/mysql.sml +++ b/src/mysql.sml @@ -1609,6 +1609,7 @@ val () = addDbms {name = "mysql", onlyUnion = true, nestedRelops = false, windowFunctions = false, - supportsIsDistinctFrom = true} + supportsIsDistinctFrom = true, + supportsSHA512 = false} end diff --git a/src/postgres.sml b/src/postgres.sml index fac913f0..2b6bee8c 100644 --- a/src/postgres.sml +++ b/src/postgres.sml @@ -1153,7 +1153,8 @@ val () = addDbms {name = "postgres", onlyUnion = false, nestedRelops = true, windowFunctions = true, - supportsIsDistinctFrom = true} + supportsIsDistinctFrom = true, + supportsSHA512 = true} val () = setDbms "postgres" diff --git a/src/settings.sig b/src/settings.sig index 729218ac..986d6ed7 100644 --- a/src/settings.sig +++ b/src/settings.sig @@ -219,7 +219,8 @@ signature SETTINGS = sig onlyUnion : bool, nestedRelops : bool, windowFunctions : bool, - supportsIsDistinctFrom : bool + supportsIsDistinctFrom : bool, + supportsSHA512 : bool } val addDbms : dbms -> unit @@ -253,6 +254,9 @@ signature SETTINGS = sig val setSigFile : string option -> unit val getSigFile : unit -> string option + val setFileCache : string option -> unit + val getFileCache : unit -> string option + (* Which GET-able functions should be allowed to have side effects? 
*) val setSafeGets : string list -> unit val isSafeGet : string -> bool diff --git a/src/settings.sml b/src/settings.sml index 9e6d3e76..47a88932 100644 --- a/src/settings.sml +++ b/src/settings.sml @@ -646,7 +646,8 @@ type dbms = { onlyUnion : bool, nestedRelops : bool, windowFunctions: bool, - supportsIsDistinctFrom : bool + supportsIsDistinctFrom : bool, + supportsSHA512 : bool } val dbmses = ref ([] : dbms list) @@ -679,7 +680,8 @@ val curDb = ref ({name = "", onlyUnion = false, nestedRelops = false, windowFunctions = false, - supportsIsDistinctFrom = false} : dbms) + supportsIsDistinctFrom = false, + supportsSHA512 = false} : dbms) fun addDbms v = dbmses := v :: !dbmses fun setDbms s = @@ -724,6 +726,10 @@ val sigFile = ref (NONE : string option) fun setSigFile v = sigFile := v fun getSigFile () = !sigFile +val fileCache = ref (NONE : string option) +fun setFileCache v = fileCache := v +fun getFileCache () = !fileCache + structure SS = BinarySetFn(struct type ord_key = string val compare = String.compare diff --git a/src/sources b/src/sources index 52b1bdd7..5c0b2a84 100644 --- a/src/sources +++ b/src/sources @@ -231,6 +231,9 @@ $(SRC)/sidecheck.sml $(SRC)/sigcheck.sig $(SRC)/sigcheck.sml +$(SRC)/filecache.sig +$(SRC)/filecache.sml + $(SRC)/mono_inline.sml $(SRC)/sha1.sig diff --git a/src/sqlite.sml b/src/sqlite.sml index 0acd866b..db7052d1 100644 --- a/src/sqlite.sml +++ b/src/sqlite.sml @@ -855,6 +855,7 @@ val () = addDbms {name = "sqlite", onlyUnion = false, nestedRelops = false, windowFunctions = false, - supportsIsDistinctFrom = false} + supportsIsDistinctFrom = false, + supportsSHA512 = false} end diff --git a/tests/dbupload.urp b/tests/dbupload.urp index dd8417d1..daa68e2c 100644 --- a/tests/dbupload.urp +++ b/tests/dbupload.urp @@ -2,5 +2,6 @@ database dbname=dbupload sql dbupload.sql allow mime * rewrite all Dbupload/* +filecache /tmp/files dbupload diff --git a/tests/dbuploadOpt.ur b/tests/dbuploadOpt.ur new file mode 100644 index 00000000..466b49f3 --- /dev/null +++ b/tests/dbuploadOpt.ur @@ -0,0 +1,27 @@ +table t : { Id : int, Blob : option blob, MimeType : string } +sequence s + +fun getImage id : transaction page = + r <- oneRow1 (SELECT t.Blob, t.MimeType + FROM t + WHERE t.Id = {[id]}); + case r.Blob of + None => error Oh no! + | Some blob => returnBlob blob (blessMime r.MimeType) + +fun main () : transaction page = + let + fun handle r = + id <- nextval s; + dml (INSERT INTO t (Id, Blob, MimeType) + VALUES ({[id]}, {[if fileMimeType r.File = "image/jpeg" then Some (fileData r.File) else None]}, {[fileMimeType r.File]})); + main () + in + x <- queryX1 (SELECT t.Id FROM t) + (fn r =>
); + return +
+
+ {x} +
+ end diff --git a/tests/dbuploadOpt.urp b/tests/dbuploadOpt.urp new file mode 100644 index 00000000..816bcea1 --- /dev/null +++ b/tests/dbuploadOpt.urp @@ -0,0 +1,7 @@ +database dbname=dbuploadOpt +sql dbuploadOpt.sql +allow mime * +rewrite all DbuploadOpt/* +filecache /tmp/files + +dbuploadOpt -- cgit v1.2.3 From c1932084390aca19c748da024b7b168c160a3aea Mon Sep 17 00:00:00 2001 From: Adam Chlipala Date: Fri, 14 Dec 2018 15:42:59 -0500 Subject: New .urp option: safeGetDefault --- doc/manual.tex | 1 + src/compiler.sig | 1 + src/compiler.sml | 7 +++++++ src/demo.sml | 1 + src/settings.sig | 1 + src/settings.sml | 4 +++- 6 files changed, 14 insertions(+), 1 deletion(-) (limited to 'src/compiler.sig') diff --git a/doc/manual.tex b/doc/manual.tex index d2db4816..e064e59e 100644 --- a/doc/manual.tex +++ b/doc/manual.tex @@ -190,6 +190,7 @@ Here is the complete list of directive forms. ``FFI'' stands for ``foreign func \item \texttt{profile} generates an executable that may be used with gprof. \item \texttt{rewrite KIND FROM TO} gives a rule for rewriting canonical module paths. For instance, the canonical path of a page may be \texttt{Mod1.Mod2.mypage}, while you would rather the page were accessed via a URL containing only \texttt{page}. The directive \texttt{rewrite url Mod1/Mod2/mypage page} would accomplish that. The possible values of \texttt{KIND} determine which kinds of objects are affected. The kind \texttt{all} matches any object, and \texttt{url} matches page URLs. The kinds \texttt{table}, \texttt{sequence}, and \texttt{view} match those sorts of SQL entities, and \texttt{relation} matches any of those three. \texttt{cookie} matches HTTP cookies, and \texttt{style} matches CSS class names. If \texttt{FROM} ends in \texttt{/*}, it is interpreted as a prefix matching rule, and rewriting occurs by replacing only the appropriate prefix of a path with \texttt{TO}. The \texttt{TO} field may be left empty to express the idea of deleting a prefix. For instance, \texttt{rewrite url Main/*} will strip all \texttt{Main/} prefixes from URLs. While the actual external names of relations and styles have parts separated by underscores instead of slashes, all rewrite rules must be written in terms of slashes. An optional suffix of \cd{[-]} for a \cd{rewrite} directive asks to additionally replace all \cd{\_} characters with \cd{-} characters, which can be handy for, e.g., interfacing with an off-the-shelf CSS library that prefers hyphens over underscores. \item \texttt{safeGet URI} asks to allow the page handler assigned this canonical URI prefix to cause persistent side effects, even if accessed via an HTTP \cd{GET} request. +\item \texttt{safeGetDefault} asks to allow \emph{any} page handler to cause side effects, even if accessed via an HTTP \cd{GET} request. \item \texttt{script URL} adds \texttt{URL} to the list of extra JavaScript files to be included at the beginning of any page that uses JavaScript. This is most useful for importing JavaScript versions of functions found in new FFI modules. \item \texttt{serverOnly Module.ident} registers an FFI function or transaction that may only be run on the server. \item \texttt{sigfile PATH} sets a path where your application should look for a key to use in cryptographic signing. This is used to prevent cross-site request forgery attacks for any form handler that both reads a cookie and creates side effects. If the referenced file doesn't exist, an application will create it and read its saved data on future invocations. 
You can also initialize the file manually with any contents at least 16 bytes long; the first 16 bytes will be treated as the key. diff --git a/src/compiler.sig b/src/compiler.sig index bcf69fd4..7922393d 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -61,6 +61,7 @@ signature COMPILER = sig dbms : string option, sigFile : string option, fileCache : string option, + safeGetDefault : bool, safeGets : string list, onError : (string * string list * string) option, minHeap : int, diff --git a/src/compiler.sml b/src/compiler.sml index f724bf56..271cf2f1 100644 --- a/src/compiler.sml +++ b/src/compiler.sml @@ -65,6 +65,7 @@ type job = { dbms : string option, sigFile : string option, fileCache : string option, + safeGetDefault : bool, safeGets : string list, onError : (string * string list * string) option, minHeap : int, @@ -385,6 +386,7 @@ fun institutionalizeJob (job : job) = Settings.setMetaRules (#filterMeta job); Option.app Settings.setProtocol (#protocol job); Option.app Settings.setDbms (#dbms job); + Settings.setSafeGetDefault (#safeGetDefault job); Settings.setSafeGets (#safeGets job); Settings.setOnError (#onError job); Settings.setMinHeap (#minHeap job); @@ -470,6 +472,7 @@ fun parseUrp' accLibs fname = dbms = NONE, sigFile = NONE, fileCache = NONE, + safeGetDefault = false, safeGets = [], onError = NONE, minHeap = 0, @@ -605,6 +608,7 @@ fun parseUrp' accLibs fname = val dbms = ref NONE val sigFile = ref (Settings.getSigFile ()) val fileCache = ref (Settings.getFileCache ()) + val safeGetDefault = ref false val safeGets = ref [] val onError = ref NONE val minHeap = ref 0 @@ -645,6 +649,7 @@ fun parseUrp' accLibs fname = dbms = !dbms, sigFile = !sigFile, fileCache = !fileCache, + safeGetDefault = !safeGetDefault, safeGets = rev (!safeGets), onError = !onError, minHeap = !minHeap, @@ -708,6 +713,7 @@ fun parseUrp' accLibs fname = dbms = mergeO #2 (#dbms old, #dbms new), sigFile = mergeO #2 (#sigFile old, #sigFile new), fileCache = mergeO #2 (#fileCache old, #fileCache new), + safeGetDefault = #safeGetDefault old orelse #safeGetDefault new, safeGets = #safeGets old @ #safeGets new, onError = mergeO #2 (#onError old, #onError new), minHeap = Int.max (#minHeap old, #minHeap new), @@ -829,6 +835,7 @@ fun parseUrp' accLibs fname = | "include" => headers := relifyA arg :: !headers | "script" => scripts := arg :: !scripts | "clientToServer" => clientToServer := ffiS () :: !clientToServer + | "safeGetDefault" => safeGetDefault := true | "safeGet" => safeGets := arg :: !safeGets | "effectful" => effectful := ffiS () :: !effectful | "benignEffectful" => benignEffectful := ffiS () :: !benignEffectful diff --git a/src/demo.sml b/src/demo.sml index 1e58e2f8..eaec38bb 100644 --- a/src/demo.sml +++ b/src/demo.sml @@ -124,6 +124,7 @@ fun make' {prefix, dirname, guided} = dbms = mergeWith #2 (#dbms combined, #dbms urp), sigFile = mergeWith #2 (#sigFile combined, #sigFile urp), fileCache = mergeWith #2 (#fileCache combined, #fileCache urp), + safeGetDefault = #safeGetDefault combined orelse #safeGetDefault urp, safeGets = #safeGets combined @ #safeGets urp, onError = NONE, minHeap = 0, diff --git a/src/settings.sig b/src/settings.sig index 986d6ed7..6ba7e96a 100644 --- a/src/settings.sig +++ b/src/settings.sig @@ -258,6 +258,7 @@ signature SETTINGS = sig val getFileCache : unit -> string option (* Which GET-able functions should be allowed to have side effects? 
*) + val setSafeGetDefault : bool -> unit val setSafeGets : string list -> unit val isSafeGet : string -> bool diff --git a/src/settings.sml b/src/settings.sml index cfbe98a5..3772fc04 100644 --- a/src/settings.sml +++ b/src/settings.sml @@ -740,9 +740,11 @@ structure SS = BinarySetFn(struct val compare = String.compare end) +val safeGetDefault = ref false val safeGet = ref SS.empty +fun setSafeGetDefault b = safeGetDefault := b fun setSafeGets ls = safeGet := SS.addList (SS.empty, ls) -fun isSafeGet x = SS.member (!safeGet, x) +fun isSafeGet x = !safeGetDefault orelse SS.member (!safeGet, x) val onError = ref (NONE : (string * string list * string) option) fun setOnError x = onError := x -- cgit v1.2.3 From 7578916b630bd84ec3f8e7d97aaaa1cc7828e5ef Mon Sep 17 00:00:00 2001 From: Adam Chlipala Date: Sun, 16 Dec 2018 16:45:37 -0500 Subject: Specialize: ignore recursive references in classifying polymorphic uses of datatypes --- src/compiler.sig | 1 + src/compiler.sml | 3 ++- src/core_util.sig | 6 ++++++ src/core_util.sml | 16 ++++++++++++++++ src/specialize.sml | 34 +++++++++++++++++++++++++++++++--- 5 files changed, 56 insertions(+), 4 deletions(-) (limited to 'src/compiler.sig') diff --git a/src/compiler.sig b/src/compiler.sig index 7922393d..09c913f8 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -164,6 +164,7 @@ signature COMPILER = sig val toUnpoly2 : (string, Core.file) transform val toShake4'' : (string, Core.file) transform val toEspecialize3 : (string, Core.file) transform + val toSpecialize3 : (string, Core.file) transform val toReduce2 : (string, Core.file) transform val toShake5 : (string, Core.file) transform val toMarshalcheck : (string, Core.file) transform diff --git a/src/compiler.sml b/src/compiler.sml index 271cf2f1..e7de4d82 100644 --- a/src/compiler.sml +++ b/src/compiler.sml @@ -1390,8 +1390,9 @@ val toUnpoly2 = transform unpoly "unpoly2" o toShake4' val toSpecialize2 = transform specialize "specialize2" o toUnpoly2 val toShake4'' = transform shake "shake4'" o toSpecialize2 val toEspecialize3 = transform especialize "especialize3" o toShake4'' +val toSpecialize3 = transform specialize "specialize3" o toEspecialize3 -val toReduce2 = transform reduce "reduce2" o toEspecialize3 +val toReduce2 = transform reduce "reduce2" o toSpecialize3 val toShake5 = transform shake "shake5" o toReduce2 diff --git a/src/core_util.sig b/src/core_util.sig index 835577a3..8d295f1e 100644 --- a/src/core_util.sig +++ b/src/core_util.sig @@ -161,6 +161,12 @@ structure Decl : sig decl : (Core.decl', 'state, 'abort) Search.mapfolder} -> (Core.decl, 'state, 'abort) Search.mapfolder + val map : {kind : Core.kind' -> Core.kind', + con : Core.con' -> Core.con', + exp : Core.exp' -> Core.exp', + decl : Core.decl' -> Core.decl'} + -> Core.decl -> Core.decl + val fold : {kind : Core.kind' * 'state -> 'state, con : Core.con' * 'state -> 'state, exp : Core.exp' * 'state -> 'state, diff --git a/src/core_util.sml b/src/core_util.sml index 57ef16f7..d1d3d9c4 100644 --- a/src/core_util.sml +++ b/src/core_util.sml @@ -1029,6 +1029,22 @@ fun mapfold {kind = fk, con = fc, exp = fe, decl = fd} = decl = fn () => fd, bind = fn ((), _) => ()} () +fun mapB {kind, con, exp, decl, bind} ctx d = + case mapfoldB {kind = fn ctx => fn k => fn () => S.Continue (kind ctx k, ()), + con = fn ctx => fn c => fn () => S.Continue (con ctx c, ()), + exp = fn ctx => fn e => fn () => S.Continue (exp ctx e, ()), + decl = fn ctx => fn d => fn () => S.Continue (decl ctx d, ()), + bind = bind} ctx d () of + S.Continue (d, ()) => 
d + | S.Return _ => raise Fail "CoreUtil.Decl.mapB: Impossible" + +fun map {kind, con, exp, decl} d = + mapB {kind = fn () => kind, + con = fn () => con, + exp = fn () => exp, + decl = fn () => decl, + bind = fn _ => ()} () d + fun fold {kind, con, exp, decl} s d = case mapfold {kind = fn k => fn s => S.Continue (k, kind (k, s)), con = fn c => fn s => S.Continue (c, con (c, s)), diff --git a/src/specialize.sml b/src/specialize.sml index 9dc2cf1b..70e646e3 100644 --- a/src/specialize.sml +++ b/src/specialize.sml @@ -248,6 +248,27 @@ val specDecl = U.Decl.foldMap {kind = kind, con = con, exp = exp, decl = decl} fun specialize file = let + (*val () = CorePrint.debug := true + val () = print "SPECIALIZING\n"*) + + (* Let's run around a file, finding any polymorphic uses of a datatype. + * However, don't count polymorphism within a datatype's own definition! + * To that end, we run a silly transform on the file before traversing. *) + val file' = + map (fn d => + case #1 d of + DDatatype dts => + U.Decl.map {kind = fn x => x, + exp = fn x => x, + decl = fn x => x, + con = fn CNamed n => + if List.exists (fn (_, n', _, _) => n' = n) dts then + CUnit + else + CNamed n + | c => c} d + | _ => d) file + val fancyDatatypes = U.File.fold {kind = fn (_, fd) => fd, exp = fn (_, fd) => fd, decl = fn (_, fd) => fd, @@ -256,12 +277,18 @@ fun specialize file = CApp (c1, c2) => if isOpen c2 then case findApp (c, []) of - SOME (n, _) => IS.add (fd, n) + SOME (n, _) => + ((*Print.preface ("Disqualifier", + CorePrint.p_con CoreEnv.empty (c, ErrorMsg.dummySpan));*) + IS.add (fd, n)) | NONE => fd else fd | _ => fd} - IS.empty file + IS.empty file' + + (* Why did we find the polymorphism? + * It would be incoherent to specialize a datatype used polymorphically. *) fun doDecl (d, st) = let @@ -271,7 +298,8 @@ fun specialize file = case #1 d of DDatatype dts => if List.exists (fn (_, n, _, _) => IS.member (fancyDatatypes, n)) dts then - ([d], st) + ((*Print.preface ("Skipping", CorePrint.p_decl CoreEnv.empty d);*) + ([d], st)) else ((case #decls st of [] => [d] -- cgit v1.2.3 From e6c93e5b8ed862d096d2120aa0be2a125b332776 Mon Sep 17 00:00:00 2001 From: Artyom Shalkhakov Date: Mon, 7 Jan 2019 15:54:06 +0200 Subject: -endpoints switch to view all endpoints defined in JSON format --- src/compiler.sig | 1 + src/compiler.sml | 7 +++++ src/endpoints.sig | 41 ++++++++++++++++++++++++++++ src/endpoints.sml | 78 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ src/main.mlton.sml | 28 +++++++++++++++----- 5 files changed, 148 insertions(+), 7 deletions(-) create mode 100644 src/endpoints.sig create mode 100644 src/endpoints.sml (limited to 'src/compiler.sig') diff --git a/src/compiler.sig b/src/compiler.sig index 09c913f8..d4521b9f 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -171,6 +171,7 @@ signature COMPILER = sig val toEffectize : (string, Core.file) transform val toCss : (string, Css.report) transform val toMonoize : (string, Mono.file) transform + val toEndpoints : (string, Endpoints.report) transform val toMono_opt1 : (string, Mono.file) transform val toUntangle : (string, Mono.file) transform val toMono_reduce : (string, Mono.file) transform diff --git a/src/compiler.sml b/src/compiler.sml index 868dd628..4ef9ba19 100644 --- a/src/compiler.sml +++ b/src/compiler.sml @@ -1429,6 +1429,13 @@ val mono_opt = { print = MonoPrint.p_file MonoEnv.empty } +val endpoints = { + func = Endpoints.summarize, + print = Endpoints.p_report +} + +val toEndpoints = transform endpoints "endpoints" o toMonoize + val 
toMono_opt1 = transform mono_opt "mono_opt1" o toMonoize val untangle = { diff --git a/src/endpoints.sig b/src/endpoints.sig new file mode 100644 index 00000000..d766eb43 --- /dev/null +++ b/src/endpoints.sig @@ -0,0 +1,41 @@ +(* Copyright (c) 2010, Adam Chlipala + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * - The names of contributors may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + *) + +signature ENDPOINTS = sig + + datatype method = GET | POST + val methodToString : method -> string + + type endpoint = {Method : method, Url : string} + val p_endpoint : endpoint Print.printer + + type report = {Endpoints : endpoint list} + val p_report : report Print.printer + + val summarize : Mono.file -> report + +end diff --git a/src/endpoints.sml b/src/endpoints.sml new file mode 100644 index 00000000..22186cbb --- /dev/null +++ b/src/endpoints.sml @@ -0,0 +1,78 @@ +(* Copyright (c) 2010, 2013, Adam Chlipala + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * - The names of contributors may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + *) + +structure Endpoints :> ENDPOINTS = struct + +open Print.PD +open Print + +open Mono + +datatype method = GET | POST + +fun methodToString GET = "GET" + | methodToString POST = "POST" + +type endpoint = {Method : method, Url : string} +type report = {Endpoints : endpoint list} + +fun p_endpoint {Method = m, Url = u} = + box [string "{", + string "\"method\": \"", string (methodToString m), string "\",", + string "\"url\": \"", string u, string "\"", + string "}"] + +fun p_report {Endpoints = el} = + box [string "{\"endpoints\":", + space, + string "[", + p_list_sep (box [string ",", newline]) p_endpoint el, + string "]}"] + +fun summarize file = + let + fun exportKindToMethod (Link _) = GET + | exportKindToMethod (Action _) = POST + | exportKindToMethod (Rpc _) = POST + | exportKindToMethod (Extern _) = POST + + fun decl ((d, _), st as endpoints) = + let + in + case d of + DExport (ek, id, i, tl, rt, f) => + {Method = exportKindToMethod ek, Url = id} :: st + | _ => st + end + + val (decls, _) = file + val ep = foldl decl [] decls + in + {Endpoints = ep} + end + +end diff --git a/src/main.mlton.sml b/src/main.mlton.sml index 99005df5..56d98587 100644 --- a/src/main.mlton.sml +++ b/src/main.mlton.sml @@ -49,7 +49,7 @@ fun parse_flags flag_info args = | "--h" => "-help" | "--help" => "-help" | _ => arg - + fun loop [] : string list = [] | loop (arg :: args) = let @@ -114,6 +114,7 @@ fun oneRun args = val demo = ref (NONE : (string * bool) option) val tutorial = ref false val css = ref false + val endpoints = ref false val () = (Compiler.debug := false; Elaborate.verbose := false; @@ -162,12 +163,14 @@ fun oneRun args = SOME "print numeric version number and exit"), ("css", set_true css, SOME "print categories of CSS properties"), + ("endpoints", set_true endpoints, + SOME "print exposed URL endpoints"), ("print-ccompiler", ZERO printCCompiler, SOME "print C compiler and exit"), ("print-cinclude", ZERO printCInclude, SOME "print directory of C headers and exit"), ("ccompiler", ONE ("", Settings.setCCompiler), - SOME "set the C compiler to "), + SOME "set the C compiler to "), ("demo", ONE ("", fn prefix => demo := SOME (prefix, false)), NONE), @@ -268,8 +271,8 @@ fun oneRun args = " only one is allowed.\nSpecified projects: "^ String.concatWith ", " files) in - case (!css, !demo, !tutorial) of - (true, _, _) => + case (!css, !demo, !tutorial, !endpoints) of + (true, _, _, _) => (case Compiler.run Compiler.toCss job of NONE => OS.Process.failure | SOME {Overall = ov, Classes = cl} => @@ -282,13 +285,24 @@ fun oneRun args = app (print o Css.othersToString) ots; print "\n")) cl; OS.Process.success)) - | (_, SOME (prefix, guided), _) => + | (_, SOME (prefix, guided), _, _) => if Demo.make' {prefix = prefix, dirname = job, guided = guided} then OS.Process.success else OS.Process.failure - | (_, _, true) => (Tutorial.make job; - OS.Process.success) + | (_, _, true, _) => (Tutorial.make job; + OS.Process.success) + | (_, _, _, true) => + (case Compiler.run 
Compiler.toEndpoints job of + NONE => OS.Process.failure + | SOME es => + let + val r = Endpoints.p_report es + in + Print.eprint r; + print "\n"; + OS.Process.success + end) | _ => if !tc then (Compiler.check Compiler.toElaborate job; -- cgit v1.2.3 From ba1871b3b9cc669c43420f993719690b45326e2f Mon Sep 17 00:00:00 2001 From: Artyom Shalkhakov Date: Sat, 12 Jan 2019 16:20:14 +0200 Subject: Including app.js in output of endpoints --- src/cjr_print.sml | 6 ++++-- src/compiler.sig | 4 +++- src/compiler.sml | 29 +++++++++++++++++++++++++---- src/demo.sml | 4 ++++ src/endpoints.sig | 5 ++++- src/endpoints.sml | 25 ++++++++++++++++++++++++- src/main.mlton.sml | 24 ++++++------------------ src/settings.sig | 3 +++ src/settings.sml | 7 ++++++- 9 files changed, 79 insertions(+), 28 deletions(-) (limited to 'src/compiler.sig') diff --git a/src/cjr_print.sml b/src/cjr_print.sml index 31653a74..5983b9e5 100644 --- a/src/cjr_print.sml +++ b/src/cjr_print.sml @@ -2550,8 +2550,10 @@ fun p_decl env (dAll as (d, loc) : decl) = (case Settings.getOutputJsFile () of NONE => "app." ^ SHA1.bintohex (SHA1.hash s) ^ ".js" | SOME s => s) - val () = app_js := OS.Path.joinDirFile {dir = Settings.getUrlPrefix (), - file = name} + val js = OS.Path.joinDirFile {dir = Settings.getUrlPrefix (), + file = name} + val () = app_js := js + val () = Endpoints.setJavaScript js in box [string "static char jslib[] = \"", string (Prim.toCString s), diff --git a/src/compiler.sig b/src/compiler.sig index d4521b9f..6ed2f9a6 100644 --- a/src/compiler.sig +++ b/src/compiler.sig @@ -35,6 +35,7 @@ signature COMPILER = sig sources : string list, exe : string, sql : string option, + endpoints : string option, debug : bool, profile : bool, timeout : int, @@ -116,6 +117,7 @@ signature COMPILER = sig val css : (Core.file, Css.report) phase val monoize : (Core.file, Mono.file) phase val mono_opt : (Mono.file, Mono.file) phase + val endpoints : (Mono.file, Mono.file) phase val untangle : (Mono.file, Mono.file) phase val mono_reduce : (Mono.file, Mono.file) phase val mono_shake : (Mono.file, Mono.file) phase @@ -171,7 +173,7 @@ signature COMPILER = sig val toEffectize : (string, Core.file) transform val toCss : (string, Css.report) transform val toMonoize : (string, Mono.file) transform - val toEndpoints : (string, Endpoints.report) transform + val toEndpoints : (string, Mono.file) transform val toMono_opt1 : (string, Mono.file) transform val toUntangle : (string, Mono.file) transform val toMono_reduce : (string, Mono.file) transform diff --git a/src/compiler.sml b/src/compiler.sml index 4ef9ba19..7099effc 100644 --- a/src/compiler.sml +++ b/src/compiler.sml @@ -39,6 +39,7 @@ type job = { sources : string list, exe : string, sql : string option, + endpoints : string option, debug : bool, profile : bool, timeout : int, @@ -275,7 +276,7 @@ val parseUr = { handle LrParser.ParseError => [], print = SourcePrint.p_file} -fun p_job ({prefix, database, exe, sql, sources, debug, profile, +fun p_job ({prefix, database, exe, sql, endpoints, sources, debug, profile, timeout, ffi, link, headers, scripts, clientToServer, effectful, benignEffectful, clientOnly, serverOnly, jsModule, jsFuncs, ...} : job) = let @@ -304,6 +305,10 @@ fun p_job ({prefix, database, exe, sql, sources, debug, profile, NONE => string "No SQL file." | SOME sql => string ("SQL fle: " ^ sql), newline, + case endpoints of + NONE => string "No endpoints file." 
+ | SOME ep => string ("Endpoints fle: " ^ ep), + newline, string "Timeout: ", string (Int.toString timeout), newline, @@ -443,6 +448,7 @@ fun parseUrp' accLibs fname = sources = [fname], exe = fname ^ ".exe", sql = NONE, + endpoints = NONE, debug = Settings.getDebug (), profile = false, timeout = 120, @@ -581,6 +587,7 @@ fun parseUrp' accLibs fname = val database = ref (Settings.getDbstring ()) val exe = ref (Settings.getExe ()) val sql = ref (Settings.getSql ()) + val endpoints = ref (Settings.getEndpoints ()) val debug = ref (Settings.getDebug ()) val profile = ref false val timeout = ref NONE @@ -622,6 +629,7 @@ fun parseUrp' accLibs fname = exe = Option.getOpt (!exe, OS.Path.joinBaseExt {base = OS.Path.base filename, ext = SOME "exe"}), sql = !sql, + endpoints = !endpoints, debug = !debug, profile = !profile, timeout = Option.getOpt (!timeout, 60), @@ -684,6 +692,7 @@ fun parseUrp' accLibs fname = database = mergeO (fn (old, _) => old) (#database old, #database new), exe = #exe old, sql = #sql old, + endpoints = #endpoints old, debug = #debug old orelse #debug new, profile = #profile old orelse #profile new, timeout = #timeout old, @@ -1430,13 +1439,13 @@ val mono_opt = { } val endpoints = { - func = Endpoints.summarize, - print = Endpoints.p_report + func = Endpoints.collect, + print = MonoPrint.p_file MonoEnv.empty } val toEndpoints = transform endpoints "endpoints" o toMonoize -val toMono_opt1 = transform mono_opt "mono_opt1" o toMonoize +val toMono_opt1 = transform mono_opt "mono_opt1" o toEndpoints val untangle = { func = Untangle.untangle, @@ -1726,6 +1735,18 @@ fun compile job = TextIO.closeOut outf end; + case #endpoints job of + NONE => () + | SOME endpoints => + let + val report = Endpoints.summarize () + val outf = TextIO.openOut endpoints + val s = TextIOPP.openOut {dst = outf, wid = 80} + in + Print.fprint s (Endpoints.p_report report); + TextIO.closeOut outf + end; + compileC {cname = cname, oname = oname, ename = ename, libs = libs, profile = #profile job, debug = #debug job, linker = #linker job, link = #link job} diff --git a/src/demo.sml b/src/demo.sml index eaec38bb..ef57e65b 100644 --- a/src/demo.sml +++ b/src/demo.sml @@ -98,6 +98,10 @@ fun make' {prefix, dirname, guided} = NONE => OS.Path.joinDirFile {dir = dirname, file = "demo.sql"} | SOME s => s), + endpoints = SOME (case Settings.getEndpoints () of + NONE => OS.Path.joinDirFile {dir = dirname, + file = "demo-endpoints.json"} + | SOME e => e), debug = Settings.getDebug (), timeout = Int.max (#timeout combined, #timeout urp), profile = false, diff --git a/src/endpoints.sig b/src/endpoints.sig index f2c3c305..89e72add 100644 --- a/src/endpoints.sig +++ b/src/endpoints.sig @@ -36,6 +36,9 @@ signature ENDPOINTS = sig type report = {Endpoints : endpoint list} val p_report : report Print.printer - val summarize : Mono.file -> report + val reset : unit -> unit + val collect : Mono.file -> Mono.file + val setJavaScript : string -> unit + val summarize : unit -> report end diff --git a/src/endpoints.sml b/src/endpoints.sml index bb0b1d66..5699f319 100644 --- a/src/endpoints.sml +++ b/src/endpoints.sml @@ -59,7 +59,14 @@ fun p_report {Endpoints = el} = p_list_sep (box [string ",", newline]) p_endpoint el, string "]}"] -fun summarize file = +val endpoints = ref ([] : endpoint list) +val jsFile = ref (NONE : string option) + +fun setJavaScript x = jsFile := SOME x + +fun reset () = (endpoints := []; jsFile := NONE) + +fun collect file = let fun exportKindToMethod (Link _) = GET | exportKindToMethod (Action _) = POST @@ 
-75,6 +82,8 @@ fun summarize file = | _ => st end + val () = reset () + val (decls, _) = file val ep = foldl decl [] decls @@ -87,6 +96,20 @@ fun summarize file = {Method = GET, Url = f, LastModified = NONE, ContentType = SOME "text/javascript"} :: st val ep = foldl jsfile ep (Settings.listJsFiles ()) + in + endpoints := ep; + file + end + +fun summarize () = + let + val ep = !endpoints + val js = !jsFile + val ep = + case js of + NONE => ep + | SOME js => + {Method = GET, Url = js, LastModified = NONE, ContentType = SOME "text/javascript"} :: ep in {Endpoints = ep} end diff --git a/src/main.mlton.sml b/src/main.mlton.sml index 56d98587..bfa40265 100644 --- a/src/main.mlton.sml +++ b/src/main.mlton.sml @@ -114,7 +114,6 @@ fun oneRun args = val demo = ref (NONE : (string * bool) option) val tutorial = ref false val css = ref false - val endpoints = ref false val () = (Compiler.debug := false; Elaborate.verbose := false; @@ -163,8 +162,6 @@ fun oneRun args = SOME "print numeric version number and exit"), ("css", set_true css, SOME "print categories of CSS properties"), - ("endpoints", set_true endpoints, - SOME "print exposed URL endpoints"), ("print-ccompiler", ZERO printCCompiler, SOME "print C compiler and exit"), ("print-cinclude", ZERO printCInclude, @@ -220,6 +217,8 @@ fun oneRun args = SOME "serve JavaScript as "), ("sql", ONE ("", Settings.setSql o SOME), SOME "output sql script as "), + ("endpoints", ONE ("", Settings.setEndpoints o SOME), + SOME "output exposed URL endpoints in JSON as "), ("static", call_true Settings.setStaticLinking, SOME "enable static linking"), ("stop", ONE ("", Compiler.setStop), @@ -271,8 +270,8 @@ fun oneRun args = " only one is allowed.\nSpecified projects: "^ String.concatWith ", " files) in - case (!css, !demo, !tutorial, !endpoints) of - (true, _, _, _) => + case (!css, !demo, !tutorial) of + (true, _, _) => (case Compiler.run Compiler.toCss job of NONE => OS.Process.failure | SOME {Overall = ov, Classes = cl} => @@ -285,24 +284,13 @@ fun oneRun args = app (print o Css.othersToString) ots; print "\n")) cl; OS.Process.success)) - | (_, SOME (prefix, guided), _, _) => + | (_, SOME (prefix, guided), _) => if Demo.make' {prefix = prefix, dirname = job, guided = guided} then OS.Process.success else OS.Process.failure - | (_, _, true, _) => (Tutorial.make job; + | (_, _, true) => (Tutorial.make job; OS.Process.success) - | (_, _, _, true) => - (case Compiler.run Compiler.toEndpoints job of - NONE => OS.Process.failure - | SOME es => - let - val r = Endpoints.p_report es - in - Print.eprint r; - print "\n"; - OS.Process.success - end) | _ => if !tc then (Compiler.check Compiler.toElaborate job; diff --git a/src/settings.sig b/src/settings.sig index a6a9c5fc..97d56b45 100644 --- a/src/settings.sig +++ b/src/settings.sig @@ -240,6 +240,9 @@ signature SETTINGS = sig val setSql : string option -> unit val getSql : unit -> string option + val setEndpoints : string option -> unit + val getEndpoints : unit -> string option + val setCoreInline : int -> unit val getCoreInline : unit -> int diff --git a/src/settings.sml b/src/settings.sml index f42df135..0e999587 100644 --- a/src/settings.sml +++ b/src/settings.sml @@ -703,6 +703,10 @@ val sql = ref (NONE : string option) fun setSql so = sql := so fun getSql () = !sql +val endpoints = ref (NONE : string option) +fun setEndpoints so = endpoints := so +fun getEndpoints () = !endpoints + val coreInline = ref 5 fun setCoreInline n = coreInline := n fun getCoreInline () = !coreInline @@ -729,7 +733,7 @@ fun getSigFile () = 
!sigFile val fileCache = ref (NONE : string option) fun setFileCache v = - (if Option.isSome v andalso (case #supportsSHA512 (currentDbms ()) of NONE => true + (if Option.isSome v andalso (case #supportsSHA512 (currentDbms ()) of NONE => true | SOME _ => false) then ErrorMsg.error "The selected database engine is incompatible with file caching." else @@ -1007,6 +1011,7 @@ fun reset () = dbstring := NONE; exe := NONE; sql := NONE; + endpoints := NONE; coreInline := 5; monoInline := 5; staticLinking := false; -- cgit v1.2.3
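
Taken together, the patches above introduce several new project-file knobs: the mimeTypes directive and the long form of file, filecache PATH, safeGetDefault, and a -endpoints FILENAME command-line flag that writes the application's exposed URL endpoints as JSON. As a rough sketch of how they might be combined (the database name, cache path, and module name here are made-up placeholders, and filecache currently requires PostgreSQL with the pgcrypto module installed), a project file in the style of tests/dbupload.urp could read:

    database dbname=myapp
    sql myapp.sql
    allow mime *
    filecache /var/cache/myapp
    safeGetDefault
    mimeTypes /etc/mime.types

    myapp

Compiling with something like "urweb -endpoints endpoints.json myapp" would then also produce a JSON report in the shape printed by Endpoints.p_report, an {"endpoints": [...]} object listing at least the method and URL of each exposed handler, and, after the final commit above, an entry for the generated app.js file as well.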