author borenet <borenet@google.com> 2015-06-25 07:47:40 -0700
committer Commit bot <commit-bot@chromium.org> 2015-06-25 07:47:41 -0700
commit 93558dc74be4a92a8dfba87fcffd4441a37c0fc8 (patch)
tree e5f3136fc4b8cde416059146061794247efecf1f /tools/lua
parent fb8d6884e0e01d0c2f8596adf5af1efb0d08de7e (diff)
Add lua scripts for generating n-grams from SKPs
Diffstat (limited to 'tools/lua')
-rw-r--r--  tools/lua/ngrams.lua            62
-rw-r--r--  tools/lua/ngrams_aggregate.lua  22
2 files changed, 84 insertions, 0 deletions
diff --git a/tools/lua/ngrams.lua b/tools/lua/ngrams.lua
new file mode 100644
index 0000000000..c94ffb3b38
--- /dev/null
+++ b/tools/lua/ngrams.lua
@@ -0,0 +1,62 @@
+-- Generate n-grams of Skia API calls from SKPs.
+
+-- To test this locally, run:
+-- $ GYP_DEFINES="skia_shared_lib=1" make lua_pictures
+-- $ out/Debug/lua_pictures -q -r $SKP_DIR -l tools/lua/ngrams.lua > /tmp/lua-output && lua tools/lua/ngrams_aggregate.lua
+
+-- To run on Cluster Telemetry, copy and paste the contents of this file into
+-- the box at https://skia-tree-status.appspot.com/skia-telemetry/lua_script,
+-- and paste the contents of ngrams_aggregate.lua into the "aggregator script"
+-- box on the same page.
+
+-- Change n as desired.
+local n = 3
+
+-- This algorithm uses a list-of-lists for each SKP. For each API call, append
+-- a list containing just the verb to the master list. Then, backtrack over
+-- the last (n-1) sublists in the master list and append the verb to those
+-- sublists. At the end of execution, the master list contains a sublist for
+-- every verb in the SKP file. Each sublist has length n, except the last n-1
+-- sublists, which are shorter; summarize() discards them and generates counts
+-- for each complete n-gram.
+
+local ngrams = {}
+local currentFile = ""
+
+function sk_scrape_startcanvas(c, fileName)
+ currentFile = fileName
+ ngrams[currentFile] = {}
+end
+
+function sk_scrape_endcanvas(c, fileName)
+end
+
+function sk_scrape_accumulate(t)
+ table.insert(ngrams[currentFile], {t.verb})
+ for i = 1, n-1 do
+ local idx = #ngrams[currentFile] - i
+ if idx > 0 then
+ table.insert(ngrams[currentFile][idx], t.verb)
+ end
+ end
+end
+
+function sk_scrape_summarize()
+ -- Count the n-grams.
+ local counts = {}
+ for file, ngramsInFile in pairs(ngrams) do
+ for i = 1, #ngramsInFile - (n-1) do
+ local ngram = table.concat(ngramsInFile[i], " ")
+ if counts[ngram] == nil then
+ counts[ngram] = 1
+ else
+ counts[ngram] = counts[ngram] + 1
+ end
+ end
+ end
+
+ -- Write out code for aggregating.
+ for ngram, count in pairs(counts) do
+ io.write("if counts['", ngram, "'] == nil then counts['", ngram, "'] = ", count, " else counts['", ngram, "'] = counts['", ngram, "'] + ", count, " end\n")
+ end
+end
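For illustration only (this sketch is not part of the commit): a minimal standalone Lua version of the list-of-lists construction described in the comments of ngrams.lua, run on a hand-written verb sequence with n = 3. The verb names are invented placeholders; running it with a plain lua interpreter prints the complete 3-grams and drops the trailing partial sublists, mirroring sk_scrape_accumulate() and the counting step of sk_scrape_summarize().

-- Standalone illustration of the n-gram construction (assumed verb names).
local n = 3
local verbs = {"save", "clipRect", "drawRect", "restore", "drawPath"}

local lists = {}
for _, verb in ipairs(verbs) do
  -- Start a new sublist holding just this verb ...
  table.insert(lists, {verb})
  -- ... then append the verb to the previous n-1 sublists.
  for i = 1, n - 1 do
    local idx = #lists - i
    if idx > 0 then
      table.insert(lists[idx], verb)
    end
  end
end

-- Only the first #lists - (n-1) sublists are complete n-grams.
for i = 1, #lists - (n - 1) do
  print(table.concat(lists[i], " "))
end
-- Prints:
--   save clipRect drawRect
--   clipRect drawRect restore
--   drawRect restore drawPath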
diff --git a/tools/lua/ngrams_aggregate.lua b/tools/lua/ngrams_aggregate.lua
new file mode 100644
index 0000000000..f8c9338149
--- /dev/null
+++ b/tools/lua/ngrams_aggregate.lua
@@ -0,0 +1,22 @@
+-- Aggregate the output from ngrams.lua.
+
+-- Get the data from all shards.
+counts = {}
+dofile("/tmp/lua-output")
+
+-- Put the data into a sortable "array".
+countArray = {}
+for ngram, count in pairs(counts) do
+ table.insert(countArray, {count, ngram})
+end
+
+-- Sort the data.
+function compare(a, b)
+ return a[1] > b[1]
+end
+table.sort(countArray, compare)
+
+-- Write the result.
+for i, countPair in ipairs(countArray) do
+ io.write(countPair[1], "\t", countPair[2], "\n")
+end
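A hedged sketch of how the two scripts fit together (the n-gram strings and counts below are invented for illustration): each shard's sk_scrape_summarize() writes lines in the style shown here to /tmp/lua-output, and ngrams_aggregate.lua dofile()s that file so the per-shard counts merge into its shared counts table before sorting and printing.

-- Illustration only: counts is normally created by ngrams_aggregate.lua
-- before the dofile() call; it is defined here so the snippet runs on its own.
counts = {}
-- Two lines in the style that sk_scrape_summarize() emits, as they might
-- appear in /tmp/lua-output from two different shards (values are made up).
if counts['save clipRect drawRect'] == nil then counts['save clipRect drawRect'] = 7 else counts['save clipRect drawRect'] = counts['save clipRect drawRect'] + 7 end
if counts['save clipRect drawRect'] == nil then counts['save clipRect drawRect'] = 5 else counts['save clipRect drawRect'] = counts['save clipRect drawRect'] + 5 end
-- After both shards are merged, counts['save clipRect drawRect'] == 12, so the
-- sort/print loop in ngrams_aggregate.lua emits "12<TAB>save clipRect drawRect".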