about summary refs log tree commit diff
path: root/salis.py
diff options
context:
space:
mode:
Diffstat (limited to 'salis.py')
-rwxr-xr-x  salis.py  80
1 file changed, 74 insertions, 6 deletions
diff --git a/salis.py b/salis.py
index dc0eb93..7167b03 100755
--- a/salis.py
+++ b/salis.py
@@ -70,16 +70,16 @@ option_list = [
["d", "data-push-pow", "POW", "data aggregation interval exponent (interval == 2^{POW} >= {sync-pow}); a value of 0 disables data aggregation (requires 'sqlite')", 28, False, ipos, [new]],
["f", "force", None, "overwrite existing simulation of given name", False, False, bool, [new]],
["F", "muta-flip", None, "cosmic rays flip bits instead of randomizing whole bytes", False, False, bool, [bench, new]],
- ["g", "compiler", "CC", "C compiler to use", "gcc", False, str, [bench, load, new]],
- ["G", "compiler-flags", "FLAGS", "base set of flags to pass to C compiler", "-Wall -Wextra -Werror", False, str, [bench, load, new]],
+ ["g", "compiler", "CC", "C compiler to use", "gcc", False, str, [bench, load, new, serve]],
+ ["G", "compiler-flags", "FLAGS", "base set of flags to pass to C compiler", "-Wall -Wextra -Werror", False, str, [bench, load, new, serve]],
["M", "muta-pow", "POW", "mutator range exponent (range == 2^{POW})", 32, False, ipos, [bench, new]],
["m", "mvec-pow", "POW", "memory vector size exponent (size == 2^{POW})", 20, False, ipos, [bench, new]],
["n", "name", "NAME", "name of new or loaded simulation", "def.sim", False, str, [load, new, serve]],
- ["o", "optimized", None, "builds salis binary with optimizations", False, False, bool, [bench, load, new]],
+ ["o", "optimized", None, "build with optimizations", False, False, bool, [bench, load, new, serve]],
["P", "port", "PORT", "port number for data server", 8080, False, iport, [serve]],
["p", "pre-cmd", "CMD", "shell command to wrap call to executable (e.g. gdb, time, valgrind, etc.)", None, False, str, [bench, load, new]],
["s", "seed", "SEED", "seed value for new simulation; a value of 0 disables cosmic rays; a value of -1 creates a random seed", 0, False, seed, [bench, new]],
- ["T", "keep-temp-dir", None, "delete temporary directory on exit", False, False, bool, [bench, load, new]],
+	["T", "keep-temp-dir", None, "keep temporary directory on exit", False, False, bool, [bench, load, new, serve]],
["t", "thread-gap", "N", "memory gap between cores in bytes (may help reduce cache misses)", 0x100, False, inat, [bench, load, new]],
["u", "ui", uis, "user interface", "curses", False, str, [load, new]],
["U", "update", None, "update vendors (call 'git commit' afterwards to track updated files, if any)", False, False, bool, [serve]],
@@ -216,8 +216,42 @@ if args.command in ["serve"]:
info("Connecting to SQLite database:", sim_db)
db_con = sqlite3.connect(sim_db)
db_con.row_factory = sqlite3.Row
+ db_con.enable_load_extension(True)
db_cur = db_con.cursor()
+ # Build SQLite event-array render extension
+ sqlx_flags = set()
+ sqlx_defines = set()
+ sqlx_links = set()
+
+ sqlx_flags.update({*args.compiler_flags.split(), "-shared", "-fPIC", "-Idata"})
+ sqlx_defines.add(f"-DMVEC_SIZE={2 ** args.mvec_pow}ul")
+
+ if arch_vars.mvec_loop: sqlx_defines.add("-DMVEC_LOOP")
+
+ if args.optimized:
+ sqlx_flags.add("-O3")
+ sqlx_defines.add("-DNDEBUG")
+ else:
+ sqlx_flags.add("-ggdb")
+
+ sqlx_links.add("-lz")
+
+ sqlx_tempdir = TemporaryDirectory(prefix="salis_sqlx_", delete=not args.keep_temp_dir)
+ info("Created a temporary salis SQLite extension directory at:", sqlx_tempdir.name)
+
+ sqlx_so = os.path.join(sqlx_tempdir.name, "render.so")
+ info("Building salis SQLite extension at:", sqlx_so)
+
+ sqlx_build_cmd = [args.compiler, "data/render.c", "-o", sqlx_so]
+ sqlx_build_cmd.extend(sqlx_flags)
+ sqlx_build_cmd.extend(sqlx_defines)
+ sqlx_build_cmd.extend(sqlx_links)
+
+ info("Using build command:", sqlx_build_cmd)
+ subprocess.run(sqlx_build_cmd, check=True)
+ db_cur.execute(f"SELECT load_extension('{sqlx_so}')")
+
# Generate configuration so front-end knows how to render the plots.
# Each architecture may also provide its own set of plots, which will be merged with the
# default dictionary below.
@@ -256,10 +290,26 @@ if args.command in ["serve"]:
},
}
+ heatmaps = {
+ "Events": {
+ f"aev_{i}": {
+ "table": f"aev_{i}",
+ } for i in range(args.cores)
+ } | {
+ f"eev_{i}": {
+ "table": f"eev_{i}",
+ } for i in range(args.cores)
+ },
+ }
+
for key in arch_vars.plots:
plots[key] = (plots[key] if key in plots else {}) | arch_vars.plots[key]
+ for key in arch_vars.heatmaps:
+ heatmaps[key] = (heatmaps[key] if key in heatmaps else {}) | arch_vars.heatmaps[key]
+
info("Generated plot configuration:", plots)
+ info("Generated heatmap configuration:", heatmaps)
# NOTE: this server implementation is very minimal and has no built-in security.
# Please do not put this on the internet! Only run the data server within secure
@@ -292,8 +342,9 @@ if args.command in ["serve"]:
if bits.path == "/": return self.send_file_as("data/index.html", "text/html")
if bits.path.split("/")[1] in ["js", "vendor", "vue"]: return self.send_file_as("data" + bits.path, "text/javascript")
- if bits.path == "/opts": return self.send_as_json(opt_vars | {"name": args.name})
+ if bits.path == "/opts": return self.send_as_json(opt_vars | {"mvec_loop": arch_vars.mvec_loop, "name": args.name})
if bits.path == "/plots": return self.send_as_json(plots)
+ if bits.path == "/heatmaps": return self.send_as_json(heatmaps)
if bits.path == "/data":
http_query = urllib.parse.parse_qs(bits.query)
@@ -303,11 +354,28 @@ if args.command in ["serve"]:
x_axis = http_query["x_axis"][0]
x_low = http_query["x_low"][0]
x_high = http_query["x_high"][0]
- sql_query = f"SELECT * FROM (SELECT rowid, * FROM {table} WHERE {x_axis} >= {x_low} AND {x_axis} <= {x_high} AND rowid % {nth} == 0 ORDER BY {x_axis} DESC LIMIT {entries}) ORDER BY {x_axis} ASC;"
+ is_eva = http_query["is_eva"][0]
+
+ if is_eva == "true":
+ hm_left = http_query["hm_left"][0]
+ hm_px_count = http_query["hm_px_count"][0]
+ hm_px_pow = http_query["hm_px_pow"][0]
+ selects = ", ".join([f"cycl_{i}" for i in range(args.cores)]) + f", eva_render({hm_left}, {hm_px_count}, {hm_px_pow}, evts) as eva_render, step"
+ else:
+ selects = "*"
+
+ sql_query = f"SELECT * FROM (SELECT rowid, {selects} FROM {table} WHERE {x_axis} >= {x_low} AND {x_axis} <= {x_high} AND rowid % {nth} == 0 ORDER BY {x_axis} DESC LIMIT {entries}) ORDER BY {x_axis} ASC;"
sql_res = db_cur.execute(sql_query)
sql_list = [dict(row) for row in sql_res.fetchall()]
return self.send_as_json(sql_list)
+ if bits.path == "/x_high":
+ http_query = urllib.parse.parse_qs(bits.query)
+ x_axis = http_query["x_axis"][0]
+ sql_query = f"SELECT {x_axis} as x_high FROM general ORDER BY {x_axis} DESC LIMIT 1;"
+ sql_dict = dict(db_cur.execute(sql_query).fetchone())
+ return self.send_as_json(sql_dict)
+
self.log_error(f"Unsupported endpoint: {bits.path}")
self.send_response(400)
self.end_headers()