Compare commits

6 commits, eaba782ca6...master:

- 12a95af8af
- f309508fdd
- 18e1407254
- e0c6be3270
- 43de918fa3
- c775ba686b
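Taken together, the six commits switch the Docker images from the danceos/ Hub namespace to locally built tags driven by a new top-level Makefile, adjust the compose file and dockerfiles accordingly, delete the Nix flake environment, and clean up the result-browser Python package.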
.gitignore (vendored, 1 line changed)

@@ -10,7 +10,6 @@
 *.pyc
 *.swp
 *~
-Makefile
 build
 build-*
 .deps
Makefile (new file, 13 lines)

@@ -0,0 +1,13 @@
+docker:
+	docker build -t fail-base -f fail-base.dockerfile . --build-arg CACHE_DATE="$(shell date)"
+	docker build -t fail-generic-tracing -f fail-generic-tracing.dockerfile .
+	docker build -t fail-demo -f fail-demo.dockerfile . --build-arg CACHE_DATE="$(shell date)"
+
+run:
+	docker compose up -d --force-recreate --renew-anon-volumes -y
+
+stop:
+	docker compose down
+
+ssh:
+	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 5022 fail@127.0.0.1
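The three `docker build` lines run in dependency order: the `FROM` lines in the dockerfile hunks further down consume `fail-base` and `fail-generic-tracing`, so `fail-base` has to be tagged first and `fail-demo` last. `run`, `stop` and `ssh` wrap the day-to-day workflow: bring the compose stack up, tear it down, and log into the demo container via the forwarded SSH port 5022.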
@@ -7,19 +7,21 @@ services:
       MYSQL_USER: fail
       MYSQL_PASSWORD: fail
       MYSQL_DATABASE: fail
-    # ports:
-    #   - "3306:3306"
+    ports:
+      - "3306:3306"
     networks:
       - fail-network

   fail-demo:
-    image: danceos/fail-demo
+    image: fail-demo
     container_name: fail-demo
     ports:
       - "5000:5000" # Result Browser
       - "5022:22" # SSH
     networks:
       - fail-network
+    volumes:
+      - "../3 Wasm/examples/build-bochs:/home/fail/fail-wasm/examples/build-bochs"

 networks:
   fail-network:
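In the compose file the database port 3306 is now published instead of commented out, the demo service switches to the locally built `fail-demo` image, and the `build-bochs` output from the `../3 Wasm/examples` checkout is mounted into the container at `/home/fail/fail-wasm/examples/build-bochs`.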
@@ -3,7 +3,7 @@
 # generic-tracing experiment was already built and the binaries are in
 # place (~fail/bin/*)

-FROM danceos/fail-generic-tracing
+FROM fail-generic-tracing

 LABEL org.opencontainers.image.authors="Christian Dietrich <stettberger@dokucode.de>"

@@ -14,7 +14,7 @@ RUN chown fail /home/fail/.my.cnf
 USER fail
 WORKDIR /home/fail
 RUN echo 'export PATH=$HOME/bin:$PATH' >> ~/.profile \
-    && echo 'cd $HOME/fail-targets' >> ~/.profile
+    && echo 'cd $HOME/fail-wasm/examples' >> ~/.profile

 WORKDIR fail
 RUN mkdir build; cd build

@@ -149,6 +149,7 @@ ARG CACHE_DATE=1970-01-01
 WORKDIR /home/fail
 # RUN git clone https://github.com/danceos/fail-targets.git
 RUN git clone https://gitea.vps.chriphost.de/christoph/fail-targets
+RUN git clone https://gitea.vps.chriphost.de/christoph/fail-wasm

 USER root

@@ -1,6 +1,6 @@
 # Inherit from docker container that has the fail source code prepared,
 # including all tools which are needed to build FAIL*
-FROM danceos/fail-base
+FROM fail-base

 LABEL org.opencontainers.image.authors="Christian Dietrich <stettberger@dokucode.de>"

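The dockerfiles follow the same renaming: `FROM danceos/fail-generic-tracing` and `FROM danceos/fail-base` become the locally tagged `fail-generic-tracing` and `fail-base` images produced by the Makefile above. The demo image additionally clones the `fail-wasm` repository next to `fail-targets` and changes the login directory written to `~/.profile` from `~/fail-targets` to `~/fail-wasm/examples`.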
flake.lock (generated, 59 lines deleted)

@@ -1,59 +0,0 @@
-{
-  "nodes": {
-    "flake-utils": {
-      "inputs": {
-        "systems": "systems"
-      },
-      "locked": {
-        "lastModified": 1731533236,
-        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "nixpkgs": {
-      "locked": {
-        "lastModified": 1768032153,
-        "narHash": "sha256-6kD1MdY9fsE6FgSwdnx29hdH2UcBKs3/+JJleMShuJg=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "3146c6aa9995e7351a398e17470e15305e6e18ff",
-        "type": "github"
-      },
-      "original": {
-        "id": "nixpkgs",
-        "type": "indirect"
-      }
-    },
-    "root": {
-      "inputs": {
-        "flake-utils": "flake-utils",
-        "nixpkgs": "nixpkgs"
-      }
-    },
-    "systems": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
-    }
-  },
-  "root": "root",
-  "version": 7
-}
flake.nix (334 lines deleted)

@@ -1,334 +0,0 @@
-rec {
-  description = "FAIL* - Fault Injection Leveraged";
-
-  inputs = {
-    nixpkgs.url = "nixpkgs"; # Use nixpkgs from system registry
-    flake-utils.url = "github:numtide/flake-utils";
-  };
-
-  outputs = {
-    self,
-    nixpkgs,
-    flake-utils,
-  }:
-  # Create a shell (and possibly package) for each possible system, not only x86_64-linux
-    flake-utils.lib.eachDefaultSystem (system: let
-      pkgs = import nixpkgs {
-        inherit system;
-        config.allowUnfree = true;
-        overlays = [];
-      };
-      inherit (pkgs) lib stdenv;
-
-      # ===========================================================================================
-      # Define custom dependencies
-      # ===========================================================================================
-
-      # 64 bit C/C++ compilers that don't collide (use the same libc)
-      bintools = pkgs.wrapBintoolsWith {
-        bintools = pkgs.bintools.bintools; # Unwrapped bintools
-        libc = pkgs.glibc;
-      };
-      gcc = lib.hiPrio (pkgs.wrapCCWith {
-        cc = pkgs.gcc.cc; # Unwrapped gcc
-        libc = pkgs.glibc;
-        bintools = bintools;
-      });
-      clang = pkgs.wrapCCWith {
-        cc = pkgs.clang.cc; # Unwrapped clang
-        libc = pkgs.glibc;
-        bintools = bintools;
-      };
-
-      # Multilib C/C++ compilers that don't collide (use the same libc)
-      # bintools_multilib = pkgs.wrapBintoolsWith {
-      #   bintools = pkgs.bintools.bintools; # Unwrapped bintools
-      #   libc = pkgs.glibc_multi;
-      # };
-      # gcc_multilib = lib.hiPrio (pkgs.wrapCCWith {
-      #   cc = pkgs.gcc.cc; # Unwrapped gcc
-      #   libc = pkgs.glibc_multi;
-      #   bintools = bintools_multilib;
-      # });
-      # clang_multilib = pkgs.wrapCCWith {
-      #   cc = pkgs.clang.cc; # Unwrapped clang
-      #   libc = pkgs.glibc_multi;
-      #   bintools = bintools_multilib;
-      # };
-
-      aspectcxx = stdenv.mkDerivation rec {
-        pname = "aspectcxx";
-        version = "1.2";
-
-        src = pkgs.fetchurl {
-          url = "http://www.aspectc.org/releases/${version}/ac-bin-linux-x86-64bit-${version}.tar.gz";
-          sha256 = "sha256-8wOrPYC99E3aV5/Js2EBI4V/OoD9y10fYZt8ikPtvt4="; # 1.2
-          # sha256 = "sha256-gmqoZFkPvs60b6yuMHNtYvuGJXGnbwAYEiyXxp/fmPI="; # 2.0
-          # sha256 = "sha256-+rEflemXNwn4XZZwSOqCsr6/KFGV8wxW6PeXzHHUK0o="; # 2.5
-        };
-
-        nativeBuildInputs = [
-          pkgs.autoPatchelfHook
-        ];
-
-        unpackPhase = ''
-          tar xvzf $src
-        '';
-
-        installPhase = ''
-          mkdir -p $out/bin
-          cp aspectc++/ac++ $out/bin/
-          cp aspectc++/ag++ $out/bin/
-        '';
-      };
-
-      libpcl = stdenv.mkDerivation rec {
-        pname = "libpcl";
-        version = "1.12-2";
-
-        src = pkgs.fetchurl {
-          url = "https://launchpadlibrarian.net/521269537/libpcl1_1.12-2_amd64.deb";
-          sha256 = "sha256-GL3mjPAccAtRMAJPnDMCHiDf6xNvGi4oUWylOIqBjP0=";
-        };
-
-        nativeBuildInputs = with pkgs; [
-          dpkg
-          autoPatchelfHook
-        ];
-
-        unpackPhase = ''
-          dpkg-deb -x $src .
-        '';
-
-        installPhase = ''
-          mkdir -p $out
-          cp -r usr/* $out/
-        '';
-      };
-
-      libpcl-dev = stdenv.mkDerivation rec {
-        pname = "libpcl-dev";
-        version = "1.12-2";
-
-        src = pkgs.fetchurl {
-          url = "https://launchpadlibrarian.net/521269536/libpcl1-dev_${version}_amd64.deb";
-          sha256 = "sha256-Z1wP0K8hfV1f9ypee9XIx6H0JOTidhtXDBe82mlRaOg=";
-        };
-
-        nativeBuildInputs = with pkgs; [
-          dpkg
-          autoPatchelfHook
-        ];
-
-        unpackPhase = ''
-          dpkg-deb -x $src .
-        '';
-
-        installPhase = ''
-          mkdir -p $out
-          cp -r usr/* $out/
-
-          # Hacky bullshit
-          cp ${libpcl}/lib/x86_64-linux-gnu/libpcl.so.1.0.11 $out/lib/x86_64-linux-gnu/libpcl.so.1.0.11
-          rm $out/share/doc/libpcl1-dev/changelog.Debian.gz
-        '';
-      };
-
-      # ===========================================================================================
-      # Specify dependencies
-      # https://nixos.org/manual/nixpkgs/stable/#ssec-stdenv-dependencies-overview
-      # Just for a "nix develop" shell, buildInputs can be used for everything.
-      # ===========================================================================================
-
-      # Add dependencies to nativeBuildInputs if they are executed during the build:
-      # - Those which are needed on $PATH during the build, for example cmake and pkg-config
-      # - Setup hooks, for example makeWrapper
-      # - Interpreters needed by patchShebangs for build scripts (with the --build flag), which can be the case for e.g. perl
-      nativeBuildInputs = with pkgs; [
-        # Languages:
-        bintools
-        gcc
-        # clang
-        aspectcxx
-
-        # C/C++:
-        gnumake
-        cmake
-        pkg-config
-        doxygen
-      ];
-
-      # Add dependencies to buildInputs if they will end up copied or linked into the final output or otherwise used at runtime:
-      # - Libraries used by compilers, for example zlib
-      # - Interpreters needed by patchShebangs for scripts which are installed, which can be the case for e.g. perl
-      buildInputs = with pkgs; [
-        # C/C++:
-        libpcl-dev
-        libiberty
-        libelf
-        libdwarf
-        boost
-
-        llvmPackages_18.llvm
-        mariadb-connector-c
-        fontconfig
-        zlib
-        capstone
-        protobuf
-        binutils
-
-        # No clue what I need from those
-        xorg.libX11
-        xorg.libXext
-        xorg.libXrender
-        xorg.libXrandr
-        xorg.libXinerama
-        xorg.libXcursor
-        xorg.libXi
-        xorg.libXfixes
-      ];
-      # ===========================================================================================
-      # Define buildable + installable packages
-      # ===========================================================================================
-      package = stdenv.mkDerivation {
-        inherit nativeBuildInputs buildInputs;
-        pname = "fail";
-        version = "1.0.1";
-        src = ./.;
-
-        enableParallelBuilding = true;
-
-        cmakeFlags = [
-          # Our CMake is too new :( Try it anyway, can still downgrade later...
-          "-DCMAKE_POLICY_VERSION_MINIMUM=3.5"
-
-          # AspectC++
-          # "-DCMAKE_AGPP_FLAGS=--c_compiler${clang}/bin/clang++"
-          "-DCMAKE_AGPP_FLAGS=-D__NO_MATH_INLINES"
-          # "-DCMAKE_AGPP_FLAGS=--c_compiler${clang}/bin/clang++ -D__NO_MATH_INLINES -D__STRICT_ANSI__"
-
-          # Tell CMake where the libs are
-          "-DLibIberty_INCLUDE_DIRS=${pkgs.libiberty}/include"
-          "-DLibIberty_LIBRARIES=${pkgs.libiberty}/lib/libiberty.a"
-
-          "-DLIBELF_INCLUDE_DIRS=${pkgs.libelf}/include"
-          "-DLIBELF_LIBRARIES=${pkgs.libelf}/lib/libelf.a"
-
-          "-DLIBDWARF_INCLUDE_DIRS=${pkgs.libdwarf}/include"
-          "-DLIBDWARF_LIBRARIES=${pkgs.libdwarf}/lib/libdwarf.a"
-
-          "-DCAPSTONE_INCLUDE_DIR=${pkgs.capstone}/include"
-          "-DCAPSTONE_LIBRARY=${pkgs.capstone}/lib/libcapstone.a"
-
-          "-DLIBPCL_LIBRARIES=${libpcl-dev}/lib/libpcl.a"
-        ];
-
-        installPhase = ''
-          mkdir -p $out/bin
-          mv ./fail $out/bin/
-        '';
-      };
-    in rec {
-      # Provide package for "nix build"
-      defaultPackage = package;
-      defaultApp = flake-utils.lib.mkApp {
-        drv = defaultPackage;
-      };
-
-      # Provide environment for "nix develop"
-      devShell = pkgs.mkShell {
-        inherit nativeBuildInputs buildInputs;
-        name = description;
-
-        # =========================================================================================
-        # Define environment variables
-        # =========================================================================================
-
-        # Custom dynamic libraries:
-        # LD_LIBRARY_PATH = builtins.concatStringsSep ":" [
-        #   # Rust Bevy GUI app:
-        #   # "${pkgs.xorg.libX11}/lib"
-        #   # "${pkgs.xorg.libXcursor}/lib"
-        #   # "${pkgs.xorg.libXrandr}/lib"
-        #   # "${pkgs.xorg.libXi}/lib"
-        #   # "${pkgs.libGL}/lib"
-        #
-        #   # JavaFX app:
-        #   # "${pkgs.libGL}/lib"
-        #   # "${pkgs.gtk3}/lib"
-        #   # "${pkgs.glib.out}/lib"
-        #   # "${pkgs.xorg.libXtst}/lib"
-        # ];
-
-        # Dynamic libraries from buildinputs:
-        LD_LIBRARY_PATH = nixpkgs.lib.makeLibraryPath buildInputs;
-
-        # Set matplotlib backend
-        # MPLBACKEND = "TkAgg";
-
-        # =========================================================================================
-        # Define shell environment
-        # =========================================================================================
-
-        # Setup the shell when entering the "nix develop" environment (bash script).
-        shellHook = let
-          mkCmakeScript = type: let
-            typeLower = lib.toLower type;
-          in
-            pkgs.writers.writeFish "cmake-${typeLower}.fish" ''
-              cd $FLAKE_PROJECT_ROOT
-
-              echo "Removing build directory ./cmake-build-${typeLower}/"
-              rm -rf ./cmake-build-${typeLower}
-
-              echo "Creating build directory"
-              mkdir cmake-build-${typeLower}
-              cd cmake-build-${typeLower}
-
-              echo "Running cmake"
-              cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE="${type}" -DCMAKE_EXPORT_COMPILE_COMMANDS="On" ..
-
-              echo "Linking compile_commands.json"
-              cd ..
-              ln -sf ./cmake-build-${typeLower}/compile_commands.json ./compile_commands.json
-            '';
-
-          cmakeDebug = mkCmakeScript "Debug";
-          cmakeRelease = mkCmakeScript "Release";
-
-          mkBuildScript = type: let
-            typeLower = lib.toLower type;
-          in
-            pkgs.writers.writeFish "cmake-build.fish" ''
-              cd $FLAKE_PROJECT_ROOT/cmake-build-${typeLower}
-
-              echo "Running cmake"
-              cmake --build .
-            '';
-
-          buildDebug = mkBuildScript "Debug";
-          buildRelease = mkBuildScript "Release";
-
-          # Use this to specify commands that should be ran after entering fish shell
-          initProjectShell = pkgs.writers.writeFish "init-shell.fish" ''
-            echo "Entering \"${description}\" environment..."
-
-            # Determine the project root, used e.g. in cmake scripts
-            set -g -x FLAKE_PROJECT_ROOT (git rev-parse --show-toplevel)
-
-            # C/C++:
-            # abbr -a cmake-debug "${cmakeDebug}"
-            # abbr -a cmake-release "${cmakeRelease}"
-            # abbr -a build-debug "${buildDebug}"
-            # abbr -a build-release "${buildRelease}"
-          '';
-        in
-          builtins.concatStringsSep "\n" [
-            # Launch into pure fish shell
-            ''
-              exec "$(type -p fish)" -C "source ${initProjectShell} && abbr -a menu '${pkgs.bat}/bin/bat "${initProjectShell}"'"
-            ''
-          ];
-      };
-    });
-}
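With `flake.lock` and `flake.nix` removed, the Nix-based environment (the AspectC++ and libpcl derivations, the pinned dependency set, the CMake helper scripts and the `nix develop` fish shell) is gone from this branch; the Docker images above are apparently the supported way to build and run FAIL* from here on.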
@@ -1,12 +0,0 @@
-build-all:
-	docker build -t danceos/fail-base fail-base --build-arg CACHE_DATE="$(shell date)"
-	docker build -t danceos/fail-generic-tracing fail-generic-tracing
-	docker build -t danceos/fail-demo fail-demo --build-arg CACHE_DATE="$(shell date)"
-
-run-all: .compose ssh
-
-.compose:
-	docker compose up -d --force-recreate --renew-anon-volumes -y
-
-ssh:
-	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 5022 fail@127.0.0.1
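This deleted Makefile is the older variant that built the `danceos/`-namespaced images from per-image directories; its `build-all`, `.compose`/`run-all` and `ssh` targets map onto the `docker`, `run` and `ssh` targets of the new top-level Makefile shown above.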
@@ -1,9 +1,12 @@
 from pprint import pprint

 from . import details
-from . import model
+from app.detaildealer import detaildealer
+
+
 def scrub(table_name):
-    return ''.join( chr for chr in table_name if chr.isalnum() or chr == '_' )
+    return "".join(chr for chr in table_name if chr.isalnum() or chr == "_")
+

 class Resulttype:
     def __init__(self, name, count):
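The Python hunks that follow are largely mechanical — double quotes, spacing and call wrapping consistent with running an auto-formatter such as black — plus one structural change: the shared `detaildealer` instance moves into its own module `app/detaildealer.py`, so this module (imported as `data` by `model.py`) no longer reaches it through `model`. A sketch of that layout follows the `detaildealer.py` diff below.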
@@ -16,28 +19,27 @@ class Resulttype:
     def getCount(self):
         return self.count


 class Variant:
     def __init__(self, id, name, table, benchmark, detail):
         self.id = id
         self.dbname = name
         self.parenttable = table  # TableDetails
         self.details = detail  # VariantDetails
         self.benchmark = benchmark  # BenchmarkDetails
         self.results = {}
         self.totalresults = 0

-
     def getMapper(self):
         mapper = self.benchmark.getMapper()
-        if not mapper: #try benchmark mapper
+        if not mapper:  # try benchmark mapper
             mapper = self.details.getMapper()
         if not mapper:  # of not there, try parent tables mapper
             mapper = self.parenttable.getMapper()
         if not mapper:  # no mapper found at all, try default mapper
-            mapper = model.detaildealer.getDefaultMapper()
+            mapper = detaildealer.getDefaultMapper()
         return mapper

     def addResulttype(self, name, count):
         mapper = self.getMapper()
         label = mapper.getLabel(name)
@@ -70,17 +72,28 @@ class Variant:
         return self.totalresults

     def __str__(self):
-        ret = "Variant: " + self.getDetails().getTitle() + " - " + self.getBenchmarkDetails().getTitle() +" (id: " + str( self.id )+ ")" + " "
-        ret += "Total Results: " + str( self.totalresults ) + "\n"
+        ret = (
+            "Variant: "
+            + self.getDetails().getTitle()
+            + " - "
+            + self.getBenchmarkDetails().getTitle()
+            + " (id: "
+            + str(self.id)
+            + ")"
+            + " "
+        )
+        ret += "Total Results: " + str(self.totalresults) + "\n"
         for v in self.results:
-            ret += "\t" + v.name + ": " + str( v.count ) + "\n"
+            ret += "\t" + v.name + ": " + str(v.count) + "\n"
         return ret

     __repr__ = __str__

-'''A ResultTable contains n Variants'''
-class ResultTable:
+"""A ResultTable contains n Variants"""
+
+
+class ResultTable:
     def __init__(self, name, cfg):
         self.name = scrub(name)
         self.details = cfg.getTable(name)
@@ -89,7 +102,7 @@ class ResultTable:
     def addVariant(self, var):
         if var.getId() in self.variants:
             return
-        self.variants[var.getId()] = var # Add if not existing yet
+        self.variants[var.getId()] = var  # Add if not existing yet

     def getVariant(self, id):
         if id in self.variants:
@@ -97,7 +110,7 @@ class ResultTable:
         return None

     def getVariantById(self, varid):
-        for k,v in self.variants.items():
+        for k, v in self.variants.items():
             if int(v.getId()) == int(varid):
                 return v
         return None
@@ -110,12 +123,16 @@ class ResultTable:

     def __str__(self):
         ret = "Result: " + self.getDetails().getTitle() + "\n"
-        for k,v in self.variants.items():
+        for k, v in self.variants.items():
             ret += "\t" + str(v) + "\n"
         return ret

     __repr__ = __str__

-'''Overview has n ResultTables'''
+
+"""Overview has n ResultTables"""
+
+
 class Overview:
     def __init__(self):
         self.tables = {}
@@ -130,7 +147,7 @@ class Overview:
         return self.tables.get(dbname, None)

     def getVariantById(self, variant_id):
-        for key,table in self.tables.items():
+        for key, table in self.tables.items():
             variant = table.getVariantById(variant_id)
             if variant:
                 return variant
@@ -139,5 +156,3 @@ class Overview:

     def length(self):
         return len(self.tables)
-
-
tools/analysis/resultbrowser/app/detaildealer.py (new file, 4 lines)

@@ -0,0 +1,4 @@
+# Instantiate global detail dealer, will be initialized in reloadOverview
+from app import details
+
+detaildealer = details.DetailDealer()
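The point of this four-line module is to break the import cycle: both `data.py` and `model.py` can now import the one shared `DetailDealer` instance instead of `data` importing `model` just to reach `model.detaildealer`. A minimal, self-contained sketch of the pattern — the class and function below are stand-ins, not the real resultbrowser code:

```python
# Toy version of the "shared instance lives in its own module" layout.


class DetailDealer:
    # Stand-in for app.details.DetailDealer
    def getDefaultMapper(self):
        return "default-mapper"


# app/detaildealer.py equivalent: one module-level instance everyone imports
detaildealer = DetailDealer()


def get_mapper(candidates):
    # data.py's Variant.getMapper() equivalent: the first mapper that is set
    # wins, otherwise fall back to the shared dealer's default mapper.
    for mapper in candidates:
        if mapper:
            return mapper
    return detaildealer.getDefaultMapper()


print(get_mapper([None, None]))  # -> "default-mapper"
```

Because `data.py` and `model.py` both import this module, the benchmark → variant → table → default fallback chain in `Variant.getMapper()` keeps working while the `data` → `model` dependency disappears.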
@@ -1,31 +1,60 @@
 #!/usr/bin/env python
-import MySQLdb
-import MySQLdb.cursors
-import yaml
-
-import sys
 import os.path
+
+import yaml
+import sys
+
+import MySQLdb
+import MySQLdb.cursors
+
+from app.detaildealer import detaildealer
+
 from pprint import pprint
-from . import data
-from . import details
+from . import data, details

 """Get command line options"""
 from optparse import OptionParser

 parser = OptionParser()
-parser.add_option("-c", "--conf", type="string", help="MySQL config file", dest="config", default= os.path.join(os.path.expanduser("~"),".my.cnf"))
-parser.add_option("-s", "--host", type="string", help="Webserver hostname", dest="host", default="localhost")
-parser.add_option("-d", "--details", type="string", help="Detailed information (YAML configuration file)", dest="details", default=None)
-parser.add_option("-p", "--port", type="string", help="Webserver port", dest="port", default="5000")
+parser.add_option(
+    "-c",
+    "--conf",
+    type="string",
+    help="MySQL config file",
+    dest="config",
+    default=os.path.join(os.path.expanduser("~"), ".my.cnf"),
+)
+parser.add_option(
+    "-s",
+    "--host",
+    type="string",
+    help="Webserver hostname",
+    dest="host",
+    default="localhost",
+)
+parser.add_option(
+    "-d",
+    "--details",
+    type="string",
+    help="Detailed information (YAML configuration file)",
+    dest="details",
+    default=None,
+)
+parser.add_option(
+    "-p", "--port", type="string", help="Webserver port", dest="port", default="5000"
+)
 opts, args = parser.parse_args()

 """Check if configuration files exist"""
+
+
 def checkConfigFile(msg, fname):
     if not os.path.isfile(fname):
         sys.exit("Error: '" + fname + "' not found")
     else:
         print(msg, "->", fname)

+
 # Check sql config
 sqlconfig = opts.config
 checkConfigFile("MySQL config", sqlconfig)
@@ -35,50 +64,74 @@ if opts.details:
     checkConfigFile("Details", opts.details)

 # Instantiate global detail dealer, will be initialized in reloadOverview
-detaildealer = details.DetailDealer()
+# detaildealer = details.DetailDealer()


 """Remove all characters from string except alphanuermics and _"""
+
+
 def scrub(table_name):
-    return ''.join( chr for chr in table_name if chr.isalnum() or chr == '_' )
+    return "".join(chr for chr in table_name if chr.isalnum() or chr == "_")


 """Global mysql handles"""
 db = None
 cur = None


 def loadSession(dbconf):
     global db
     if db:
         db.close()
-    db = MySQLdb.connect(read_default_file=dbconf, cursorclass=MySQLdb.cursors.DictCursor)
+    db = MySQLdb.connect(
+        read_default_file=dbconf, cursorclass=MySQLdb.cursors.DictCursor
+    )
     return db.cursor()


 def closeSession():
-    if cur: cur.close()
+    if cur:
+        cur.close()
     global db
     db.close()
     db = None


-'''Populate variant results for overview data'''
+"""Populate variant results for overview data"""


 def getVariants(cur, table):
     restbl = table.getDetails().getDBName()
-    cur.execute("""SELECT sum((t.time2 - t.time1 + 1) * width) AS total, resulttype,variant, v.id as variant_id, benchmark, details FROM variant v JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id GROUP BY v.id, resulttype, details""" % (restbl)) # % is used here, as a tablename must not be quoted
+    cur.execute(
+        """SELECT sum((t.time2 - t.time1 + 1) * width) AS total, resulttype,variant, v.id as variant_id, benchmark, details FROM variant v JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id GROUP BY v.id, resulttype, details"""
+        % (restbl)
+    )  # % is used here, as a tablename must not be quoted
     res = cur.fetchall()
     rdic = {}
     # Build dict with variant id as key
     for r in res:
         # if variant entry already exists:
-        variant = table.getVariant(int(r['variant_id']))
-        if not variant: # if variant did not exist yet, create it:
-            variant_details = detaildealer.getVariant(restbl, r['variant'])
-            benchmark_details = detaildealer.getBenchmark(restbl, r['variant'], r['benchmark'])
+        variant = table.getVariant(int(r["variant_id"]))
+        if not variant:  # if variant did not exist yet, create it:
+            variant_details = detaildealer.getVariant(restbl, r["variant"])
+            benchmark_details = detaildealer.getBenchmark(
+                restbl, r["variant"], r["benchmark"]
+            )
             table_details = detaildealer.getTable(restbl)
-            variant = data.Variant(int(r['variant_id']), r['variant'], table_details, benchmark_details, variant_details)
-        variant.addResulttype(r['resulttype'], r['total'])
+            variant = data.Variant(
+                int(r["variant_id"]),
+                r["variant"],
+                table_details,
+                benchmark_details,
+                variant_details,
+            )
+        variant.addResulttype(r["resulttype"], r["total"])
         table.addVariant(variant)


-'''Get overview data for index page'''
+"""Get overview data for index page"""


 def reloadOverview():
     overview = data.Overview()
     detaildealer.reload(opts.details)
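The comment on the `cur.execute` call above is the important detail: the driver's placeholders quote values, so the result-table name has to be spliced in with `%`-formatting after `scrub()` has reduced it to alphanumerics and underscores, while actual values are still bound at execute time. A small sketch of that convention, with a made-up table name and id and no live database:

```python
def scrub(table_name):
    # Same idea as scrub() in model.py: keep only letters, digits and "_"
    return "".join(ch for ch in table_name if ch.isalnum() or ch == "_")


# Hypothetical, deliberately hostile table name -- scrub() strips the junk
restbl = scrub("result_GenericExperiment; DROP TABLE x")

# Table name: %-formatted into the statement (a placeholder would quote it).
stmt = "SELECT resulttype, count(*) AS total FROM %s r" % restbl
# Values: left as driver placeholders and bound by cur.execute(stmt, params).
stmt += " WHERE variant_id = %s GROUP BY resulttype"
params = (42,)

print(stmt)
print(params)
```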
@@ -89,33 +142,42 @@ def reloadOverview():
     for rdic in result_tables:
         # r is the tablename, -> result_FOOBAR
         for key, tablename in rdic.items():
-            table = data.ResultTable(tablename,detaildealer)
+            table = data.ResultTable(tablename, detaildealer)
             getVariants(cur, table)
             overview.add(table)
     # Check if objdump table exists
     cur.execute("SHOW TABLES like 'objdump'")
-    objdump_exists = (len(cur.fetchall()) == 1)
+    objdump_exists = len(cur.fetchall()) == 1
     closeSession()
     return overview, objdump_exists


 """Load overview data at server startup"""
 print("Loading overview data from database. This may take a while ...")
 overview_data, objdump_exists = reloadOverview()
 print("done.")


 ## Get overview data for views.index()
 def getOverview():
     return overview_data


 def objdumpExists():
     return objdump_exists


 """Get Results for one variant id"""


 def getVariantResult(table, variantid):
     cur = loadSession(sqlconfig)
     restbl = scrub(table)

-    stmt = "SELECT resulttype, count(*) as total from %s r join fsppilot on r.pilot_id=fsppilot.id join variant on fsppilot.variant_id=variant.id" % (restbl)
+    stmt = (
+        "SELECT resulttype, count(*) as total from %s r join fsppilot on r.pilot_id=fsppilot.id join variant on fsppilot.variant_id=variant.id"
+        % (restbl)
+    )
     where = " WHERE variant.id = %s group by resulttype ORDER BY resulttype "
     stmt = stmt + where
     cur.execute(stmt, variantid)
@@ -123,10 +185,13 @@ def getVariantResult(table, variantid):
     closeSession()
     return res

-'''Show objdump together with according injection result types.'''
+
+"""Show objdump together with according injection result types."""
+
+
 def getCode(result_table, variant_id, resultlabel=None):
     result_table = scrub(result_table)
-    filt = ''
+    filt = ""
     if not variant_id or not result_table:
         return None
     variant = overview_data.getVariantById(variant_id)
@@ -137,15 +202,18 @@ def getCode(result_table, variant_id, resultlabel=None):
         filt = " and ( "
         for dbn in dbnames[:-1]:
             filt += "resulttype = '" + dbn + "' OR "
-        filt += "resulttype = '" + dbnames[-1] +"' ) "
+        filt += "resulttype = '" + dbnames[-1] + "' ) "
     else:
         filt = " and resulttype = '" + resultlabel + "' "

     # I especially like this one:
     select = "SELECT instr_address, opcode, disassemble, comment, sum(t.time2 - t.time1 + 1) as totals, GROUP_CONCAT(DISTINCT resulttype SEPARATOR ', ') as results FROM variant v "
-    join = " JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id JOIN objdump ON objdump.variant_id = v.id AND objdump.instr_address = injection_instr_absolute " %(scrub(result_table))
+    join = (
+        " JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id JOIN objdump ON objdump.variant_id = v.id AND objdump.instr_address = injection_instr_absolute "
+        % (scrub(result_table))
+    )
     where = "WHERE v.id = %s "
     group = "GROUP BY injection_instr_absolute ORDER BY totals DESC "

     cur = loadSession(sqlconfig)
     stmt = select + join + where + filt + group
@@ -153,27 +221,35 @@ def getCode(result_table, variant_id, resultlabel=None):
     dump = cur.fetchall()

     closeSession()
     resulttypes = variant.getResultLabels()
     return dump, resulttypes


 def getCodeExcerpt(variant_id, instr_addr):
     code = {}
     limit = 8
     cur = loadSession(sqlconfig)
-    cur.execute( """(SELECT instr_address, opcode, disassemble, comment FROM objdump \
+    cur.execute(
+        """(SELECT instr_address, opcode, disassemble, comment FROM objdump \
                      WHERE instr_address < %s AND variant_id = %s \
                      ORDER BY instr_address DESC LIMIT %s) \
-                     ORDER BY instr_address ASC""" , (instr_addr, variant_id, limit))
+                     ORDER BY instr_address ASC""",
+        (instr_addr, variant_id, limit),
+    )
     below = cur.fetchall()
-    code['below'] = below
-    cur.execute("""SELECT instr_address, opcode, disassemble, comment FROM objdump \
+    code["below"] = below
+    cur.execute(
+        """SELECT instr_address, opcode, disassemble, comment FROM objdump \
                    WHERE instr_address >= %s AND variant_id = %s \
-                   ORDER BY instr_address ASC LIMIT %s""", (instr_addr, variant_id, limit+1))
+                   ORDER BY instr_address ASC LIMIT %s""",
+        (instr_addr, variant_id, limit + 1),
+    )
     upper = cur.fetchall()
-    code['upper'] = upper
+    code["upper"] = upper
     closeSession()
     return code


 def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=None):
     restypefilter = None
     if resultlabel:
@@ -185,12 +261,15 @@ def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=None):
         restypefilter = " and ( "
         for dbn in dbnames[:-1]:
             restypefilter += "resulttype = '" + dbn + "' OR "
-        restypefilter += "resulttype = '" + dbnames[-1] +"' ) "
+        restypefilter += "resulttype = '" + dbnames[-1] + "' ) "

-    select = "SELECT bitoffset as 'Bit Offset', hex(injection_instr_absolute) as 'Instruction Address', hex(original_value) as 'Original Value', hex(data_address) as 'Data Address', resulttype as 'Result Type', details as 'Details' from %s " % scrub(result_table)
-    join = "JOIN fsppilot ON pilot_id = fsppilot.id "
-    where = "WHERE variant_id = %s and injection_instr_absolute = %s "
-    order = "ORDER BY data_address, bitoffset"
+    select = (
+        "SELECT bitoffset as 'Bit Offset', hex(injection_instr_absolute) as 'Instruction Address', hex(original_value) as 'Original Value', hex(data_address) as 'Data Address', resulttype as 'Result Type', details as 'Details' from %s "
+        % scrub(result_table)
+    )
+    join = "JOIN fsppilot ON pilot_id = fsppilot.id "
+    where = "WHERE variant_id = %s and injection_instr_absolute = %s "
+    order = "ORDER BY data_address, bitoffset"

     cur = loadSession(sqlconfig)
     if not restypefilter:
@@ -204,8 +283,7 @@ def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=None):
     closeSession()
     return res


 def showDBstatus():
     res = "TODO"
     return res
-
@@ -1,42 +1,53 @@
-from flask import render_template,request
+from flask import render_template, request

 from app import app

 # import model
 # import data

 from . import model

 from . import data

-@app.route('/')
-@app.route('/index')
+
+
+@app.route("/")
+@app.route("/index")
 def index():
-    reload_overview = request.args.get('reload', False)
+    reload_overview = request.args.get("reload", False)
     if reload_overview:
         print("Reloading overview...")
         model.reloadOverview()
-    return render_template("index.html", overview=model.getOverview(), objdump_there = model.objdumpExists())
+    return render_template(
+        "index.html", overview=model.getOverview(), objdump_there=model.objdumpExists()
+    )

-@app.route('/code')
+
+@app.route("/code")
 def code():
-    variant_id = request.args.get('variant_id', None)
-    resulttype = request.args.get('resulttype', None)
-    table = request.args.get('table', None)
-    res,restypes = model.getCode(table, variant_id, resulttype)
+    variant_id = request.args.get("variant_id", None)
+    resulttype = request.args.get("resulttype", None)
+    table = request.args.get("table", None)
+    res, restypes = model.getCode(table, variant_id, resulttype)
     var_dets = model.getOverview().getVariantById(variant_id)
-    return render_template("code.html", results=res, resulttypes=restypes, variant_details=var_dets )
+    return render_template(
+        "code.html", results=res, resulttypes=restypes, variant_details=var_dets
+    )

-@app.route('/instr_details')
+
+@app.route("/instr_details")
 def instr_details():
-    table = request.args.get('table', None)
-    variant_id = request.args.get('variant_id', None)
-    instr_addr = request.args.get('instr_address', None)
-    resulttype = request.args.get('resulttype', None)
+    table = request.args.get("table", None)
+    variant_id = request.args.get("variant_id", None)
+    instr_addr = request.args.get("instr_address", None)
+    resulttype = request.args.get("resulttype", None)
     codeexcerpt = model.getCodeExcerpt(variant_id, instr_addr)
     var_dets = model.getOverview().getVariantById(variant_id)
     results = model.getResultsbyInstruction(table, variant_id, instr_addr, resulttype)
-    return render_template("instr_details.html", code=codeexcerpt, result=results, variant_details=var_dets)
+    return render_template(
+        "instr_details.html", code=codeexcerpt, result=results, variant_details=var_dets
+    )

-@app.route('/about')
+
+@app.route("/about")
 def about():
     stat = model.showDBstatus()
     return render_template("about.html", status=stat)
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-from app import app
-from app import model
+from app import app, model

 app.run(debug=False, port=int(model.opts.port), host=model.opts.host)
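The launcher itself only merges the two imports; the browser is still started through this script with the options defined in `model.py`, e.g. `-c ~/.my.cnf -p 5000 -s localhost -d details.yml` (the details file name here is illustrative).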