Compare commits

...

17 Commits

Author SHA1 Message Date
85f0fc97b5 Add gitea workflow 2026-02-09 13:09:13 +01:00
12a95af8af dockerfile: mount wasm build dir 2026-02-05 17:56:22 +01:00
f309508fdd dockerfile: move to repo root 2026-01-29 00:59:29 +01:00
18e1407254 remove flake.nix 2026-01-25 19:47:34 +01:00
e0c6be3270 dockerfile: update image tags 2026-01-25 19:47:27 +01:00
43de918fa3 expose database port in composefile 2026-01-18 21:17:11 +01:00
c775ba686b resultbrowser: remove circular detaildealer import 2026-01-18 21:07:44 +01:00
eaba782ca6 update mysql host 2026-01-18 19:51:43 +01:00
47b5b147b6 dockerfile: build dump/import/prune tools for generic-tracing 2026-01-18 19:51:38 +01:00
06f7346533 dockerfile: don't cache fail/fail-targets clones 2026-01-18 19:19:42 +01:00
569ee2b898 orchestrate containers using compose instead of makefile 2026-01-18 18:47:52 +01:00
ba991c0639 dockerfile: rename linked outputs 2026-01-18 18:47:17 +01:00
5a9c7b84ee dockerfile: replace links to fail/fail-targets with my forks 2026-01-18 18:47:17 +01:00
59da5d3763 add fork notice to readme 2026-01-18 18:47:17 +01:00
7b0e79d45b dockerfile: finally produce a working dockerfile 2026-01-18 18:47:17 +01:00
81f30926a2 ignore prebuilt binaries 2026-01-18 18:47:17 +01:00
f4f688acb6 dockerfile: update makefile to run with current docker versions 2026-01-18 18:47:17 +01:00
18 changed files with 524 additions and 653 deletions

View File

@ -0,0 +1,58 @@
name: Build FAIL* Docker Images

# Trigger is parked on a non-existent branch; swap in the commented lines to
# re-enable builds on master, restricted to the files that affect the images.
on:
  push:
    branches: [disabled]
    # branches: [master]
    # paths:
    #   - ".gitea/workflows/fail-docker.yaml"
    #   - "fail-base.dockerfile"
    #   - "fail-demo.dockerfile"
    #   - "fail-generic-tracing.dockerfile"

jobs:
  fail-base-docker:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Login to container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.vps.chriphost.de
          username: ${{ secrets.CONTAINER_REGISTRY_USER }}
          password: ${{ secrets.CONTAINER_REGISTRY_TOKEN }}
      - name: Build FAIL* Base Docker Image
        run: docker build --file fail-base.dockerfile --tag gitea.vps.chriphost.de/christoph/fail-base:latest .
      - name: Push FAIL* Base Docker Image
        run: docker push gitea.vps.chriphost.de/christoph/fail-base:latest

  fail-generic-tracing-docker:
    # fail-generic-tracing.dockerfile builds FROM fail-base:latest, so the
    # freshly pushed base image must exist in the registry before this job
    # runs; without `needs` the jobs race and may pull a stale/missing base.
    needs: fail-base-docker
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Login to container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.vps.chriphost.de
          username: ${{ secrets.CONTAINER_REGISTRY_USER }}
          password: ${{ secrets.CONTAINER_REGISTRY_TOKEN }}
      - name: Build FAIL* Generic Tracing Docker Image
        run: docker build --file fail-generic-tracing.dockerfile --tag gitea.vps.chriphost.de/christoph/fail-generic-tracing:latest .
      - name: Push FAIL* Generic Tracing Docker Image
        run: docker push gitea.vps.chriphost.de/christoph/fail-generic-tracing:latest

  fail-demo-docker:
    # fail-demo.dockerfile builds FROM fail-generic-tracing:latest.
    needs: fail-generic-tracing-docker
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Login to container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.vps.chriphost.de
          username: ${{ secrets.CONTAINER_REGISTRY_USER }}
          password: ${{ secrets.CONTAINER_REGISTRY_TOKEN }}
      - name: Build FAIL* Demo Docker Image
        run: docker build --file fail-demo.dockerfile --tag gitea.vps.chriphost.de/christoph/fail-demo:latest .
      - name: Push FAIL* Demo Docker Image
        run: docker push gitea.vps.chriphost.de/christoph/fail-demo:latest

2
.gitignore vendored
View File

@ -10,7 +10,6 @@
*.pyc *.pyc
*.swp *.swp
*~ *~
Makefile
build build
build-* build-*
.deps .deps
@ -56,3 +55,4 @@ debuggers/openocd/src/target/xscale_debug.h
debuggers/openocd/stamp-h1 debuggers/openocd/stamp-h1
.idea .idea
0 prebuilt

13
Makefile Normal file
View File

@ -0,0 +1,13 @@
# Convenience targets for building and running the FAIL* container stack.
.PHONY: docker run stop ssh

# CACHE_DATE invalidates the docker layer cache at the "git clone" layers
# (each dockerfile declares ARG CACHE_DATE directly before its clones), so
# every build fetches fresh sources. fail-generic-tracing clones
# fail-targets/fail-wasm too, so it needs the build-arg as well — omitting it
# would silently reuse stale clones.
docker:
	docker build -t gitea.vps.chriphost.de/christoph/fail-base:latest -f fail-base.dockerfile . --build-arg CACHE_DATE="$(shell date)"
	docker build -t gitea.vps.chriphost.de/christoph/fail-generic-tracing:latest -f fail-generic-tracing.dockerfile . --build-arg CACHE_DATE="$(shell date)"
	docker build -t gitea.vps.chriphost.de/christoph/fail-demo:latest -f fail-demo.dockerfile . --build-arg CACHE_DATE="$(shell date)"

# Recreate the stack from scratch (fresh anonymous volumes) in the background.
run:
	docker compose up -d --force-recreate --renew-anon-volumes -y

stop:
	docker compose down

# SSH into the demo container (port 5022 -> container 22, see compose file).
# Host-key checking is disabled because the container key changes on recreate.
ssh:
	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 5022 fail@127.0.0.1

View File

@ -1,3 +1,5 @@
Forked from [https://github.com/danceos/fail](https://github.com/danceos/fail).
FAIL* - FAult Injection Leveraged FAIL* - FAult Injection Leveraged
======================================= =======================================

28
docker-compose.yaml Normal file
View File

@ -0,0 +1,28 @@
services:
  fail-db:
    image: mysql
    container_name: fail-db
    environment:
      MYSQL_ROOT_PASSWORD: fail
      MYSQL_USER: fail
      MYSQL_PASSWORD: fail
      MYSQL_DATABASE: fail
    ports:
      # Exposed so trace import/analysis tools on the host can reach the DB.
      - "3306:3306"
    networks:
      - fail-network
  fail-demo:
    image: gitea.vps.chriphost.de/christoph/fail-demo:latest
    container_name: fail-demo
    # The demo container's ~/.my.cnf points at host "fail-db", so the
    # database container must be started first.
    depends_on:
      - fail-db
    ports:
      - "5000:5000"  # Result Browser
      - "5022:22"    # SSH
    networks:
      - fail-network
    volumes:
      - "../3 Wasm/examples/build-bochs:/home/fail/fail-wasm/examples/build-bochs"
networks:
  fail-network:
    driver: bridge

132
fail-base.dockerfile Normal file
View File

@ -0,0 +1,132 @@
# Set the base image to Ubuntu Jammy. Jammy includes the correct dependency versions except for LLVM.
FROM ubuntu:jammy
LABEL org.opencontainers.image.authors="Christian Dietrich <stettberger@dokucode.de>"
# TODO: Can shrink the image size down by a LOT:
# - A lot of dependencies are unnecessary, I just need to figure out which
# - Separate build-time / runtime dependencies, then do 2 stages
# - Combine multiple RUNs into a single one
# - Use apt-get --no-install-recommends during package install and apt-get clean afterwards
# Install basic packages
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive TZ=Europe/Berlin apt-get install -y --no-install-recommends \
build-essential \
ca-certificates \
cmake \
cmake-curses-gui \
wget \
git \
doxygen \
screen \
openssh-server \
neovim \
ranger
# NOTE: Only required if the base image is not Jammy
# Install Boost 1.74 (focal includes 1.71)
# Jammy includes the correct version but its llvm is too old
# RUN wget https://archives.boost.io/release/1.74.0/source/boost_1_74_0.tar.gz \
# && tar xvzf boost_1_74_0.tar.gz && cd boost_1_74_0 \
# && chmod +x ./bootstrap.sh \
# && ./bootstrap.sh --prefix=/usr/local \
# && ./b2 && ./b2 install
# && cd / && rm -rf boost_1_74_0.tar.gz && rm -rf boost_1_74_0
# ENV BOOST_ROOT=/usr/local
# NOTE: Only required if the base image is not Focal
# Install LLVM 6.0 from source
# NOTE(review): full LLVM+clang+lld source build — this layer dominates both
# image build time and intermediate disk usage; the source tree is removed at
# the end so only the installed toolchain remains in the layer.
RUN git clone https://github.com/llvm/llvm-project.git && cd llvm-project \
&& git checkout llvmorg-6.0.0 \
&& mkdir build && cd build \
&& cmake -G "Unix Makefiles" ../llvm \
-DCMAKE_BUILD_TYPE=Release \
-DLLVM_ENABLE_TERMINFO=OFF \
-DLLVM_ENABLE_CURSES=OFF \
-DLLVM_ENABLE_PROJECTS="clang;lld" \
&& make -j$(nproc) && make install \
&& cd / && rm -rf ./llvm-project
# Install packages required to build FAIL*
# NOTE(review): this layer (and the runtime-package layer below) reuses the
# apt lists fetched by the very first RUN's `apt-get update`; if the layers
# above are ever rebuilt from a stale cache the install may 404 — consider an
# explicit `apt-get update` here if that bites.
RUN DEBIAN_FRONTEND=noninteractive TZ=Europe/Berlin apt-get install -y --no-install-recommends \
# binutils-dev \
libmysqlclient-dev \
libprotobuf-dev \
libtinfo-dev \
libpcl1-dev \
libdwarf-dev \
libelf-dev \
libiberty-dev \
libboost-thread-dev \
libboost-system-dev \
libboost-regex-dev \
libboost-coroutine-dev \
libboost-context-dev \
libfontconfig1-dev \
zlib1g-dev \
libz3-dev \
libsdl1.2-dev \
# libsdl2-dev \
libgtk2.0-dev \
# libgtk-3-dev \
# libwxbase3.0-dev \
libwxgtk3.0-gtk3-dev \
libncurses-dev \
# libncurses5-dev \
libx11-dev \
xorg-dev \
libasound2-dev
# Install packages required to run FAIL*
RUN DEBIAN_FRONTEND=noninteractive TZ=Europe/Berlin apt-get install -y --no-install-recommends \
protobuf-compiler \
libtinfo6 \
# libtinfo5 \
libmariadb3 \
libprotobuf23 \
libncurses6 \
&& apt-get clean
# NOTE: Only required if we install llvm from apt
# Symlink clang++/llvm-config to match docs
# RUN ln -sf /usr/bin/clang++-6.0 /usr/bin/clang++ \
# && ln -sf /usr/bin/llvm-config-6.0 /usr/bin/llvm-config
# Add a user for compiling FAIL*
# (password "fail" is deliberate: the image runs sshd and the Makefile's
# `make ssh` target logs in as fail@localhost)
RUN useradd fail \
&& mkdir /home/fail && chown fail /home/fail \
&& echo 'fail:fail' | chpasswd && chsh fail --shell /bin/bash \
&& adduser fail sudo
# SSH login fix. Otherwise user is kicked off after login
RUN mkdir /var/run/sshd \
&& sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
ENV NOTVISIBLE="in users profile"
RUN echo "export VISIBLE=now" >> /etc/profile
USER fail
ENV HOME=/home/fail
WORKDIR /home/fail
# Get AspectC++ (originally v1.2) for 64 Bit
ARG acversion="2.5"
RUN wget http://www.aspectc.org/releases/"$acversion"/ac-bin-linux-x86-64bit-"$acversion".tar.gz \
&& tar xvzf ac-bin-linux-x86-64bit-"$acversion".tar.gz \
&& mkdir bin && mv aspectc++/ac++ aspectc++/ag++ bin/ \
&& rm -rf aspectc++ && rm -rf ac-bin-linux-x86-64bit-"$acversion".tar.gz
# Prepend ~/bin (ac++/ag++ from the step above) to an otherwise standard PATH.
ENV PATH=/home/fail/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
# Clone FAIL*
# Break docker layer cache
# Passing a fresh CACHE_DATE (the Makefile passes "$(shell date)") changes
# this ARG and therefore invalidates every layer below, forcing a re-clone.
ARG CACHE_DATE=1970-01-01
# RUN git clone https://github.com/danceos/fail.git
RUN git clone https://gitea.vps.chriphost.de/christoph/fail
WORKDIR fail
# Switch back to root: sshd (the container's CMD) must be started as root.
USER root
# Accept SSH connections
EXPOSE 22
CMD ["/usr/sbin/sshd", "-D"]

View File

@ -3,20 +3,9 @@
# generic-tracing experiment was already built and the binaries are in # generic-tracing experiment was already built and the binaries are in
# place (~fail/bin/*) # place (~fail/bin/*)
FROM danceos/fail-generic-tracing FROM gitea.vps.chriphost.de/christoph/fail-generic-tracing:latest
MAINTAINER Christian Dietrich <stettberger@dokucode.de>
# Install Additional Packages
RUN apt-get install -y \
python-minimal \
grub-common \
xorriso \
grub-pc-bin \
mysql-client \
python-flask \
python-mysqldb \
python-yaml
LABEL org.opencontainers.image.authors="Christian Dietrich <stettberger@dokucode.de>"
# Passwort for MySQL Daemon # Passwort for MySQL Daemon
ADD my.cnf /home/fail/.my.cnf ADD my.cnf /home/fail/.my.cnf
@ -24,10 +13,8 @@ RUN chown fail /home/fail/.my.cnf
USER fail USER fail
WORKDIR /home/fail WORKDIR /home/fail
RUN echo 'export PATH=$HOME/bin:$PATH' >> ~/.profile;\ RUN echo 'export PATH=$HOME/bin:$PATH' >> ~/.profile \
echo 'cd $HOME/fail-targets' >> ~/.profile && echo 'cd $HOME/fail-wasm/examples' >> ~/.profile
RUN git clone https://github.com/danceos/fail-targets.git
WORKDIR fail WORKDIR fail
RUN mkdir build; cd build RUN mkdir build; cd build
@ -36,7 +23,7 @@ WORKDIR build
RUN cmake \ RUN cmake \
-DAGXX=/home/fail/bin/ag++ \ -DAGXX=/home/fail/bin/ag++ \
-DBOOST_THREAD_LIBRARY=/usr/lib/x86_64-linux-gnu/libpthread.so \ -DBOOST_THREAD_LIBRARY=/usr/lib/x86_64-linux-gnu/libpthread.so \
# Enable / Disable features \
-DBUILD_ARM=OFF \ -DBUILD_ARM=OFF \
-DBUILD_BOCHS=ON \ -DBUILD_BOCHS=ON \
-DBUILD_CAPSTONE_DISASSEMBLER=OFF \ -DBUILD_CAPSTONE_DISASSEMBLER=OFF \
@ -54,17 +41,16 @@ RUN cmake \
-DBUILD_QEMU=OFF \ -DBUILD_QEMU=OFF \
-DBUILD_T32=OFF \ -DBUILD_T32=OFF \
-DBUILD_X86=ON \ -DBUILD_X86=ON \
# \
-DCLIENT_JOB_INITIAL=1 \ -DCLIENT_JOB_INITIAL=1 \
-DCLIENT_JOB_LIMIT=1000 \ -DCLIENT_JOB_LIMIT=1000 \
-DCLIENT_JOB_REQUEST_SEC=30 \ -DCLIENT_JOB_REQUEST_SEC=30 \
-DCLIENT_RAND_BACKOFF_TEND=8 \ -DCLIENT_RAND_BACKOFF_TEND=8 \
-DCLIENT_RAND_BACKOFF_TSTART=3 \ -DCLIENT_RAND_BACKOFF_TSTART=3 \
-DCLIENT_RETRY_COUNT=3 \ -DCLIENT_RETRY_COUNT=3 \
# ;-separated list \
# ;-separated list
-DCMAKE_AGPP_FLAGS="-D__NO_MATH_INLINES" \ -DCMAKE_AGPP_FLAGS="-D__NO_MATH_INLINES" \
# \
-DCMAKE_BUILD_TYPE=Release \ -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr/local \ -DCMAKE_INSTALL_PREFIX=/usr/local \
-DCONFIG_BOCHS_COMPRESS_STATE=ON \ -DCONFIG_BOCHS_COMPRESS_STATE=ON \
@ -89,18 +75,16 @@ RUN cmake \
-DCONFIG_SR_SAVE=ON \ -DCONFIG_SR_SAVE=ON \
-DCONFIG_SUPPRESS_INTERRUPTS=ON \ -DCONFIG_SUPPRESS_INTERRUPTS=ON \
-DENABLE_DATABASE_TESTS=OFF \ -DENABLE_DATABASE_TESTS=OFF \
# ;-separated list \
# ;-separated list
-DEXPERIMENTS_ACTIVATED="generic-experiment" \ -DEXPERIMENTS_ACTIVATED="generic-experiment" \
# \
-DLibIberty_INCLUDE_DIRS=/usr/include/libiberty \ -DLibIberty_INCLUDE_DIRS=/usr/include/libiberty \
-DLibIberty_LIBRARIES=/usr/lib/x86_64-linux-gnu/libiberty.a \ -DLibIberty_LIBRARIES=/usr/lib/x86_64-linux-gnu/libiberty.a \
-DMYSQL_CONFIG=/usr/bin/mysql_config \ -DMYSQL_CONFIG=/usr/bin/mysql_config \
-DMYSQL_CONFIG_PREFER_PATH=/bin \ -DMYSQL_CONFIG_PREFER_PATH=/bin \
# ;-separated list \
# ;-separated list
-DPLUGINS_ACTIVATED="serialoutput" \ -DPLUGINS_ACTIVATED="serialoutput" \
# \
-DSERVER_COMM_HOSTNAME=localhost \ -DSERVER_COMM_HOSTNAME=localhost \
-DSERVER_COMM_TCP_PORT=1111 \ -DSERVER_COMM_TCP_PORT=1111 \
-DSERVER_OUT_QUEUE_SIZE=0 \ -DSERVER_OUT_QUEUE_SIZE=0 \
@ -114,26 +98,61 @@ RUN cmake \
-DTEST_MYSQL_USER=fail_test \ -DTEST_MYSQL_USER=fail_test \
-DVERBOSE_MAKE=OFF \ -DVERBOSE_MAKE=OFF \
-D_filename=/usr/include/wx-3.0/wx/version.h \ -D_filename=/usr/include/wx-3.0/wx/version.h \
# ;-separated list \
# ;-separated list
-Dbochs_configure_params="--enable-a20-pin;--enable-x86-64;--enable-cpu-level=6;--enable-ne2000;--enable-acpi;--enable-pci;--enable-usb;--enable-trace-cache;--enable-fast-function-calls;--enable-host-specific-asms;--enable-disasm;--enable-readline;--enable-clgd54xx;--enable-fpu;--enable-vmx=2;--enable-monitor-mwait;--enable-cdrom;--enable-sb16=linux;--enable-gdb-stub;--disable-docbook;--with-nogui;--with-x11;--with-wx;--with-sdl" \ -Dbochs_configure_params="--enable-a20-pin;--enable-x86-64;--enable-cpu-level=6;--enable-ne2000;--enable-acpi;--enable-pci;--enable-usb;--enable-trace-cache;--enable-fast-function-calls;--enable-host-specific-asms;--enable-disasm;--enable-readline;--enable-clgd54xx;--enable-fpu;--enable-vmx=2;--enable-monitor-mwait;--enable-cdrom;--enable-sb16=linux;--enable-gdb-stub;--disable-docbook;--with-nogui;--with-x11;--with-wx;--with-sdl" \
# \
-Dbochs_install_prefix=/home/fail/fail/simulators/bochs/install \ -Dbochs_install_prefix=/home/fail/fail/simulators/bochs/install \
.. ..
# We need to manually build Bochs first to generate the bochs/config.h - the external_project configure step # We need to manually build Bochs first to generate the bochs/config.h - the external_project configure step
# (where this file is generated) is run at build time (not configure time), but the build order is not defined correctly. # (where this file is generated) is run at build time (not configure time), but the build order is not defined correctly.
RUN cmake --build . --target libfailbochs_external-configure -- -j$(nproc) # RUN cmake --build . --target libfailbochs_external-configure \
# && cmake --build .
RUN cmake --build . --target libfailbochs_external-configure -- -j$(nproc) \
&& cmake --build . -- -j$(nproc)
# Make FAIL* RUN ln -s /home/fail/fail/build/bin/fail-client /home/fail/bin/generic-experiment-client \
RUN cmake --build . -- -j$(nproc) && ln -s /home/fail/fail/build/bin/generic-experiment-server /home/fail/bin/ \
&& ln -s /home/fail/fail/tools/analysis/resultbrowser/run.py /home/fail/bin/resultbrowser.py
# Make FAIL*
RUN ln -s /home/fail/fail/build/bin/fail-client /home/fail/bin/generic-experiment-client; \
ln -s /home/fail/fail/build/bin/generic-experiment-server /home/fail/bin/; \
ln -s /home/fail/fail/tools/analysis/resultbrowser/run.py /home/fail/bin/resultbrowser
# For the resultbrowser, we expose port 5000 to the outside world.
EXPOSE 5000
USER root USER root
# Install additional packages
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive TZ=Europe/Berlin apt-get install -y --no-install-recommends \
grub-common \
xorriso \
grub-pc-bin \
mysql-client \
python2-minimal \
python2-dev \
# python2-flask \
# python2-mysqldb \
# python2-yaml \
unzip \
bochs \
&& apt-get clean
# Fix old shebangs
RUN ln -sf /usr/bin/python2 /usr/bin/python
# Install python packages
RUN wget https://bootstrap.pypa.io/pip/2.7/get-pip.py \
&& python2 get-pip.py \
&& wget https://raw.githubusercontent.com/paulfitz/mysql-connector-c/master/include/my_config.h -O /usr/include/mysql/my_config.h \
&& pip2 install flask pyyaml MySQL-python
USER fail
# Clone FAIL* targets
# Break docker layer cache
ARG CACHE_DATE=1970-01-01
WORKDIR /home/fail
# RUN git clone https://github.com/danceos/fail-targets.git
RUN git clone https://gitea.vps.chriphost.de/christoph/fail-targets
RUN git clone https://gitea.vps.chriphost.de/christoph/fail-wasm
USER root
# Resultbrowser
EXPOSE 5000

View File

@ -1,20 +1,21 @@
# Inherit from docker container that has the fail source code prepared, # Inherit from docker container that has the fail source code prepared,
# including all tools which are needed to build FAIL* # including all tools which are needed to build FAIL*
FROM danceos/fail-base FROM gitea.vps.chriphost.de/christoph/fail-base:latest
MAINTAINER Christian Dietrich <stettberger@dokucode.de>
LABEL org.opencontainers.image.authors="Christian Dietrich <stettberger@dokucode.de>"
USER fail USER fail
# Configure the Weather Monitor Experiment # Configure the Weather Monitor Experiment
ENV PATH /home/fail/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ENV PATH=/home/fail/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
WORKDIR /home/fail/fail WORKDIR /home/fail/fail
RUN mkdir build-tracer; cd build-tracer RUN mkdir build-tracer && cd build-tracer
WORKDIR build-tracer WORKDIR build-tracer
RUN cmake \ RUN cmake \
-DAGXX=/home/fail/bin/ag++ \ -DAGXX=/home/fail/bin/ag++ \
-DBOOST_THREAD_LIBRARY=/usr/lib/x86_64-linux-gnu/libpthread.so \ -DBOOST_THREAD_LIBRARY=/usr/lib/x86_64-linux-gnu/libpthread.so \
# Enable / Disable features \
-DBUILD_ARM=OFF \ -DBUILD_ARM=OFF \
-DBUILD_BOCHS=ON \ -DBUILD_BOCHS=ON \
-DBUILD_CAPSTONE_DISASSEMBLER=OFF \ -DBUILD_CAPSTONE_DISASSEMBLER=OFF \
@ -22,27 +23,26 @@ RUN cmake \
-DBUILD_CONVERT_TRACE=OFF \ -DBUILD_CONVERT_TRACE=OFF \
-DBUILD_DATA_AGGREGATOR=OFF \ -DBUILD_DATA_AGGREGATOR=OFF \
-DBUILD_DUMP_HOPS=OFF \ -DBUILD_DUMP_HOPS=OFF \
-DBUILD_DUMP_TRACE=OFF \ -DBUILD_DUMP_TRACE=ON \
-DBUILD_FAULTSPACEPLOT=OFF \ -DBUILD_FAULTSPACEPLOT=OFF \
-DBUILD_GEM5=OFF \ -DBUILD_GEM5=OFF \
-DBUILD_IMPORT_TRACE=OFF \ -DBUILD_IMPORT_TRACE=ON \
-DBUILD_LLVM_DISASSEMBLER=ON \ -DBUILD_LLVM_DISASSEMBLER=ON \
-DBUILD_PANDA=OFF \ -DBUILD_PANDA=OFF \
-DBUILD_PRUNE_TRACE=OFF \ -DBUILD_PRUNE_TRACE=ON \
-DBUILD_QEMU=OFF \ -DBUILD_QEMU=OFF \
-DBUILD_T32=OFF \ -DBUILD_T32=OFF \
-DBUILD_X86=ON \ -DBUILD_X86=ON \
# \
-DCLIENT_JOB_INITIAL=1 \ -DCLIENT_JOB_INITIAL=1 \
-DCLIENT_JOB_LIMIT=1000 \ -DCLIENT_JOB_LIMIT=1000 \
-DCLIENT_JOB_REQUEST_SEC=30 \ -DCLIENT_JOB_REQUEST_SEC=30 \
-DCLIENT_RAND_BACKOFF_TEND=8 \ -DCLIENT_RAND_BACKOFF_TEND=8 \
-DCLIENT_RAND_BACKOFF_TSTART=3 \ -DCLIENT_RAND_BACKOFF_TSTART=3 \
-DCLIENT_RETRY_COUNT=3 \ -DCLIENT_RETRY_COUNT=3 \
# ;-separated list \
# ;-separated list
-DCMAKE_AGPP_FLAGS="-D__NO_MATH_INLINES" \ -DCMAKE_AGPP_FLAGS="-D__NO_MATH_INLINES" \
# \
-DCMAKE_BUILD_TYPE=Release \ -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr/local \ -DCMAKE_INSTALL_PREFIX=/usr/local \
-DCONFIG_BOCHS_COMPRESS_STATE=ON \ -DCONFIG_BOCHS_COMPRESS_STATE=ON \
@ -67,18 +67,16 @@ RUN cmake \
-DCONFIG_SR_SAVE=ON \ -DCONFIG_SR_SAVE=ON \
-DCONFIG_SUPPRESS_INTERRUPTS=ON \ -DCONFIG_SUPPRESS_INTERRUPTS=ON \
-DENABLE_DATABASE_TESTS=OFF \ -DENABLE_DATABASE_TESTS=OFF \
# ;-separated list \
# ;-separated list
-DEXPERIMENTS_ACTIVATED="generic-tracing" \ -DEXPERIMENTS_ACTIVATED="generic-tracing" \
# \
-DLibIberty_INCLUDE_DIRS=/usr/include/libiberty \ -DLibIberty_INCLUDE_DIRS=/usr/include/libiberty \
-DLibIberty_LIBRARIES=/usr/lib/x86_64-linux-gnu/libiberty.a \ -DLibIberty_LIBRARIES=/usr/lib/x86_64-linux-gnu/libiberty.a \
-DMYSQL_CONFIG=/usr/bin/mysql_config \ -DMYSQL_CONFIG=/usr/bin/mysql_config \
-DMYSQL_CONFIG_PREFER_PATH=/bin \ -DMYSQL_CONFIG_PREFER_PATH=/bin \
# ;-separated list \
# ;-separated list
-DPLUGINS_ACTIVATED="tracing;serialoutput" \ -DPLUGINS_ACTIVATED="tracing;serialoutput" \
# \
-DSERVER_COMM_HOSTNAME=localhost \ -DSERVER_COMM_HOSTNAME=localhost \
-DSERVER_COMM_TCP_PORT=1111 \ -DSERVER_COMM_TCP_PORT=1111 \
-DSERVER_OUT_QUEUE_SIZE=0 \ -DSERVER_OUT_QUEUE_SIZE=0 \
@ -92,26 +90,25 @@ RUN cmake \
-DTEST_MYSQL_USER=fail_test \ -DTEST_MYSQL_USER=fail_test \
-DVERBOSE_MAKE=OFF \ -DVERBOSE_MAKE=OFF \
-D_filename=/usr/include/wx-3.0/wx/version.h \ -D_filename=/usr/include/wx-3.0/wx/version.h \
# ;-separated list \
# ;-separated list
-Dbochs_configure_params="--enable-a20-pin;--enable-x86-64;--enable-cpu-level=6;--enable-ne2000;--enable-acpi;--enable-pci;--enable-usb;--enable-trace-cache;--enable-fast-function-calls;--enable-host-specific-asms;--enable-disasm;--enable-readline;--enable-clgd54xx;--enable-fpu;--enable-vmx=2;--enable-monitor-mwait;--enable-cdrom;--enable-sb16=linux;--enable-gdb-stub;--disable-docbook;--with-nogui;--with-x11;--with-wx;--with-sdl" \ -Dbochs_configure_params="--enable-a20-pin;--enable-x86-64;--enable-cpu-level=6;--enable-ne2000;--enable-acpi;--enable-pci;--enable-usb;--enable-trace-cache;--enable-fast-function-calls;--enable-host-specific-asms;--enable-disasm;--enable-readline;--enable-clgd54xx;--enable-fpu;--enable-vmx=2;--enable-monitor-mwait;--enable-cdrom;--enable-sb16=linux;--enable-gdb-stub;--disable-docbook;--with-nogui;--with-x11;--with-wx;--with-sdl" \
# \
-Dbochs_install_prefix=/home/fail/fail/simulators/bochs/install \ -Dbochs_install_prefix=/home/fail/fail/simulators/bochs/install \
.. ..
# We need to manually build Bochs first to generate the bochs/config.h - the external_project configure step # We need to manually build Bochs first to generate the bochs/config.h - the external_project configure step
# (where this file is generated) is run at build time (not configure time), but the build order is not defined correctly. # (where this file is generated) is run at build time (not configure time), but the build order is not defined correctly.
RUN cmake --build . --target libfailbochs_external-configure -- -j$(nproc) # RUN cmake --build . --target libfailbochs_external-configure \
# && cmake --build .
RUN cmake --build . --target libfailbochs_external-configure -- -j$(nproc) \
&& cmake --build . -- -j$(nproc)
# Make FAIL* RUN ln -s /home/fail/fail/build-tracer/bin/fail-client /home/fail/bin/generic-tracing-client \
RUN cmake --build . -- -j$(nproc) && ln -s /home/fail/fail/build-tracer/bin/import-trace /home/fail/bin/ \
&& ln -s /home/fail/fail/build-tracer/bin/prune-trace /home/fail/bin/ \
RUN ln -s /home/fail/fail/build-tracer/bin/fail-client /home/fail/bin/fail-x86-tracing; \ && ln -s /home/fail/fail/build-tracer/bin/dump-trace /home/fail/bin/ \
ln -s /home/fail/fail/build-tracer/bin/import-trace /home/fail/bin/; \ && ln -s /home/fail/fail/build-tracer/bin/convert-trace /home/fail/bin/ \
ln -s /home/fail/fail/build-tracer/bin/prune-trace /home/fail/bin/; \ && ln -s /home/fail/fail/tools/bochs-experiment-runner/bochs-experiment-runner.py /home/fail/bin/ \
ln -s /home/fail/fail/build-tracer/bin/dump-trace /home/fail/bin/; \ && chmod a+x /home/fail/bin/bochs-experiment-runner.py
ln -s /home/fail/fail/build-tracer/bin/convert-trace /home/fail/bin/; \
cp /home/fail/fail/tools/bochs-experiment-runner/bochs-experiment-runner.py /home/fail/bin/bochs-experiment-runner.py; \
chmod a+x /home/fail/bin/bochs-experiment-runner.py;
USER root USER root

59
flake.lock generated
View File

@ -1,59 +0,0 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1768032153,
"narHash": "sha256-6kD1MdY9fsE6FgSwdnx29hdH2UcBKs3/+JJleMShuJg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3146c6aa9995e7351a398e17470e15305e6e18ff",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

334
flake.nix
View File

@ -1,334 +0,0 @@
rec {
description = "FAIL* - Fault Injection Leveraged";
inputs = {
nixpkgs.url = "nixpkgs"; # Use nixpkgs from system registry
flake-utils.url = "github:numtide/flake-utils";
};
outputs = {
self,
nixpkgs,
flake-utils,
}:
# Create a shell (and possibly package) for each possible system, not only x86_64-linux
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {
inherit system;
config.allowUnfree = true;
overlays = [];
};
inherit (pkgs) lib stdenv;
# ===========================================================================================
# Define custom dependencies
# ===========================================================================================
# 64 bit C/C++ compilers that don't collide (use the same libc)
bintools = pkgs.wrapBintoolsWith {
bintools = pkgs.bintools.bintools; # Unwrapped bintools
libc = pkgs.glibc;
};
gcc = lib.hiPrio (pkgs.wrapCCWith {
cc = pkgs.gcc.cc; # Unwrapped gcc
libc = pkgs.glibc;
bintools = bintools;
});
clang = pkgs.wrapCCWith {
cc = pkgs.clang.cc; # Unwrapped clang
libc = pkgs.glibc;
bintools = bintools;
};
# Multilib C/C++ compilers that don't collide (use the same libc)
# bintools_multilib = pkgs.wrapBintoolsWith {
# bintools = pkgs.bintools.bintools; # Unwrapped bintools
# libc = pkgs.glibc_multi;
# };
# gcc_multilib = lib.hiPrio (pkgs.wrapCCWith {
# cc = pkgs.gcc.cc; # Unwrapped gcc
# libc = pkgs.glibc_multi;
# bintools = bintools_multilib;
# });
# clang_multilib = pkgs.wrapCCWith {
# cc = pkgs.clang.cc; # Unwrapped clang
# libc = pkgs.glibc_multi;
# bintools = bintools_multilib;
# };
aspectcxx = stdenv.mkDerivation rec {
pname = "aspectcxx";
version = "1.2";
src = pkgs.fetchurl {
url = "http://www.aspectc.org/releases/${version}/ac-bin-linux-x86-64bit-${version}.tar.gz";
sha256 = "sha256-8wOrPYC99E3aV5/Js2EBI4V/OoD9y10fYZt8ikPtvt4="; # 1.2
# sha256 = "sha256-gmqoZFkPvs60b6yuMHNtYvuGJXGnbwAYEiyXxp/fmPI="; # 2.0
# sha256 = "sha256-+rEflemXNwn4XZZwSOqCsr6/KFGV8wxW6PeXzHHUK0o="; # 2.5
};
nativeBuildInputs = [
pkgs.autoPatchelfHook
];
unpackPhase = ''
tar xvzf $src
'';
installPhase = ''
mkdir -p $out/bin
cp aspectc++/ac++ $out/bin/
cp aspectc++/ag++ $out/bin/
'';
};
libpcl = stdenv.mkDerivation rec {
pname = "libpcl";
version = "1.12-2";
src = pkgs.fetchurl {
url = "https://launchpadlibrarian.net/521269537/libpcl1_1.12-2_amd64.deb";
sha256 = "sha256-GL3mjPAccAtRMAJPnDMCHiDf6xNvGi4oUWylOIqBjP0=";
};
nativeBuildInputs = with pkgs; [
dpkg
autoPatchelfHook
];
unpackPhase = ''
dpkg-deb -x $src .
'';
installPhase = ''
mkdir -p $out
cp -r usr/* $out/
'';
};
libpcl-dev = stdenv.mkDerivation rec {
pname = "libpcl-dev";
version = "1.12-2";
src = pkgs.fetchurl {
url = "https://launchpadlibrarian.net/521269536/libpcl1-dev_${version}_amd64.deb";
sha256 = "sha256-Z1wP0K8hfV1f9ypee9XIx6H0JOTidhtXDBe82mlRaOg=";
};
nativeBuildInputs = with pkgs; [
dpkg
autoPatchelfHook
];
unpackPhase = ''
dpkg-deb -x $src .
'';
installPhase = ''
mkdir -p $out
cp -r usr/* $out/
# Hacky bullshit
cp ${libpcl}/lib/x86_64-linux-gnu/libpcl.so.1.0.11 $out/lib/x86_64-linux-gnu/libpcl.so.1.0.11
rm $out/share/doc/libpcl1-dev/changelog.Debian.gz
'';
};
# ===========================================================================================
# Specify dependencies
# https://nixos.org/manual/nixpkgs/stable/#ssec-stdenv-dependencies-overview
# Just for a "nix develop" shell, buildInputs can be used for everything.
# ===========================================================================================
# Add dependencies to nativeBuildInputs if they are executed during the build:
# - Those which are needed on $PATH during the build, for example cmake and pkg-config
# - Setup hooks, for example makeWrapper
# - Interpreters needed by patchShebangs for build scripts (with the --build flag), which can be the case for e.g. perl
nativeBuildInputs = with pkgs; [
# Languages:
bintools
gcc
# clang
aspectcxx
# C/C++:
gnumake
cmake
pkg-config
doxygen
];
# Add dependencies to buildInputs if they will end up copied or linked into the final output or otherwise used at runtime:
# - Libraries used by compilers, for example zlib
# - Interpreters needed by patchShebangs for scripts which are installed, which can be the case for e.g. perl
buildInputs = with pkgs; [
# C/C++:
libpcl-dev
libiberty
libelf
libdwarf
boost
llvmPackages_18.llvm
mariadb-connector-c
fontconfig
zlib
capstone
protobuf
binutils
# No clue what I need from those
xorg.libX11
xorg.libXext
xorg.libXrender
xorg.libXrandr
xorg.libXinerama
xorg.libXcursor
xorg.libXi
xorg.libXfixes
];
# ===========================================================================================
# Define buildable + installable packages
# ===========================================================================================
package = stdenv.mkDerivation {
inherit nativeBuildInputs buildInputs;
pname = "fail";
version = "1.0.1";
src = ./.;
enableParallelBuilding = true;
cmakeFlags = [
# Our CMake is too new :( Try it anyway, can still downgrade later...
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5"
# AspectC++
# "-DCMAKE_AGPP_FLAGS=--c_compiler${clang}/bin/clang++"
"-DCMAKE_AGPP_FLAGS=-D__NO_MATH_INLINES"
# "-DCMAKE_AGPP_FLAGS=--c_compiler${clang}/bin/clang++ -D__NO_MATH_INLINES -D__STRICT_ANSI__"
# Tell CMake where the libs are
"-DLibIberty_INCLUDE_DIRS=${pkgs.libiberty}/include"
"-DLibIberty_LIBRARIES=${pkgs.libiberty}/lib/libiberty.a"
"-DLIBELF_INCLUDE_DIRS=${pkgs.libelf}/include"
"-DLIBELF_LIBRARIES=${pkgs.libelf}/lib/libelf.a"
"-DLIBDWARF_INCLUDE_DIRS=${pkgs.libdwarf}/include"
"-DLIBDWARF_LIBRARIES=${pkgs.libdwarf}/lib/libdwarf.a"
"-DCAPSTONE_INCLUDE_DIR=${pkgs.capstone}/include"
"-DCAPSTONE_LIBRARY=${pkgs.capstone}/lib/libcapstone.a"
"-DLIBPCL_LIBRARIES=${libpcl-dev}/lib/libpcl.a"
];
installPhase = ''
mkdir -p $out/bin
mv ./fail $out/bin/
'';
};
in rec {
# Provide package for "nix build"
defaultPackage = package;
defaultApp = flake-utils.lib.mkApp {
drv = defaultPackage;
};
# Provide environment for "nix develop"
devShell = pkgs.mkShell {
inherit nativeBuildInputs buildInputs;
name = description;
# =========================================================================================
# Define environment variables
# =========================================================================================
# Custom dynamic libraries:
# LD_LIBRARY_PATH = builtins.concatStringsSep ":" [
# # Rust Bevy GUI app:
# # "${pkgs.xorg.libX11}/lib"
# # "${pkgs.xorg.libXcursor}/lib"
# # "${pkgs.xorg.libXrandr}/lib"
# # "${pkgs.xorg.libXi}/lib"
# # "${pkgs.libGL}/lib"
#
# # JavaFX app:
# # "${pkgs.libGL}/lib"
# # "${pkgs.gtk3}/lib"
# # "${pkgs.glib.out}/lib"
# # "${pkgs.xorg.libXtst}/lib"
# ];
# Dynamic libraries from buildinputs:
LD_LIBRARY_PATH = nixpkgs.lib.makeLibraryPath buildInputs;
# Set matplotlib backend
# MPLBACKEND = "TkAgg";
# =========================================================================================
# Define shell environment
# =========================================================================================
# Setup the shell when entering the "nix develop" environment (bash script).
shellHook = let
mkCmakeScript = type: let
typeLower = lib.toLower type;
in
pkgs.writers.writeFish "cmake-${typeLower}.fish" ''
cd $FLAKE_PROJECT_ROOT
echo "Removing build directory ./cmake-build-${typeLower}/"
rm -rf ./cmake-build-${typeLower}
echo "Creating build directory"
mkdir cmake-build-${typeLower}
cd cmake-build-${typeLower}
echo "Running cmake"
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE="${type}" -DCMAKE_EXPORT_COMPILE_COMMANDS="On" ..
echo "Linking compile_commands.json"
cd ..
ln -sf ./cmake-build-${typeLower}/compile_commands.json ./compile_commands.json
'';
cmakeDebug = mkCmakeScript "Debug";
cmakeRelease = mkCmakeScript "Release";
mkBuildScript = type: let
typeLower = lib.toLower type;
in
pkgs.writers.writeFish "cmake-build.fish" ''
cd $FLAKE_PROJECT_ROOT/cmake-build-${typeLower}
echo "Running cmake"
cmake --build .
'';
buildDebug = mkBuildScript "Debug";
buildRelease = mkBuildScript "Release";
# Use this to specify commands that should be ran after entering fish shell
initProjectShell = pkgs.writers.writeFish "init-shell.fish" ''
echo "Entering \"${description}\" environment..."
# Determine the project root, used e.g. in cmake scripts
set -g -x FLAKE_PROJECT_ROOT (git rev-parse --show-toplevel)
# C/C++:
# abbr -a cmake-debug "${cmakeDebug}"
# abbr -a cmake-release "${cmakeRelease}"
# abbr -a build-debug "${buildDebug}"
# abbr -a build-release "${buildRelease}"
'';
in
builtins.concatStringsSep "\n" [
# Launch into pure fish shell
''
exec "$(type -p fish)" -C "source ${initProjectShell} && abbr -a menu '${pkgs.bat}/bin/bat "${initProjectShell}"'"
''
];
};
});
}

View File

@ -1,5 +1,5 @@
[client] [client]
host=127.0.0.1 host=fail-db
user=fail user=fail
password=fail password=fail
database=fail database=fail

View File

@ -1,19 +0,0 @@
all:
docker build -t danceos/fail-base fail-base
docker build -t danceos/fail-generic-tracing fail-generic-tracing
docker build -t danceos/fail-demo fail-demo
run-fail-db:
docker run --name fail-db \
-e MYSQL_ROOT_PASSWORD=fail \
-e MYSQL_USER=fail \
-e MYSQL_PASSWORD=fail \
-e MYSQL_DATABASE=fail \
-d mysql
run-fail-demo:
docker run --name fail-demo -p 127.0.0.1:5000:5000 --link fail-db:mysql -d danceos/fail-demo
ssh-fail-demo:
ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no fail@$$(docker inspect --format "{{ .NetworkSettings.IPAddress }}" fail-demo)

View File

@ -1,73 +0,0 @@
# Set the base image to Ubuntu Xenial (16.04)
FROM ubuntu:xenial
MAINTAINER Christian Dietrich <stettberger@dokucode.de>
# Install Packages required to build FAIL*
RUN apt-get update
RUN apt-get install -y \
build-essential \
libmysqlclient-dev \
protobuf-compiler \
libprotobuf-dev \
libpcl1-dev \
libboost-thread-dev \
libboost-system-dev \
libboost-regex-dev \
libboost-coroutine-dev \
libboost-context-dev \
libdwarf-dev \
libelf-dev \
libfontconfig1-dev \
zlib1g-dev \
binutils-dev \
libiberty-dev \
llvm-3.9-dev \
clang-3.9 \
libsdl1.2-dev \
libgtk2.0-dev \
libwxgtk3.0-dev \
libncurses5-dev \
cmake \
cmake-curses-gui \
doxygen \
git \
screen \
wget \
openssh-server \
vim \
ranger
# Symlink clang++ to match docs
RUN ln -sf /usr/bin/clang++-3.9 /usr/bin/clang++
# Add a user for compiling FAIL*
RUN useradd fail; mkdir /home/fail; chown fail /home/fail
RUN echo 'fail:fail' | chpasswd; chsh fail --shell /bin/bash
RUN adduser fail sudo
# SSH login fix. Otherwise user is kicked off after login
RUN mkdir /var/run/sshd
RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
ENV NOTVISIBLE "in users profile"
RUN echo "export VISIBLE=now" >> /etc/profile
USER fail
ENV HOME /home/fail
WORKDIR /home/fail
# Get AspectC++ (originally v1.2) for 64 Bit
ARG acversion="2.0"
RUN wget http://www.aspectc.org/releases/"$acversion"/ac-bin-linux-x86-64bit-"$acversion".tar.gz
RUN tar xvzf ac-bin-linux-x86-64bit-"$acversion".tar.gz; mkdir bin; mv aspectc++/ac++ aspectc++/ag++ bin/; rm -rf aspectc++
ENV PATH /home/fail/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
# Clone FAIL*
RUN git clone https://github.com/danceos/fail.git
WORKDIR fail
USER root
EXPOSE 22
CMD ["/usr/sbin/sshd", "-D"]

View File

@ -1,9 +1,12 @@
from pprint import pprint from pprint import pprint
from . import details from . import details
from . import model from app.detaildealer import detaildealer
def scrub(table_name): def scrub(table_name):
return ''.join( chr for chr in table_name if chr.isalnum() or chr == '_' ) return "".join(chr for chr in table_name if chr.isalnum() or chr == "_")
class Resulttype: class Resulttype:
def __init__(self, name, count): def __init__(self, name, count):
@ -16,28 +19,27 @@ class Resulttype:
def getCount(self): def getCount(self):
return self.count return self.count
class Variant: class Variant:
def __init__(self, id, name, table, benchmark, detail): def __init__(self, id, name, table, benchmark, detail):
self.id = id self.id = id
self.dbname = name self.dbname = name
self.parenttable = table # TableDetails self.parenttable = table # TableDetails
self.details = detail # VariantDetails self.details = detail # VariantDetails
self.benchmark = benchmark # BenchmarkDetails self.benchmark = benchmark # BenchmarkDetails
self.results = {} self.results = {}
self.totalresults = 0 self.totalresults = 0
def getMapper(self): def getMapper(self):
mapper = self.benchmark.getMapper() mapper = self.benchmark.getMapper()
if not mapper: #try benchmark mapper if not mapper: # try benchmark mapper
mapper = self.details.getMapper() mapper = self.details.getMapper()
if not mapper: # of not there, try parent tables mapper if not mapper: # of not there, try parent tables mapper
mapper = self.parenttable.getMapper() mapper = self.parenttable.getMapper()
if not mapper: # no mapper found at all, try default mapper if not mapper: # no mapper found at all, try default mapper
mapper = model.detaildealer.getDefaultMapper() mapper = detaildealer.getDefaultMapper()
return mapper return mapper
def addResulttype(self, name, count): def addResulttype(self, name, count):
mapper = self.getMapper() mapper = self.getMapper()
label = mapper.getLabel(name) label = mapper.getLabel(name)
@ -70,17 +72,28 @@ class Variant:
return self.totalresults return self.totalresults
def __str__(self): def __str__(self):
ret = "Variant: " + self.getDetails().getTitle() + " - " + self.getBenchmarkDetails().getTitle() +" (id: " + str( self.id )+ ")" + " " ret = (
ret += "Total Results: " + str( self.totalresults ) + "\n" "Variant: "
+ self.getDetails().getTitle()
+ " - "
+ self.getBenchmarkDetails().getTitle()
+ " (id: "
+ str(self.id)
+ ")"
+ " "
)
ret += "Total Results: " + str(self.totalresults) + "\n"
for v in self.results: for v in self.results:
ret += "\t" + v.name + ": " + str( v.count ) + "\n" ret += "\t" + v.name + ": " + str(v.count) + "\n"
return ret return ret
__repr__ = __str__ __repr__ = __str__
'''A ResultTable contains n Variants'''
class ResultTable:
"""A ResultTable contains n Variants"""
class ResultTable:
def __init__(self, name, cfg): def __init__(self, name, cfg):
self.name = scrub(name) self.name = scrub(name)
self.details = cfg.getTable(name) self.details = cfg.getTable(name)
@ -89,7 +102,7 @@ class ResultTable:
def addVariant(self, var): def addVariant(self, var):
if var.getId() in self.variants: if var.getId() in self.variants:
return return
self.variants[var.getId()] = var # Add if not existing yet self.variants[var.getId()] = var # Add if not existing yet
def getVariant(self, id): def getVariant(self, id):
if id in self.variants: if id in self.variants:
@ -97,7 +110,7 @@ class ResultTable:
return None return None
def getVariantById(self, varid): def getVariantById(self, varid):
for k,v in self.variants.items(): for k, v in self.variants.items():
if int(v.getId()) == int(varid): if int(v.getId()) == int(varid):
return v return v
return None return None
@ -110,12 +123,16 @@ class ResultTable:
def __str__(self): def __str__(self):
ret = "Result: " + self.getDetails().getTitle() + "\n" ret = "Result: " + self.getDetails().getTitle() + "\n"
for k,v in self.variants.items(): for k, v in self.variants.items():
ret += "\t" + str(v) + "\n" ret += "\t" + str(v) + "\n"
return ret return ret
__repr__ = __str__ __repr__ = __str__
'''Overview has n ResultTables'''
"""Overview has n ResultTables"""
class Overview: class Overview:
def __init__(self): def __init__(self):
self.tables = {} self.tables = {}
@ -130,7 +147,7 @@ class Overview:
return self.tables.get(dbname, None) return self.tables.get(dbname, None)
def getVariantById(self, variant_id): def getVariantById(self, variant_id):
for key,table in self.tables.items(): for key, table in self.tables.items():
variant = table.getVariantById(variant_id) variant = table.getVariantById(variant_id)
if variant: if variant:
return variant return variant
@ -139,5 +156,3 @@ class Overview:
def length(self): def length(self):
return len(self.tables) return len(self.tables)

View File

@ -0,0 +1,4 @@
# Instantiate global detail dealer, will be initialized in reloadOverview
from app import details
detaildealer = details.DetailDealer()

View File

@ -1,31 +1,60 @@
#!/usr/bin/env python #!/usr/bin/env python
import MySQLdb
import MySQLdb.cursors
import yaml
import sys
import os.path import os.path
import yaml
import sys
import MySQLdb
import MySQLdb.cursors
from app.detaildealer import detaildealer
from pprint import pprint from pprint import pprint
from . import data from . import data, details
from . import details
"""Get command line options""" """Get command line options"""
from optparse import OptionParser from optparse import OptionParser
parser = OptionParser() parser = OptionParser()
parser.add_option("-c", "--conf", type="string", help="MySQL config file", dest="config", default= os.path.join(os.path.expanduser("~"),".my.cnf")) parser.add_option(
parser.add_option("-s", "--host", type="string", help="Webserver hostname", dest="host", default="localhost") "-c",
parser.add_option("-d", "--details", type="string", help="Detailed information (YAML configuration file)", dest="details", default=None) "--conf",
parser.add_option("-p", "--port", type="string", help="Webserver port", dest="port", default="5000") type="string",
help="MySQL config file",
dest="config",
default=os.path.join(os.path.expanduser("~"), ".my.cnf"),
)
parser.add_option(
"-s",
"--host",
type="string",
help="Webserver hostname",
dest="host",
default="localhost",
)
parser.add_option(
"-d",
"--details",
type="string",
help="Detailed information (YAML configuration file)",
dest="details",
default=None,
)
parser.add_option(
"-p", "--port", type="string", help="Webserver port", dest="port", default="5000"
)
opts, args = parser.parse_args() opts, args = parser.parse_args()
"""Check if configuration files exist""" """Check if configuration files exist"""
def checkConfigFile(msg, fname): def checkConfigFile(msg, fname):
if not os.path.isfile(fname): if not os.path.isfile(fname):
sys.exit("Error: '" + fname + "' not found") sys.exit("Error: '" + fname + "' not found")
else: else:
print(msg, "->", fname) print(msg, "->", fname)
# Check sql config # Check sql config
sqlconfig = opts.config sqlconfig = opts.config
checkConfigFile("MySQL config", sqlconfig) checkConfigFile("MySQL config", sqlconfig)
@ -35,50 +64,74 @@ if opts.details:
checkConfigFile("Details", opts.details) checkConfigFile("Details", opts.details)
# Instantiate global detail dealer, will be initialized in reloadOverview # Instantiate global detail dealer, will be initialized in reloadOverview
detaildealer = details.DetailDealer() # detaildealer = details.DetailDealer()
"""Remove all characters from string except alphanuermics and _""" """Remove all characters from string except alphanuermics and _"""
def scrub(table_name): def scrub(table_name):
return ''.join( chr for chr in table_name if chr.isalnum() or chr == '_' ) return "".join(chr for chr in table_name if chr.isalnum() or chr == "_")
"""Global mysql handles""" """Global mysql handles"""
db = None db = None
cur = None cur = None
def loadSession(dbconf): def loadSession(dbconf):
global db global db
if db: if db:
db.close() db.close()
db = MySQLdb.connect(read_default_file=dbconf, cursorclass=MySQLdb.cursors.DictCursor) db = MySQLdb.connect(
read_default_file=dbconf, cursorclass=MySQLdb.cursors.DictCursor
)
return db.cursor() return db.cursor()
def closeSession(): def closeSession():
if cur: cur.close() if cur:
cur.close()
global db global db
db.close() db.close()
db = None db = None
'''Populate variant results for overview data''' """Populate variant results for overview data"""
def getVariants(cur, table): def getVariants(cur, table):
restbl = table.getDetails().getDBName() restbl = table.getDetails().getDBName()
cur.execute("""SELECT sum((t.time2 - t.time1 + 1) * width) AS total, resulttype,variant, v.id as variant_id, benchmark, details FROM variant v JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id GROUP BY v.id, resulttype, details""" % (restbl)) # % is used here, as a tablename must not be quoted cur.execute(
"""SELECT sum((t.time2 - t.time1 + 1) * width) AS total, resulttype,variant, v.id as variant_id, benchmark, details FROM variant v JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id GROUP BY v.id, resulttype, details"""
% (restbl)
) # % is used here, as a tablename must not be quoted
res = cur.fetchall() res = cur.fetchall()
rdic = {} rdic = {}
# Build dict with variant id as key # Build dict with variant id as key
for r in res: for r in res:
# if variant entry already exists: # if variant entry already exists:
variant = table.getVariant(int(r['variant_id'])) variant = table.getVariant(int(r["variant_id"]))
if not variant: # if variant did not exist yet, create it: if not variant: # if variant did not exist yet, create it:
variant_details = detaildealer.getVariant(restbl, r['variant']) variant_details = detaildealer.getVariant(restbl, r["variant"])
benchmark_details = detaildealer.getBenchmark(restbl, r['variant'], r['benchmark']) benchmark_details = detaildealer.getBenchmark(
restbl, r["variant"], r["benchmark"]
)
table_details = detaildealer.getTable(restbl) table_details = detaildealer.getTable(restbl)
variant = data.Variant(int(r['variant_id']), r['variant'], table_details, benchmark_details, variant_details) variant = data.Variant(
variant.addResulttype(r['resulttype'], r['total']) int(r["variant_id"]),
r["variant"],
table_details,
benchmark_details,
variant_details,
)
variant.addResulttype(r["resulttype"], r["total"])
table.addVariant(variant) table.addVariant(variant)
'''Get overview data for index page'''
"""Get overview data for index page"""
def reloadOverview(): def reloadOverview():
overview = data.Overview() overview = data.Overview()
detaildealer.reload(opts.details) detaildealer.reload(opts.details)
@ -89,33 +142,42 @@ def reloadOverview():
for rdic in result_tables: for rdic in result_tables:
# r is the tablename, -> result_FOOBAR # r is the tablename, -> result_FOOBAR
for key, tablename in rdic.items(): for key, tablename in rdic.items():
table = data.ResultTable(tablename,detaildealer) table = data.ResultTable(tablename, detaildealer)
getVariants(cur, table) getVariants(cur, table)
overview.add(table) overview.add(table)
# Check if objdump table exists # Check if objdump table exists
cur.execute("SHOW TABLES like 'objdump'") cur.execute("SHOW TABLES like 'objdump'")
objdump_exists = (len(cur.fetchall()) == 1) objdump_exists = len(cur.fetchall()) == 1
closeSession() closeSession()
return overview, objdump_exists return overview, objdump_exists
"""Load overview data at server startup""" """Load overview data at server startup"""
print("Loading overview data from database. This may take a while ...") print("Loading overview data from database. This may take a while ...")
overview_data, objdump_exists = reloadOverview() overview_data, objdump_exists = reloadOverview()
print("done.") print("done.")
## Get overview data for views.index() ## Get overview data for views.index()
def getOverview(): def getOverview():
return overview_data return overview_data
def objdumpExists(): def objdumpExists():
return objdump_exists return objdump_exists
"""Get Results for one variant id""" """Get Results for one variant id"""
def getVariantResult(table, variantid): def getVariantResult(table, variantid):
cur = loadSession(sqlconfig) cur = loadSession(sqlconfig)
restbl = scrub(table) restbl = scrub(table)
stmt = "SELECT resulttype, count(*) as total from %s r join fsppilot on r.pilot_id=fsppilot.id join variant on fsppilot.variant_id=variant.id" % (restbl) stmt = (
"SELECT resulttype, count(*) as total from %s r join fsppilot on r.pilot_id=fsppilot.id join variant on fsppilot.variant_id=variant.id"
% (restbl)
)
where = " WHERE variant.id = %s group by resulttype ORDER BY resulttype " where = " WHERE variant.id = %s group by resulttype ORDER BY resulttype "
stmt = stmt + where stmt = stmt + where
cur.execute(stmt, variantid) cur.execute(stmt, variantid)
@ -123,10 +185,13 @@ def getVariantResult(table, variantid):
closeSession() closeSession()
return res return res
'''Show objdump together with according injection result types.'''
"""Show objdump together with according injection result types."""
def getCode(result_table, variant_id, resultlabel=None): def getCode(result_table, variant_id, resultlabel=None):
result_table = scrub(result_table) result_table = scrub(result_table)
filt = '' filt = ""
if not variant_id or not result_table: if not variant_id or not result_table:
return None return None
variant = overview_data.getVariantById(variant_id) variant = overview_data.getVariantById(variant_id)
@ -137,15 +202,18 @@ def getCode(result_table, variant_id, resultlabel=None):
filt = " and ( " filt = " and ( "
for dbn in dbnames[:-1]: for dbn in dbnames[:-1]:
filt += "resulttype = '" + dbn + "' OR " filt += "resulttype = '" + dbn + "' OR "
filt += "resulttype = '" + dbnames[-1] +"' ) " filt += "resulttype = '" + dbnames[-1] + "' ) "
else: else:
filt = " and resulttype = '" + resultlabel + "' " filt = " and resulttype = '" + resultlabel + "' "
# I especially like this one: # I especially like this one:
select = "SELECT instr_address, opcode, disassemble, comment, sum(t.time2 - t.time1 + 1) as totals, GROUP_CONCAT(DISTINCT resulttype SEPARATOR ', ') as results FROM variant v " select = "SELECT instr_address, opcode, disassemble, comment, sum(t.time2 - t.time1 + 1) as totals, GROUP_CONCAT(DISTINCT resulttype SEPARATOR ', ') as results FROM variant v "
join = " JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id JOIN objdump ON objdump.variant_id = v.id AND objdump.instr_address = injection_instr_absolute " %(scrub(result_table)) join = (
where = "WHERE v.id = %s " " JOIN trace t ON v.id = t.variant_id JOIN fspgroup g ON g.variant_id = t.variant_id AND g.instr2 = t.instr2 AND g.data_address = t.data_address JOIN %s r ON r.pilot_id = g.pilot_id JOIN fsppilot p ON r.pilot_id = p.id JOIN objdump ON objdump.variant_id = v.id AND objdump.instr_address = injection_instr_absolute "
group = "GROUP BY injection_instr_absolute ORDER BY totals DESC " % (scrub(result_table))
)
where = "WHERE v.id = %s "
group = "GROUP BY injection_instr_absolute ORDER BY totals DESC "
cur = loadSession(sqlconfig) cur = loadSession(sqlconfig)
stmt = select + join + where + filt + group stmt = select + join + where + filt + group
@ -153,27 +221,35 @@ def getCode(result_table, variant_id, resultlabel=None):
dump = cur.fetchall() dump = cur.fetchall()
closeSession() closeSession()
resulttypes = variant.getResultLabels() resulttypes = variant.getResultLabels()
return dump, resulttypes return dump, resulttypes
def getCodeExcerpt(variant_id, instr_addr): def getCodeExcerpt(variant_id, instr_addr):
code = {} code = {}
limit = 8 limit = 8
cur = loadSession(sqlconfig) cur = loadSession(sqlconfig)
cur.execute( """(SELECT instr_address, opcode, disassemble, comment FROM objdump \ cur.execute(
"""(SELECT instr_address, opcode, disassemble, comment FROM objdump \
WHERE instr_address < %s AND variant_id = %s \ WHERE instr_address < %s AND variant_id = %s \
ORDER BY instr_address DESC LIMIT %s) \ ORDER BY instr_address DESC LIMIT %s) \
ORDER BY instr_address ASC""" , (instr_addr, variant_id, limit)) ORDER BY instr_address ASC""",
(instr_addr, variant_id, limit),
)
below = cur.fetchall() below = cur.fetchall()
code['below'] = below code["below"] = below
cur.execute("""SELECT instr_address, opcode, disassemble, comment FROM objdump \ cur.execute(
"""SELECT instr_address, opcode, disassemble, comment FROM objdump \
WHERE instr_address >= %s AND variant_id = %s \ WHERE instr_address >= %s AND variant_id = %s \
ORDER BY instr_address ASC LIMIT %s""", (instr_addr, variant_id, limit+1)) ORDER BY instr_address ASC LIMIT %s""",
(instr_addr, variant_id, limit + 1),
)
upper = cur.fetchall() upper = cur.fetchall()
code['upper'] = upper code["upper"] = upper
closeSession() closeSession()
return code return code
def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=None): def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=None):
restypefilter = None restypefilter = None
if resultlabel: if resultlabel:
@ -185,12 +261,15 @@ def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=No
restypefilter = " and ( " restypefilter = " and ( "
for dbn in dbnames[:-1]: for dbn in dbnames[:-1]:
restypefilter += "resulttype = '" + dbn + "' OR " restypefilter += "resulttype = '" + dbn + "' OR "
restypefilter += "resulttype = '" + dbnames[-1] +"' ) " restypefilter += "resulttype = '" + dbnames[-1] + "' ) "
select = "SELECT bitoffset as 'Bit Offset', hex(injection_instr_absolute) as 'Instruction Address', hex(original_value) as 'Original Value', hex(data_address) as 'Data Address', resulttype as 'Result Type', details as 'Details' from %s " % scrub(result_table) select = (
join = "JOIN fsppilot ON pilot_id = fsppilot.id " "SELECT bitoffset as 'Bit Offset', hex(injection_instr_absolute) as 'Instruction Address', hex(original_value) as 'Original Value', hex(data_address) as 'Data Address', resulttype as 'Result Type', details as 'Details' from %s "
where = "WHERE variant_id = %s and injection_instr_absolute = %s " % scrub(result_table)
order = "ORDER BY data_address, bitoffset" )
join = "JOIN fsppilot ON pilot_id = fsppilot.id "
where = "WHERE variant_id = %s and injection_instr_absolute = %s "
order = "ORDER BY data_address, bitoffset"
cur = loadSession(sqlconfig) cur = loadSession(sqlconfig)
if not restypefilter: if not restypefilter:
@ -204,8 +283,7 @@ def getResultsbyInstruction(result_table, variant_id, instr_addr, resultlabel=No
closeSession() closeSession()
return res return res
def showDBstatus(): def showDBstatus():
res = "TODO" res = "TODO"
return res return res

View File

@ -1,42 +1,53 @@
from flask import render_template,request from flask import render_template, request
from app import app from app import app
# import model # import model
# import data # import data
from . import model from . import model
from . import data from . import data
@app.route('/')
@app.route('/index') @app.route("/")
@app.route("/index")
def index(): def index():
reload_overview = request.args.get('reload', False) reload_overview = request.args.get("reload", False)
if reload_overview: if reload_overview:
print("Reloading overview...") print("Reloading overview...")
model.reloadOverview() model.reloadOverview()
return render_template("index.html", overview=model.getOverview(), objdump_there = model.objdumpExists()) return render_template(
"index.html", overview=model.getOverview(), objdump_there=model.objdumpExists()
)
@app.route('/code')
@app.route("/code")
def code(): def code():
variant_id = request.args.get('variant_id', None) variant_id = request.args.get("variant_id", None)
resulttype = request.args.get('resulttype', None) resulttype = request.args.get("resulttype", None)
table = request.args.get('table', None) table = request.args.get("table", None)
res,restypes = model.getCode(table, variant_id, resulttype) res, restypes = model.getCode(table, variant_id, resulttype)
var_dets = model.getOverview().getVariantById(variant_id) var_dets = model.getOverview().getVariantById(variant_id)
return render_template("code.html", results=res, resulttypes=restypes, variant_details=var_dets ) return render_template(
"code.html", results=res, resulttypes=restypes, variant_details=var_dets
)
@app.route('/instr_details')
@app.route("/instr_details")
def instr_details(): def instr_details():
table = request.args.get('table', None) table = request.args.get("table", None)
variant_id = request.args.get('variant_id', None) variant_id = request.args.get("variant_id", None)
instr_addr = request.args.get('instr_address', None) instr_addr = request.args.get("instr_address", None)
resulttype = request.args.get('resulttype', None) resulttype = request.args.get("resulttype", None)
codeexcerpt = model.getCodeExcerpt(variant_id, instr_addr) codeexcerpt = model.getCodeExcerpt(variant_id, instr_addr)
var_dets = model.getOverview().getVariantById(variant_id) var_dets = model.getOverview().getVariantById(variant_id)
results = model.getResultsbyInstruction(table, variant_id, instr_addr, resulttype) results = model.getResultsbyInstruction(table, variant_id, instr_addr, resulttype)
return render_template("instr_details.html", code=codeexcerpt, result=results, variant_details=var_dets) return render_template(
"instr_details.html", code=codeexcerpt, result=results, variant_details=var_dets
)
@app.route('/about')
@app.route("/about")
def about(): def about():
stat = model.showDBstatus() stat = model.showDBstatus()
return render_template("about.html", status=stat) return render_template("about.html", status=stat)

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
from app import app from app import app, model
from app import model
app.run(debug=False, port=int(model.opts.port), host=model.opts.host) app.run(debug=False, port=int(model.opts.port), host=model.opts.host)