Update Dockerfile, Readme and rules

Update evtx_dump binaries
Remove forwarding
Bump version to 2.40
This commit is contained in:
wagga40
2025-03-30 14:45:15 +02:00
parent a1c535bfeb
commit e497c9448f
32 changed files with 383882 additions and 385927 deletions

View File

@@ -1,53 +1,39 @@
# Since `evtx_dump` precompiled binaries are not shipped with musl support, we need to use the
# Debian-based Python image instead of the Alpine-based image, which increases the size of the
# final image (~70 MB overhead).
#
ARG PYTHON_VERSION="3.11-slim"
ARG PYTHON_VERSION="3.13-slim"
FROM "python:${PYTHON_VERSION}" AS stage
FROM python:${PYTHON_VERSION}
ARG ZIRCOLITE_INSTALL_PREFIX="/opt"
ARG ZIRCOLITE_REPOSITORY_URI="https://github.com/wagga40/Zircolite.git"
ARG ZIRCOLITE_REQUIREMENTS_FILE="requirements.full.txt"
WORKDIR ${ZIRCOLITE_INSTALL_PREFIX}/zircolite
# Copy requirements first to leverage Docker cache
COPY ${ZIRCOLITE_REQUIREMENTS_FILE} .
RUN pip install --no-cache-dir -r ${ZIRCOLITE_REQUIREMENTS_FILE}
# Install git only when needed for rule updates
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \
git && \
apt-get autoremove --purge --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
WORKDIR "${ZIRCOLITE_INSTALL_PREFIX}"
# Copy files in order of change frequency (least to most)
COPY README.md .
COPY docs/ docs/
COPY pics/ pics/
COPY templates/ templates/
COPY config/ config/
COPY bin/ bin/
COPY gui/ gui/
COPY rules/ rules/
COPY zircolite.py .
RUN git clone \
"${ZIRCOLITE_REPOSITORY_URI}" \
zircolite
LABEL author="wagga40" \
description="A standalone SIGMA-based detection tool for EVTX, Auditd and Sysmon for Linux logs." \
maintainer="wagga40"
WORKDIR "${ZIRCOLITE_INSTALL_PREFIX}/zircolite"
RUN chmod 0755 zircolite.py && \
python3 zircolite.py -U
RUN chmod 0755 \
zircolite.py
FROM "python:${PYTHON_VERSION}"
LABEL author="wagga40"
LABEL description="A standalone SIGMA-based detection tool for EVTX, Auditd and Sysmon for Linux logs."
LABEL maintainer="wagga40"
ARG ZIRCOLITE_INSTALL_PREFIX="/opt"
WORKDIR "${ZIRCOLITE_INSTALL_PREFIX}"
COPY --chown=root:root --from=stage \
"${ZIRCOLITE_INSTALL_PREFIX}/zircolite" \
zircolite
WORKDIR "${ZIRCOLITE_INSTALL_PREFIX}/zircolite"
RUN python3 -m pip install \
--requirement requirements.full.txt
RUN python3 zircolite.py -U
ENTRYPOINT [ "python3", "zircolite.py" ]
CMD [ "--help" ]
ENTRYPOINT ["python3", "zircolite.py"]
CMD ["--help"]

View File

@@ -3,7 +3,7 @@
DOCKER?=docker
DOCKER_BUILD_FLAGS?=
DOCKER_REGISTRY?=docker.io
DOCKER_TAG?=2.30.0
DOCKER_TAG?=2.40.0
GIT?=git
PY3?=python3
DATE=$(shell date +%s)

View File

@@ -1,6 +1,6 @@
# <p align="center">![](pics/zircolite_400.png)</p>
## Standalone SIGMA-based detection tool for EVTX, Auditd, Sysmon for linux, XML or JSONL/NDJSON Logs
## Standalone SIGMA-based detection tool for EVTX, Auditd, Sysmon for linux, XML, CSV or JSONL/NDJSON Logs
![](pics/Zircolite_v2.9.gif)
[![python](https://img.shields.io/badge/python-3.10-blue)](https://www.python.org/)
@@ -13,6 +13,7 @@
- Sysmon for Linux
- EVTXtract
- CSV and XML logs
- JSON Array logs
### Key Features
@@ -20,6 +21,7 @@
- **SIGMA Backend**: It is based on a SIGMA backend (SQLite) and does not use internal SIGMA-to-something conversion.
- **Advanced Log Manipulation**: It can manipulate input logs by splitting fields and applying transformations, allowing for more flexible and powerful log analysis.
- **Flexible Export**: Zircolite can export results to multiple formats using Jinja [templates](templates), including JSON, CSV, JSONL, Splunk, Elastic, Zinc, Timesketch, and more.
- **Multiple Input Formats**: Support for various log formats including EVTX, JSON Lines, JSON Arrays, CSV, XML, and more.
**You can use Zircolite directly in Python or use the binaries provided in the [releases](https://github.com/wagga40/Zircolite/releases).**
@@ -31,7 +33,7 @@ The project has only been tested with Python 3.10. If you only want to use base
The use of [evtx_dump](https://github.com/omerbenamram/evtx) is **optional but required by default (because it is -for now- much faster)**, If you do not want to use it you have to use the `--noexternal` option. The tool is provided if you clone the Zircolite repository (the official repository is [here](https://github.com/omerbenamram/evtx)).
:warning: On some systems (Mac, Arm, ...) the `evtx` library may need Rust and Cargo to be installed.
:warning: On some systems (Mac, Arm, ...) the `evtx` Python library may need Rust and Cargo to be installed.
## Quick start
@@ -52,21 +54,30 @@ If your EVTX files have the extension ".evtx" :
python3 zircolite.py --evtx sysmon.evtx --ruleset rules/rules_windows_sysmon_pysigma.json
```
- The `--evtx` argument can be a file or a folder. If it is a folder, all EVTX files in the current folder and subfolders will be selected.
- The SYSMON ruleset used is a default one, intended for analyzing logs from endpoints with SYSMON installed.
### Auditd / Sysmon for Linux / JSONL or NDJSON logs :
### Other Log Formats:
```shell
# For Auditd logs
python3 zircolite.py --events auditd.log --ruleset rules/rules_linux.json --auditd
# For Sysmon for Linux logs
python3 zircolite.py --events sysmon.log --ruleset rules/rules_linux.json --sysmon4linux
# For JSONL or NDJSON logs
python3 zircolite.py --events <JSON_FOLDER_OR_FILE> --ruleset rules/rules_windows_sysmon_pysigma.json --jsononly
# For JSON Array logs
python3 zircolite.py --events <JSON_FOLDER_OR_FILE> --ruleset rules/rules_windows_sysmon_pysigma.json --json-array
# For CSV logs
python3 zircolite.py --events <CSV_FOLDER_OR_FILE> --ruleset rules/rules_windows_sysmon_pysigma.json --csv-input
# For XML logs
python3 zircolite.py --events <XML_FOLDER_OR_FILE> --ruleset rules/rules_windows_sysmon_pysigma.json --xml-input
```
- The `--events` argument can be a file or a folder. If it is a folder, all event files in the current folder and subfolders will be selected.
- The `--events` argument can be a file or a folder. If it is a folder, all event files in the current folder and subfolders will be selected.
- The SYSMON ruleset used is a default one, intended for analyzing logs from endpoints with SYSMON installed.
> [!TIP]
> If you want to try the tool you can test with [EVTX-ATTACK-SAMPLES](https://github.com/sbousseaden/EVTX-ATTACK-SAMPLES) (EVTX Files).

130
bin/create_universal_bin_macos.sh Executable file
View File

@@ -0,0 +1,130 @@
#!/bin/bash
# ===================================================
# Universal Binary Creator for macOS
# Creates a universal binary from x86_64 and arm64 binaries
# ===================================================

# Print a colored, prefixed message.
# $1 = message type (ERROR, INFO, WARNING, ...), $2 = message text.
print_msg() {
    local type=$1
    local message=$2
    case "$type" in
        "ERROR")   echo -e "\033[1;31mERROR:\033[0m $message" ;;
        "INFO")    echo -e "\033[1;32mINFO:\033[0m $message" ;;
        "WARNING") echo -e "\033[1;33mWARNING:\033[0m $message" ;;
        "USAGE")   echo -e "\033[1;34mUSAGE:\033[0m $message" ;;
        "OPTIONS") echo -e "\033[1;34mOPTIONS:\033[0m $message" ;;
        "NOTE")    echo -e "\033[1;34mNOTE:\033[0m $message" ;;
        "PROCESS") echo -e "\033[1;34mPROCESS:\033[0m $message" ;;
        "SUCCESS") echo -e "\033[1;32mSUCCESS:\033[0m $message" ;;
        "DRY_RUN") echo -e "\033[1;33m[DRY RUN]\033[0m $message" ;;
        *) echo "$message" ;;
    esac
}

# Display title
echo "====================================================="
echo " Universal Binary Creator for macOS "
echo "====================================================="

# Check for required tools ('lipo' ships with the Xcode command line tools)
if ! command -v lipo &> /dev/null; then
    print_msg "ERROR" "'lipo' command not found. Please install Xcode command line tools."
    exit 1
fi

# Initialize dry-run flag
DRY_RUN=false

# Parse options (-d = dry run)
while getopts "d" opt; do
    case $opt in
        d) DRY_RUN=true ;;
        *) ;;
    esac
done
shift $((OPTIND-1))

# Validate command line arguments
if [ "$#" -lt 2 ] || [ "$#" -gt 3 ]; then
    print_msg "USAGE" "$0 [-d] <binary1> <binary2> [<output_universal_binary>]"
    print_msg "OPTIONS" ""
    echo "  -d    Dry run mode (show commands without executing them)"
    print_msg "NOTE" "The script will automatically detect the architecture of each binary."
    echo "  If no output binary name is provided, a random name will be generated."
    exit 1
fi

# Store input and output paths
BINARY1="$1"
BINARY2="$2"
OUTPUT_BINARY="$3"

# Generate random output name if not provided
if [ -z "$OUTPUT_BINARY" ]; then
    # Read directly from /dev/urandom (no useless 'cat') and keep 8 alphanumeric chars
    RANDOM_SUFFIX=$(LC_ALL=C tr -dc 'a-zA-Z0-9' < /dev/urandom | head -c 8)
    OUTPUT_BINARY="universal_binary_${RANDOM_SUFFIX}"
    print_msg "WARNING" "No output name provided. Using random name: $OUTPUT_BINARY"
fi

# Validate input files exist
for BINARY in "$BINARY1" "$BINARY2"; do
    if [ ! -f "$BINARY" ]; then
        print_msg "ERROR" "Binary '$BINARY' not found!"
        exit 1
    fi
done

# Detect architectures of input binaries
ARCH1=$(lipo -archs "$BINARY1")
ARCH2=$(lipo -archs "$BINARY2")

# Check if we have both required architectures across the two inputs
if [[ ! "$ARCH1 $ARCH2" =~ "x86_64" ]] || [[ ! "$ARCH1 $ARCH2" =~ "arm64" ]]; then
    print_msg "ERROR" "Missing required architecture!"
    echo "Binary 1 ($BINARY1): $ARCH1"
    echo "Binary 2 ($BINARY2): $ARCH2"
    echo "Both x86_64 and arm64 architectures are required."
    exit 1
fi

# Identify which binary is which architecture
X86_BINARY=""
ARM_BINARY=""
if [[ "$ARCH1" =~ "x86_64" ]]; then X86_BINARY="$BINARY1"; fi
if [[ "$ARCH1" =~ "arm64" ]]; then ARM_BINARY="$BINARY1"; fi
if [[ "$ARCH2" =~ "x86_64" ]]; then X86_BINARY="$BINARY2"; fi
if [[ "$ARCH2" =~ "arm64" ]]; then ARM_BINARY="$BINARY2"; fi

# BUGFIX: if one input is already a universal binary (reports both architectures),
# both variables end up pointing at the same file and 'lipo -create' would fail with
# a confusing duplicate-architecture error. Fail early with a clear message instead.
if [ "$X86_BINARY" = "$ARM_BINARY" ]; then
    print_msg "ERROR" "'$X86_BINARY' already contains both architectures; nothing to combine."
    exit 1
fi

print_msg "INFO" "Detected x86_64 binary: $X86_BINARY"
print_msg "INFO" "Detected arm64 binary: $ARM_BINARY"

# Ensure output directory exists
OUTPUT_DIR=$(dirname "$OUTPUT_BINARY")
if [ ! -d "$OUTPUT_DIR" ] && [ "$OUTPUT_DIR" != "." ]; then
    if [ "$DRY_RUN" = true ]; then
        print_msg "DRY_RUN" "Would create directory: $OUTPUT_DIR"
    else
        mkdir -p "$OUTPUT_DIR"
    fi
fi

# Create the universal binary
print_msg "PROCESS" "Creating universal binary..."
if [ "$DRY_RUN" = true ]; then
    print_msg "DRY_RUN" "Would execute: lipo -create -output \"$OUTPUT_BINARY\" \"$X86_BINARY\" \"$ARM_BINARY\""
    print_msg "DRY_RUN" "Would execute: chmod +x \"$OUTPUT_BINARY\""
    print_msg "DRY_RUN" "Universal binary would be created: $OUTPUT_BINARY"
else
    if lipo -create -output "$OUTPUT_BINARY" "$X86_BINARY" "$ARM_BINARY"; then
        chmod +x "$OUTPUT_BINARY"
        print_msg "SUCCESS" "Universal binary created successfully: $OUTPUT_BINARY"
        lipo -info "$OUTPUT_BINARY"
    else
        print_msg "ERROR" "Failed to create universal binary."
        exit 1
    fi
fi

Binary file not shown.

BIN
bin/evtx_dump_lin_arm Executable file

Binary file not shown.

Binary file not shown.

BIN
bin/evtx_dump_win.exe Normal file → Executable file

Binary file not shown.

185
bin/package_evtx_binaries.sh Executable file
View File

@@ -0,0 +1,185 @@
#!/bin/bash
# Download the latest evtx_dump release binaries from GitHub, rename them with
# Zircolite's platform-specific names and place them in the current directory.
set -e

# Print a colored, prefixed message.
# $1 = message type (ERROR, INFO, WARNING, ...), $2 = message text.
print_msg() {
    local type=$1
    local message=$2
    case "$type" in
        "ERROR")   echo -e "\033[1;31mERROR:\033[0m $message" ;;
        "INFO")    echo -e "\033[1;32mINFO:\033[0m $message" ;;
        "WARNING") echo -e "\033[1;33mWARNING:\033[0m $message" ;;
        "USAGE")   echo -e "\033[1;34mUSAGE:\033[0m $message" ;;
        "OPTIONS") echo -e "\033[1;34mOPTIONS:\033[0m $message" ;;
        "NOTE")    echo -e "\033[1;34mNOTE:\033[0m $message" ;;
        "PROCESS") echo -e "\033[1;34mPROCESS:\033[0m $message" ;;
        "SUCCESS") echo -e "\033[1;32mSUCCESS:\033[0m $message" ;;
        "DRY_RUN") echo -e "\033[1;33m[DRY RUN]\033[0m $message" ;;
        *) echo "$message" ;;
    esac
}

# Determine the latest evtx release tag via the GitHub API
get_latest_release() {
    curl --silent "https://api.github.com/repos/omerbenamram/evtx/releases/latest" |
        grep '"tag_name":' |
        sed -E 's/.*"([^"]+)".*/\1/'
}

# Final binary names
MACOS_FINAL_BINARY_NAME="evtx_dump_mac"
LINUX_FINAL_BINARY_NAME="evtx_dump_lin"
LINUX_ARM_FINAL_BINARY_NAME="evtx_dump_lin_arm"
WINDOWS_FINAL_BINARY_NAME="evtx_dump_win.exe"

# Working directory for downloaded binaries.
# BUGFIX: defined up-front; it was previously referenced in the pre-existing
# directory check before being assigned (so that check was always a no-op).
INSTALL_DIR="./evtx_binaries"

# Directory containing this script, used to locate the helper script reliably
# even when this script is invoked from another working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"

# Check if linux cli tools are installed
if ! command -v curl &> /dev/null; then
    print_msg "ERROR" "curl could not be found"
    exit 1
fi

# Initialize force flag
FORCE_REMOVE=false

# Parse options '-f' to force remove existing files
while getopts "f" opt; do
    case $opt in
        f) FORCE_REMOVE=true ;;
        *) ;;
    esac
done
shift $((OPTIND-1))

# Check if destination files already exist
if [ -f "$LINUX_FINAL_BINARY_NAME" ] || [ -f "$LINUX_ARM_FINAL_BINARY_NAME" ] || [ -f "$WINDOWS_FINAL_BINARY_NAME" ] || [ -f "$MACOS_FINAL_BINARY_NAME" ]; then
    if [ "$FORCE_REMOVE" = true ]; then
        print_msg "PROCESS" "Force removing existing files..."
        rm -f "$LINUX_FINAL_BINARY_NAME" "$LINUX_ARM_FINAL_BINARY_NAME" "$WINDOWS_FINAL_BINARY_NAME" "$MACOS_FINAL_BINARY_NAME"
    else
        print_msg "WARNING" "Destination files already exist. Do you want to remove them? (y/n)"
        read -r response
        if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]; then
            print_msg "PROCESS" "Removing existing files..."
            rm -f "$LINUX_FINAL_BINARY_NAME" "$LINUX_ARM_FINAL_BINARY_NAME" "$WINDOWS_FINAL_BINARY_NAME" "$MACOS_FINAL_BINARY_NAME"
        else
            print_msg "ERROR" "Operation cancelled by user"
            exit 1
        fi
    fi
fi

# Check if the destination directory already exists, if so remove it
if [ -d "$INSTALL_DIR" ]; then
    print_msg "PROCESS" "Removing existing destination directory: $INSTALL_DIR"
    # BUGFIX: only abort when the removal actually fails; the script previously
    # executed 'exit 1' unconditionally whenever the directory pre-existed.
    if ! rm -rf "$INSTALL_DIR"; then
        print_msg "ERROR" "Failed to remove install directory: $INSTALL_DIR"
        exit 1
    fi
fi

# Get the latest release version
LATEST_VERSION=$(get_latest_release)
print_msg "INFO" "Latest release: $LATEST_VERSION"

# Create a temporary directory
TEMP_DIR=$(mktemp -d)
print_msg "INFO" "Created temporary directory: $TEMP_DIR"

# Download all assets from the latest release
print_msg "PROCESS" "Downloading binaries from $LATEST_VERSION..."
ASSETS_URL="https://api.github.com/repos/omerbenamram/evtx/releases/latest"
DOWNLOAD_URLS=$(curl -s "$ASSETS_URL" | grep "browser_download_url" | cut -d '"' -f 4)

# Download each binary, skipping musl builds (not shipped by Zircolite)
for url in $DOWNLOAD_URLS; do
    filename=$(basename "$url")
    if [[ "$filename" == *"musl"* ]]; then
        print_msg "INFO" "Skipping musl binary: $filename"
        continue
    fi
    print_msg "PROCESS" "Downloading $filename..."
    curl -s -L -o "$TEMP_DIR/$filename" "$url"
done

# Create a directory for the binaries if it doesn't exist
mkdir -p "$INSTALL_DIR"

# Move the binaries to the install directory
print_msg "PROCESS" "Moving binaries to $INSTALL_DIR..."
mv "$TEMP_DIR"/* "$INSTALL_DIR"/

# Clean up
rmdir "$TEMP_DIR"
print_msg "INFO" "Temporary directory removed"

# Make binaries executable
chmod +x "$INSTALL_DIR"/*
print_msg "SUCCESS" "All binaries downloaded and made executable in $INSTALL_DIR"

# Detect and handle macOS binaries
print_msg "PROCESS" "Detecting macOS binaries for universal binary creation..."

# Find macOS binaries and rename all binaries in one pass
MACOS_X86_BINARY=""
MACOS_ARM_BINARY=""
UNIVERSAL_BINARY="$INSTALL_DIR/$MACOS_FINAL_BINARY_NAME"

print_msg "PROCESS" "Renaming binaries with platform-specific names..."
for binary in "$INSTALL_DIR"/*; do
    # Skip if not a file
    [ -f "$binary" ] || continue
    # Use file command to detect binary type
    FILE_INFO=$(file "$binary")
    # Process based on binary type
    if [[ "$FILE_INFO" == *"ELF"*"x86-64"* ]]; then
        # Linux x86_64
        mv "$binary" "$INSTALL_DIR/$LINUX_FINAL_BINARY_NAME"
        print_msg "INFO" "Renamed $(basename "$binary") to $LINUX_FINAL_BINARY_NAME"
    elif [[ "$FILE_INFO" == *"ELF"*"aarch64"* ]]; then
        # Linux ARM64
        mv "$binary" "$INSTALL_DIR/$LINUX_ARM_FINAL_BINARY_NAME"
        print_msg "INFO" "Renamed $(basename "$binary") to $LINUX_ARM_FINAL_BINARY_NAME"
    elif [[ "$FILE_INFO" == *"PE"*"executable"* ]] || [[ "$binary" == *".exe" ]]; then
        # Windows
        mv "$binary" "$INSTALL_DIR/$WINDOWS_FINAL_BINARY_NAME"
        print_msg "INFO" "Renamed $(basename "$binary") to $WINDOWS_FINAL_BINARY_NAME"
    elif [[ "$FILE_INFO" == *"Mach-O"* ]]; then
        if [[ "$FILE_INFO" == *"x86_64"* ]]; then
            print_msg "INFO" "Found macOS x86_64 binary: $(basename "$binary")"
            MACOS_X86_BINARY="$binary"
        elif [[ "$FILE_INFO" == *"arm64"* ]]; then
            print_msg "INFO" "Found macOS arm64 binary: $(basename "$binary")"
            MACOS_ARM_BINARY="$binary"
        fi
    fi
done

# Create universal binary if both architectures are found
if [ -n "$MACOS_X86_BINARY" ] && [ -n "$MACOS_ARM_BINARY" ]; then
    print_msg "PROCESS" "Creating universal binary from x86_64 and arm64 binaries..."
    # BUGFIX: resolve the helper next to this script instead of assuming it is in the CWD
    "$SCRIPT_DIR/create_universal_bin_macos.sh" "$MACOS_X86_BINARY" "$MACOS_ARM_BINARY" "$UNIVERSAL_BINARY"
    # Remove the architecture-specific binaries
    rm -f "$MACOS_X86_BINARY" "$MACOS_ARM_BINARY"
    print_msg "INFO" "Created universal macOS binary: $MACOS_FINAL_BINARY_NAME"
elif [ -n "$MACOS_X86_BINARY" ]; then
    # If only x86_64 is available
    mv "$MACOS_X86_BINARY" "$UNIVERSAL_BINARY"
    print_msg "INFO" "Only x86_64 macOS binary available, renamed to $MACOS_FINAL_BINARY_NAME"
elif [ -n "$MACOS_ARM_BINARY" ]; then
    # If only arm64 is available
    mv "$MACOS_ARM_BINARY" "$UNIVERSAL_BINARY"
    print_msg "INFO" "Only arm64 macOS binary available, renamed to $MACOS_FINAL_BINARY_NAME"
fi

# Put all the binaries in the root directory
mv "$INSTALL_DIR"/* .

# Remove the install directory
rm -rf "$INSTALL_DIR" || print_msg "ERROR" "Failed to remove install directory: $INSTALL_DIR"
# BUGFIX: the final message no longer references the just-removed $INSTALL_DIR
print_msg "SUCCESS" "All binaries downloaded and made executable in the current directory"

View File

@@ -143,79 +143,6 @@ python3 zircolite.py --evtx logs/ \
Sometimes, SIGMA rules can be very noisy (and generate a lot of false positives) but you still want to keep them in your rulesets. It is possible to filter rules that return too many detected events with the option `--limit <MAX_NUMBER>`. **Please note that when using this option, the rules are not skipped; their results are just ignored**, but this is useful when forwarding events to Splunk.
## Forwarding detected events
> [!WARNING]
> Forwarding is DEPRECATED and will likely be disabled in a future release
Zircolite provides multiple ways to forward events to a collector:
- the HTTP forwarder : this is a very simple forwarder and pretty much a **"toy"** example and should be used when you have nothing else. An **example** server is available in the [tools](../tools/zircolite_server/) directory
- the Splunk HEC Forwarder : it allows to forward all detected events to a Splunk instance using **HTTP Event Collector**
- the ELK ES client : it allows to forward all detected events to an ELK instance
There are two modes to forward the events :
- By default all events are forwarded after the detection process
- The argument `--stream` allow to forward events during the detection process
If you forward your events to a central collector you can disable local logging with the Zircolite `--nolog` argument.
### Forward events to a HTTP server
If you have multiple endpoints to scan, it is useful to send the detected events to a central collector. As of v1.2, Zircolite can forward detected events to an HTTP server :
```shell
python3 zircolite.py --evtx sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \
--remote "http://address:port/uri"
```
An **example** server is available in the [tools](https://github.com/wagga40/Zircolite/tree/master/tools/zircolite_server/) directory.
### Forward events to a Splunk instance via HEC
As of v1.3.5, Zircolite can forward detections to a Splunk instance with Splunk **HTTP Event Collector**.
1. Configure HEC on your Splunk instance : [check here](https://docs.splunk.com/Documentation/Splunk/8.2.0/Data/UsetheHTTPEventCollector)
2. Get your token and you are ready to go :
```shell
python3 zircolite.py --evtx /sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \
--remote "https://x.x.x.x:8088" --token "xxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \
[--index myindex]
```
Since Splunk HEC defaults to the first associated index, `--index` is optional but can be used to specify the chosen index among the available ones.
> [!WARNING]
> On Windows do not forget to put quotes
### Forward to ELK
As of version 2.8.0, Zircolite can forward events to an ELK stack using the ES client.
```shell
python3 zircolite.py --evtx /sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \
--remote "https://x.x.x.x:8088" --index "zircolite-whatever" \
--eslogin "yourlogin" --espass "yourpass"
```
> [!NOTE]
> the `--eslogin` and `--espass` arguments are optional.
> [!WARNING]
> **Elastic is not handling logs the way Splunk does. Since Zircolite is flattening the field names in the JSON output some fields, especially when working with EVTX files, can have different types between Channels, logsources etc. So when Elastic uses automatic field mapping, mapping errors may prevent events insertion into Elastic.**
#### No local logs
When you forward detected events to a server, sometimes you don't want any log file left on the system you have run Zircolite on. It is possible with the `--nolog` option.
### Forwarding all events
Zircolite is able to forward all events, and not just the detected events, to Splunk, ELK or a custom HTTP server. You just have to use the `--forwardall` argument. Please note that this feature forwards events as JSON and not as a specific `Windows` sourcetype.
> [!WARNING]
> **Elastic is not handling logs the way Splunk does. Since Zircolite is flattening the field names in the JSON output some fields, especially when working with EVTX files, can have different types between Channels, logsources etc. So when Elastic uses automatic field mapping, mapping errors may prevent events insertion into Elastic.**
## Templating and Formatting
Zircolite provides a templating system based on Jinja 2. It allows you to change the output format to suits your needs (Splunk or ELK integration, Grep-able output...). There are some templates available in the [Templates directory](https://github.com/wagga40/Zircolite/tree/master/templates) of the repository : Splunk, Timesketch, ... To use the template system, use these arguments :
@@ -278,97 +205,6 @@ Then you just have to open `index.html` in your favorite browser and click on a
> [!WARNING]
> When packaging with PyInstaller or Nuitka some AV may not like your package.
## Using With DFIR Orc
**DFIR Orc** is a Forensics artefact collection tool for systems running Microsoft Windows (pretty much like [Kape](https://www.kroll.com/en/services/cyber-risk/incident-response-litigation-support/kroll-artifact-parser-extractor-kape) or [artifactcollector](https://forensicanalysis.github.io/documentation/manual/usage/acquisition/)). For more detailed explanation, please check here : [https://dfir-orc.github.io](https://dfir-orc.github.io).
[ZikyHD](https://github.com/ZikyHD) has done a pretty good job at integrating **Zircolite** with **DFIR Orc** in this repository : [https://github.com/Zircocorp/dfir-orc-config](https://github.com/Zircocorp/dfir-orc-config).
Basically, if you want to integrate Zircolite with **DFIR Orc** :
- Clone the [DFIR Orc Config repository](https://github.com/DFIR-ORC/dfir-orc-config) : `git clone https://github.com/Zircocorp/dfir-orc-config.git`
- Create a `DFIR-ORC_config.xml` (or add to an existing one) in the `config` directory containing :
```xml
<?xml version="1.0" encoding="utf-8"?>
<wolf childdebug="no" command_timeout="1200">
<log disposition="truncate">DFIR-ORC_{SystemType}_{FullComputerName}_{TimeStamp}.log</log>
<outline disposition="truncate">DFIR-ORC_{SystemType}_{FullComputerName}_{TimeStamp}.json
</outline>
<!-- BEGIN ZIRCOLITE SPECIFIC CONFIGURATION-->
<!-- This part creates a specific archive for Zircolite -->
<archive name="DFIR-ORC_{SystemType}_{FullComputerName}_Zircolite.7z"
keyword="Zircolite" concurrency="1"
repeat="Once"
compression="fast"
archive_timeout="120" >
<restrictions ElapsedTimeLimit="480" />
<command keyword="GetZircoliteSysmon" winver="6.2+">
<execute name="zircolite_win10_nuitka.exe"
run="7z:#Tools|zircolite_win10_nuitka.exe"/>
<input name='rules_windows_generic.json'
source='res:#rules_windows_generic.json'
argument='-r {FileName}' />
<input name='fieldMappings.json'
source='res:#fieldMappings.json'
argument='-c {FileName}' />
<argument> --cores 1 --noexternal -e C:\windows\System32\winevt\Logs</argument>
<output name="detected_events.json" source="File" argument="-o {FileName}" />
<output name="zircolite.log" source="File" argument="-l {FileName}" />
</command>
</archive>
<!-- /END ZIRCOLITE SPECIFIC CONFIGURATION-->
</wolf>
```
> [!NOTE]
> Please note that if you add this configuration to an existing one, you only need to keep the part between `<!-- BEGIN ... -->` and `<!-- /END ... -->` blocks.
- Put your custom or default mapping file `fieldMappings.json` (the default one is in the Zircolite repository `config` directory) and the ruleset `rules_windows_generic.json` (the default one is in the Zircolite repository `rules` directory) in the `config` directory.
- Put **Zircolite** [binary](https://github.com/wagga40/Zircolite/releases) (in this example `zircolite_win10_nuitka.exe`) and **DFIR Orc** [binaries](https://github.com/DFIR-ORC/dfir-orc/releases) (x86 and x64) in the `tools` directory.
- Create a `DFIR-ORC_Embed.xml` (or add to an existing one) in the `config` directory containing :
```xml
<?xml version="1.0" encoding="utf-8"?>
<toolembed>
<input>.\tools\DFIR-Orc_x86.exe</input>
<output>.\output\%ORC_OUTPUT%</output>
<run64 args="WolfLauncher" >7z:#Tools|DFIR-Orc_x64.exe</run64>
<run32 args="WolfLauncher" >self:#</run32>
<file name="WOLFLAUNCHER_CONFIG"
path=".\%ORC_CONFIG_FOLDER%\DFIR-ORC_config.xml"/>
<!-- BEGIN ZIRCOLITE SPECIFIC CONFIGURATION-->
<file name="rules_windows_generic.json"
path=".\%ORC_CONFIG_FOLDER%\rules_windows_generic.json" />
<file name="fieldMappings.json"
path=".\%ORC_CONFIG_FOLDER%\fieldMappings.json" />
<!-- /END ZIRCOLITE SPECIFIC CONFIGURATION-->
<archive name="Tools" format="7z" compression="Ultra">
<file name="DFIR-Orc_x64.exe"
path=".\tools\DFIR-Orc_x64.exe"/>
<!-- BEGIN ZIRCOLITE SPECIFIC CONFIGURATION-->
<file name="zircolite_win10_nuitka.exe"
path=".\tools\zircolite_win10_nuitka.exe"/>
<!-- /END ZIRCOLITE SPECIFIC CONFIGURATION-->
</archive>
</toolembed>
```
> [!NOTE]
> Please note that if you add this configuration to an existing one, you only need to keep the part between `<!-- BEGIN ... -->` and `<!-- /END ... -->` blocks.
- Now you need to generate the **DFIR Orc** binary by executing `.\configure.ps1` at the root of the repository
- The final output will be in the `output` directory
## Other tools
Some other tools (mostly untested) have included a way to run Zircolite :

View File

@@ -9,14 +9,13 @@
## Project structure
```text
├── Makefile # Very basic Makefile
├── Makefile # Very basic Makefile
├── Readme.md # Do I need to explain ?
├── bin # Directory containing all external binaries (evtx_dump)
├── config # Directory containing the config files
├── config # Directory containing the config files
├── docs # Directory containing the documentation
├── pics # Pictures directory - not really relevant
├── rules # Sample rules you can use
├── templates # Jinja2 templates
├── tools # Directory containing all tools (genRules, zircolite_server)
└── zircolite.py # Zircolite !
```

View File

@@ -1,7 +1,7 @@
# Usage
> [!NOTE]
> If you use the packaged version of Zircolite don't forget to replace `python3 zircolite.py` in the examples by the packaged binary name.
> If you use the packaged version of Zircolite, remember to replace `python3 zircolite.py` in the examples with the packaged binary name.
## Requirements and Installation
@@ -234,7 +234,7 @@ python3 zircolite.py --evtx <EVTX_FOLDER> --ruleset <CONVERTED_SIGMA_RULES> \
--dbfile output.db
```
If you need to re-execute Zircolite, you can do it directly using the SQLite database as the EVTX source (with `--evtx <SAVED_SQLITE_DB_PATH>` and `--dbonly`) and avoid to convert the EVTX, post-process the EVTX and insert data to database. **Using this technique can save a lot of time... But you will be unable to use the `--forwardall`option**
If you need to re-execute Zircolite, you can do it directly using the SQLite database as the EVTX source (with `--evtx <SAVED_SQLITE_DB_PATH>` and `--dbonly`) and avoid converting the EVTX, post-processing the EVTX and inserting data into the database. **Using this technique can save a lot of time**
## Rulesets / Rules
@@ -708,13 +708,6 @@ For example :
- "Suspicious Eventlog Clear or Configuration Using Wevtutil" : **very noisy** on fresh environment (labs etc.), commonly generate a lot of useless detections
- Notepad Making Network Connection : **can slow very significantly** the execution of Zircolite
## Generate embedded versions
***Removed***
- You can use DFIR Orc to package Zircolite, check [here](Advanced.md#using-with-dfir-orc)
- [Kape](https://www.kroll.com/en/services/cyber-risk/incident-response-litigation-support/kroll-artifact-parser-extractor-kape) also has a module for Zircolite : [here](https://github.com/EricZimmerman/KapeFiles/tree/master/Modules/Apps/GitHub)
## Docker
Zircolite is also packaged as a Docker image (cf. [wagga40/zircolite](https://hub.docker.com/r/wagga40/zircolite) on Docker Hub), which embeds all dependencies (e.g. `evtx_dump`) and provides a platform-independent way of using the tool. Please note this image is not updated with the latest rulesets!

Binary file not shown.

View File

@@ -1,10 +1,11 @@
* Usage
* [Requirements and Installation](Usage.md#requirements-and-installation)
* [Basic usage](Usage.md#basic-usage)
* [Rulesets / Rules](Usage.md#rulesets--rules)
* [Pipelines](Usage.md#pipelines)
* [Field mappings, field exclusions, value exclusions, field aliases and field splitting](Usage.md#field-mappings-field-exclusions-value-exclusions-field-aliases-and-field-splitting)
* [Field Transforms](Usage.md#field-transforms)
* [Generate your own rulesets](Usage.md#generate-your-own-rulesets)
* [Generate embedded versions](Usage.md#generate-embedded-versions)
* [Docker](Usage.md#docker)
* Advanced use
@@ -15,7 +16,6 @@
* [Templating and Formatting](Advanced.md#templating-and-formatting)
* [Mini GUI](Advanced.md#mini-gui)
* [Packaging Zircolite](Advanced.md#packaging-zircolite)
* [Using With DFIR Orc](Advanced.md#using-with-dfir-orc)
* Internals
* [Zircolite architecture](Internals.md#zircolite-architecture)

View File

@@ -2,17 +2,14 @@ orjson>=3.9.15
xxhash
colorama>=0.4.4
tqdm>=4.58.0
urllib3>=1.26.2
requests>=2.24.0
jinja2>=2.11.3
evtx>=0.6.2
aiohttp[speedups]~=3.9.2
lxml
elasticsearch[async]>=7.8.0
pysigma>=0.10.10
pysigma-pipeline-sysmon>=1.0.3
pysigma-pipeline-windows>=1.1.1
pysigma-backend-sqlite>=0.1.1
pyyaml
chardet
RestrictedPython
RestrictedPython

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +0,0 @@
# Zircolite Server
This is an **example** HTTP server to collect Zircolite results across the network in a unique point.
:warning: **THIS IS NOT A PRODUCTION-READY NOR A SECURE TOOL**

View File

@@ -1,2 +0,0 @@
{% for match in data["matches"] %}{"host":{{ data["host"]|tojson }},"title":{{ data["title"]|tojson }},"description":{{ data["description"]|tojson }},"sigma":{{ data["sigma"]|tojson }},"rule_level":{{ data["rule_level"]|tojson }},"tags":{{ data["tags"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %}}
{% endfor %}

View File

@@ -1,3 +0,0 @@
flask>=1.1.2
jinja2>=2.11.3
werkzeug>=3.0.3 # not directly required, pinned by Snyk to avoid a vulnerability

View File

@@ -1,28 +0,0 @@
#!python3
# -*- coding: utf-8 -*-
# Zircolite **example** server.
# Make your own if you want to use it in production.
from flask import Flask, request
from jinja2 import Template
import base64
import json

app = Flask(__name__)

# Load the JSONL Jinja template once at startup; a context manager guarantees
# the file handle is closed even if reading fails.
with open("jsonl.tmpl", "r", encoding="utf-8") as tmpl:
    template = Template(tmpl.read())


@app.route('/logs', methods=['POST'])
def logs():
    """Render the POSTed detection payload through the JSONL template and
    append it to results.json. Returns {"status": "200"} on success and
    {"status": "400"} on any failure (response bodies kept identical to the
    original for compatibility with existing forwarders; note the HTTP status
    code itself is always 200)."""
    try:
        with open("results.json", "a") as f:
            f.write(template.render(data=request.get_json()))
    except Exception as e:
        # Log the failure instead of silently discarding it.
        app.logger.error("Failed to process forwarded events: %s", e)
        return {"status": "400"}
    return {"status": "200"}


if __name__ == '__main__':
    # debug=True is acceptable for this example server only — never in production.
    app.run(host='0.0.0.0', port=8080, debug=True)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff