test: add BATS test suite (79 tests)
- tests/common.bats: PROJECT_ROOT detection, is_cmd, timestamp, data_dir, logging functions, color handling, require_root
- tests/detect.bats: GPU sysfs reads with mock sysfs tree, kernel param parsing (word boundary, dot escaping, edge positions), recommended GTT/pages computation (64GB, 128GB, tiny, zero), firmware bad detection, stack detection
- tests/format.bats: human_bytes (0, KiB, MiB, GiB boundaries, 64GiB), human_mib (sub-GiB, exact-GiB, recommended values, empty input)
- tests/benchmark_compare.bats: improvement/regression display, empty results, missing files, usage output, config change detection
- tests/log_metrics.bats: CSV header, data format, field count, input validation, unknown argument handling
- tests/test_helper.sh: mock sysfs tree builder, bats-assert/support setup

Makefile: add 'make test' target

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
84
tests/benchmark_compare.bats
Normal file
84
tests/benchmark_compare.bats
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env bats
|
||||
# Tests for scripts/benchmark/compare.sh — result comparison logic
|
||||
|
||||
load test_helper.sh
|
||||
|
||||
setup() {
|
||||
source_lib common.sh
|
||||
source_lib format.sh
|
||||
BEFORE_DIR="$(mktemp -d)"
|
||||
AFTER_DIR="$(mktemp -d)"
|
||||
}
|
||||
|
||||
teardown() {
|
||||
rm -rf "$BEFORE_DIR" "$AFTER_DIR"
|
||||
}
|
||||
|
||||
write_summary() {
|
||||
local dir="$1" json="$2"
|
||||
echo "$json" > "$dir/summary.json"
|
||||
}
|
||||
|
||||
write_system_state() {
|
||||
local dir="$1" json="$2"
|
||||
echo "$json" > "$dir/system-state.json"
|
||||
}
|
||||
|
||||
# ── Basic comparison ─────────────────────────────────────
|
||||
|
||||
@test "compare: shows improvement when after > before" {
|
||||
write_summary "$BEFORE_DIR" '{"results":[{"model":"qwen3","backend":"Vulkan","test":"pp512","tokens_per_sec":500.0,"file":"test.log","size":"4GB","raw":"500.0"}]}'
|
||||
write_summary "$AFTER_DIR" '{"results":[{"model":"qwen3","backend":"Vulkan","test":"pp512","tokens_per_sec":600.0,"file":"test.log","size":"4GB","raw":"600.0"}]}'
|
||||
write_system_state "$BEFORE_DIR" '{"memory":{"vram_total_bytes":0},"kernel":{},"tuned_profile":"throughput-performance"}'
|
||||
write_system_state "$AFTER_DIR" '{"memory":{"vram_total_bytes":0},"kernel":{},"tuned_profile":"throughput-performance"}'
|
||||
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh" "$BEFORE_DIR" "$AFTER_DIR"
|
||||
assert_success
|
||||
assert_output --partial "500.0"
|
||||
assert_output --partial "600.0"
|
||||
}
|
||||
|
||||
@test "compare: shows regression when after < before" {
|
||||
write_summary "$BEFORE_DIR" '{"results":[{"model":"qwen3","backend":"Vulkan","test":"tg128","tokens_per_sec":15.0,"file":"test.log","size":"4GB","raw":"15.0"}]}'
|
||||
write_summary "$AFTER_DIR" '{"results":[{"model":"qwen3","backend":"Vulkan","test":"tg128","tokens_per_sec":12.0,"file":"test.log","size":"4GB","raw":"12.0"}]}'
|
||||
write_system_state "$BEFORE_DIR" '{"memory":{},"kernel":{},"tuned_profile":""}'
|
||||
write_system_state "$AFTER_DIR" '{"memory":{},"kernel":{},"tuned_profile":""}'
|
||||
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh" "$BEFORE_DIR" "$AFTER_DIR"
|
||||
assert_success
|
||||
assert_output --partial "12.0"
|
||||
}
|
||||
|
||||
@test "compare: handles empty results gracefully" {
|
||||
write_summary "$BEFORE_DIR" '{"results":[]}'
|
||||
write_summary "$AFTER_DIR" '{"results":[]}'
|
||||
write_system_state "$BEFORE_DIR" '{"memory":{},"kernel":{},"tuned_profile":""}'
|
||||
write_system_state "$AFTER_DIR" '{"memory":{},"kernel":{},"tuned_profile":""}'
|
||||
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh" "$BEFORE_DIR" "$AFTER_DIR"
|
||||
assert_success
|
||||
assert_output --partial "No comparable results"
|
||||
}
|
||||
|
||||
@test "compare: fails without summary.json" {
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh" "$BEFORE_DIR" "$AFTER_DIR"
|
||||
assert_failure
|
||||
assert_output --partial "No summary.json"
|
||||
}
|
||||
|
||||
@test "compare: shows usage when called without args" {
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh"
|
||||
assert_failure
|
||||
assert_output --partial "Usage"
|
||||
}
|
||||
|
||||
@test "compare: detects config changes between runs" {
|
||||
write_summary "$BEFORE_DIR" '{"results":[{"model":"m","backend":"b","test":"t","tokens_per_sec":1.0,"file":"f","size":"s","raw":"1.0"}]}'
|
||||
write_summary "$AFTER_DIR" '{"results":[{"model":"m","backend":"b","test":"t","tokens_per_sec":2.0,"file":"f","size":"s","raw":"2.0"}]}'
|
||||
write_system_state "$BEFORE_DIR" '{"memory":{"vram_total_bytes":34359738368},"kernel":{"param_iommu":"","param_gttsize":"","param_pages_limit":""},"tuned_profile":"throughput-performance"}'
|
||||
write_system_state "$AFTER_DIR" '{"memory":{"vram_total_bytes":536870912},"kernel":{"param_iommu":"pt","param_gttsize":"61440","param_pages_limit":"15728640"},"tuned_profile":"accelerator-performance"}'
|
||||
|
||||
run bash "$PROJECT_ROOT/scripts/benchmark/compare.sh" "$BEFORE_DIR" "$AFTER_DIR"
|
||||
assert_success
|
||||
assert_output --partial "Configuration changes"
|
||||
}
|
||||
121
tests/common.bats
Normal file
121
tests/common.bats
Normal file
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/env bats
|
||||
# Tests for lib/common.sh — core utilities
|
||||
|
||||
load test_helper.sh
|
||||
|
||||
setup() {
|
||||
source_lib common.sh
|
||||
}
|
||||
|
||||
# ── PROJECT_ROOT detection ───────────────────────────────
|
||||
|
||||
@test "PROJECT_ROOT points to a directory containing Makefile" {
|
||||
[ -f "$PROJECT_ROOT/Makefile" ]
|
||||
}
|
||||
|
||||
@test "PROJECT_ROOT is not the filesystem root" {
|
||||
[ "$PROJECT_ROOT" != "/" ]
|
||||
}
|
||||
|
||||
# ── is_cmd ───────────────────────────────────────────────
|
||||
|
||||
@test "is_cmd: bash exists" {
|
||||
run is_cmd bash
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "is_cmd: nonexistent command fails" {
|
||||
run is_cmd this_command_does_not_exist_xyz123
|
||||
assert_failure
|
||||
}
|
||||
|
||||
@test "is_cmd: empty argument fails" {
|
||||
run is_cmd ""
|
||||
assert_failure
|
||||
}
|
||||
|
||||
# ── timestamp ────────────────────────────────────────────
|
||||
|
||||
@test "timestamp: returns YYYYMMDD-HHMMSS format" {
|
||||
run timestamp
|
||||
assert_success
|
||||
[[ "$output" =~ ^[0-9]{8}-[0-9]{6}$ ]]
|
||||
}
|
||||
|
||||
@test "timestamp: returns a reasonable date (not epoch)" {
|
||||
local ts
|
||||
ts="$(timestamp)"
|
||||
local year="${ts:0:4}"
|
||||
(( year >= 2025 ))
|
||||
}
|
||||
|
||||
# ── data_dir ─────────────────────────────────────────────
|
||||
|
||||
@test "data_dir: creates directory and returns path" {
|
||||
local dir
|
||||
dir="$(data_dir "test-tmp-$$")"
|
||||
[ -d "$dir" ]
|
||||
rmdir "$dir"
|
||||
}
|
||||
|
||||
@test "data_dir: default returns data/ under project root" {
|
||||
local dir
|
||||
dir="$(data_dir ".")"
|
||||
[[ "$dir" == "$PROJECT_ROOT/data/." ]]
|
||||
}
|
||||
|
||||
# ── logging functions produce output ─────────────────────
|
||||
|
||||
@test "log_info: produces output" {
|
||||
run log_info "test message"
|
||||
assert_success
|
||||
assert_output --partial "test message"
|
||||
}
|
||||
|
||||
@test "log_success: produces output" {
|
||||
run log_success "it worked"
|
||||
assert_success
|
||||
assert_output --partial "it worked"
|
||||
}
|
||||
|
||||
@test "log_warn: produces output" {
|
||||
run log_warn "warning"
|
||||
assert_success
|
||||
assert_output --partial "warning"
|
||||
}
|
||||
|
||||
@test "log_error: produces output on stderr" {
|
||||
run log_error "failure"
|
||||
assert_success
|
||||
# log_error writes to stderr but bats captures both
|
||||
assert_output --partial "failure"
|
||||
}
|
||||
|
||||
@test "log_header: produces output with delimiters" {
|
||||
run log_header "Section Title"
|
||||
assert_success
|
||||
assert_output --partial "Section Title"
|
||||
}
|
||||
|
||||
# ── Color handling ───────────────────────────────────────
|
||||
|
||||
@test "colors are empty strings when not a terminal" {
|
||||
export TERM=dumb
|
||||
# Re-source to pick up non-terminal detection
|
||||
unset RED GREEN YELLOW BLUE CYAN BOLD DIM RESET
|
||||
source_lib common.sh
|
||||
# When stdout is not a tty (as in bats), colors should be empty
|
||||
[ -z "$RED" ] || [ "$RED" = "" ]
|
||||
}
|
||||
|
||||
# ── require_root ─────────────────────────────────────────
|
||||
|
||||
@test "require_root: exits with error when not root" {
|
||||
# We're not running tests as root
|
||||
if [ "$EUID" -eq 0 ]; then
|
||||
skip "Test must not run as root"
|
||||
fi
|
||||
run require_root
|
||||
assert_failure
|
||||
assert_output --partial "root"
|
||||
}
|
||||
241
tests/detect.bats
Normal file
241
tests/detect.bats
Normal file
@@ -0,0 +1,241 @@
|
||||
#!/usr/bin/env bats
|
||||
# Tests for lib/detect.sh — hardware and config detection
|
||||
|
||||
load test_helper.sh
|
||||
|
||||
setup() {
|
||||
source_lib common.sh
|
||||
source_lib format.sh
|
||||
source_lib detect.sh
|
||||
setup_mock_sysfs
|
||||
# Override GPU_SYSFS AFTER sourcing detect.sh (which sets it at load time)
|
||||
GPU_SYSFS="$MOCK_SYSFS/class/drm/card0/device"
|
||||
}
|
||||
|
||||
teardown() {
|
||||
teardown_mock_sysfs
|
||||
}
|
||||
|
||||
# ── GPU sysfs reads ──────────────────────────────────────
|
||||
|
||||
@test "detect_vram_total: reads mocked value" {
|
||||
echo "34359738368" > "$GPU_SYSFS/mem_info_vram_total"
|
||||
run detect_vram_total
|
||||
assert_output "34359738368"
|
||||
}
|
||||
|
||||
@test "detect_vram_total: returns 0 when sysfs file missing" {
|
||||
rm -f "$GPU_SYSFS/mem_info_vram_total"
|
||||
run detect_vram_total
|
||||
assert_output "0"
|
||||
}
|
||||
|
||||
@test "detect_gtt_total: reads mocked value" {
|
||||
echo "64424509440" > "$GPU_SYSFS/mem_info_gtt_total"
|
||||
run detect_gtt_total
|
||||
assert_output "64424509440"
|
||||
}
|
||||
|
||||
@test "detect_gpu_busy: reads percentage" {
|
||||
echo "42" > "$GPU_SYSFS/gpu_busy_percent"
|
||||
run detect_gpu_busy
|
||||
assert_output "42"
|
||||
}
|
||||
|
||||
@test "detect_gpu_temp: reads millidegrees" {
|
||||
echo "55000" > "$GPU_SYSFS/hwmon/hwmon0/temp1_input"
|
||||
run detect_gpu_temp
|
||||
assert_output "55000"
|
||||
}
|
||||
|
||||
@test "detect_gpu_power: reads microwatts" {
|
||||
echo "30000000" > "$GPU_SYSFS/hwmon/hwmon0/power1_average"
|
||||
run detect_gpu_power
|
||||
assert_output "30000000"
|
||||
}
|
||||
|
||||
@test "detect_gpu_device_id: reads and strips 0x prefix" {
|
||||
echo "0x1586" > "$GPU_SYSFS/device"
|
||||
run detect_gpu_device_id
|
||||
assert_output "1586"
|
||||
}
|
||||
|
||||
@test "detect_gpu_busy: returns 0 when sysfs file missing" {
|
||||
rm -f "$GPU_SYSFS/gpu_busy_percent"
|
||||
run detect_gpu_busy
|
||||
assert_output "0"
|
||||
}
|
||||
|
||||
@test "detect_gpu_temp: returns 0 when hwmon missing" {
|
||||
rm -f "$GPU_SYSFS/hwmon/hwmon0/temp1_input"
|
||||
run detect_gpu_temp
|
||||
assert_output "0"
|
||||
}
|
||||
|
||||
# ── Kernel param detection ───────────────────────────────
|
||||
# These tests redefine detect_kernel_param inline to control /proc/cmdline content
|
||||
|
||||
_mock_kernel_param() {
|
||||
local param="$1" cmdline="$2"
|
||||
local pattern="${param//./\\.}"
|
||||
if [[ "$cmdline" =~ (^|[[:space:]])${pattern}=([^ ]+) ]]; then
|
||||
echo "${BASH_REMATCH[2]}"
|
||||
elif [[ "$cmdline" =~ (^|[[:space:]])${pattern}([[:space:]]|$) ]]; then
|
||||
echo "present"
|
||||
fi
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: extracts iommu=pt" {
|
||||
run _mock_kernel_param 'iommu' "BOOT_IMAGE=/boot/vmlinuz root=UUID=abc ro iommu=pt quiet"
|
||||
assert_output "pt"
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: extracts amdgpu.gttsize value" {
|
||||
run _mock_kernel_param 'amdgpu.gttsize' "BOOT_IMAGE=/boot/vmlinuz iommu=pt amdgpu.gttsize=61440 ttm.pages_limit=15728640 quiet"
|
||||
assert_output "61440"
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: returns empty when param missing" {
|
||||
run _mock_kernel_param 'iommu' "BOOT_IMAGE=/boot/vmlinuz root=UUID=abc ro quiet"
|
||||
assert_output ""
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: iommu does NOT match amd_iommu (word boundary)" {
|
||||
run _mock_kernel_param 'iommu' "BOOT_IMAGE=/boot/vmlinuz amd_iommu=off quiet"
|
||||
assert_output ""
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: amdgpu.gttsize does NOT match xamdgpu.gttsize" {
|
||||
run _mock_kernel_param 'amdgpu.gttsize' "BOOT_IMAGE=/boot/vmlinuz xamdgpu.gttsize=99999 quiet"
|
||||
assert_output ""
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: dot in param name is literal not wildcard" {
|
||||
run _mock_kernel_param 'amdgpu.gttsize' "BOOT_IMAGE=/boot/vmlinuz amdgpuXgttsize=99999 quiet"
|
||||
assert_output ""
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: param at start of cmdline (no leading space)" {
|
||||
run _mock_kernel_param 'iommu' "iommu=pt root=UUID=abc ro quiet"
|
||||
assert_output "pt"
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: param at end of cmdline (no trailing space)" {
|
||||
run _mock_kernel_param 'iommu' "BOOT_IMAGE=/boot/vmlinuz iommu=pt"
|
||||
assert_output "pt"
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: boolean param without value" {
|
||||
run _mock_kernel_param 'quiet' "BOOT_IMAGE=/boot/vmlinuz iommu=pt quiet"
|
||||
assert_output "present"
|
||||
}
|
||||
|
||||
@test "detect_kernel_param: param with equals in value" {
|
||||
run _mock_kernel_param 'root' "BOOT_IMAGE=/boot/vmlinuz root=UUID=abc-def-123"
|
||||
assert_output "UUID=abc-def-123"
|
||||
}
|
||||
|
||||
# ── Recommended values ───────────────────────────────────
|
||||
|
||||
@test "recommended_gttsize_mib: 64 GiB system" {
|
||||
# Override detection functions AFTER source
|
||||
detect_system_ram_kb() { echo "33554432"; } # 32 GiB
|
||||
detect_vram_total() { echo "34359738368"; } # 32 GiB
|
||||
run recommended_gttsize_mib
|
||||
# total = (33554432 + 33554432) KB = 67108864 KB → 64 GiB → 60 GiB GTT → 61440 MiB
|
||||
assert_output "61440"
|
||||
}
|
||||
|
||||
@test "recommended_gttsize_mib: 128 GiB system" {
|
||||
detect_system_ram_kb() { echo "130023424"; } # ~124 GiB
|
||||
detect_vram_total() { echo "536870912"; } # 0.5 GiB
|
||||
run recommended_gttsize_mib
|
||||
# ~124.5 GiB total → integer: 124 GiB → 120 GiB GTT → 122880 MiB
|
||||
assert_output "122880"
|
||||
}
|
||||
|
||||
@test "recommended_gttsize_mib: tiny system (2 GiB) floors at 1 GiB" {
|
||||
detect_system_ram_kb() { echo "2097152"; }
|
||||
detect_vram_total() { echo "0"; }
|
||||
run recommended_gttsize_mib
|
||||
assert_output "1024"
|
||||
}
|
||||
|
||||
@test "recommended_gttsize_mib: zero RAM floors at 1 GiB" {
|
||||
detect_system_ram_kb() { echo "0"; }
|
||||
detect_vram_total() { echo "0"; }
|
||||
run recommended_gttsize_mib
|
||||
assert_output "1024"
|
||||
}
|
||||
|
||||
@test "recommended_gttsize_mib: exactly 4 GiB system floors at 1 GiB" {
|
||||
detect_system_ram_kb() { echo "4194304"; } # 4 GiB
|
||||
detect_vram_total() { echo "0"; }
|
||||
run recommended_gttsize_mib
|
||||
# 4 - 4 = 0 → floored to 1 → 1024
|
||||
assert_output "1024"
|
||||
}
|
||||
|
||||
@test "recommended_gttsize_mib: 5 GiB system → 1 GiB GTT" {
|
||||
detect_system_ram_kb() { echo "5242880"; } # 5 GiB
|
||||
detect_vram_total() { echo "0"; }
|
||||
run recommended_gttsize_mib
|
||||
# 5 - 4 = 1 → 1024 MiB
|
||||
assert_output "1024"
|
||||
}
|
||||
|
||||
@test "recommended_pages_limit: matches gttsize * 256" {
|
||||
detect_system_ram_kb() { echo "33554432"; }
|
||||
detect_vram_total() { echo "34359738368"; }
|
||||
local gttsize
|
||||
gttsize="$(recommended_gttsize_mib)"
|
||||
run recommended_pages_limit
|
||||
assert_output "$(( gttsize * 256 ))"
|
||||
}
|
||||
|
||||
# ── Firmware detection ───────────────────────────────────
|
||||
|
||||
@test "detect_firmware_bad: returns true for known bad version" {
|
||||
detect_firmware_version() { echo "20251125-1"; }
|
||||
run detect_firmware_bad
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "detect_firmware_bad: returns false for good version" {
|
||||
detect_firmware_version() { echo "20260309-1"; }
|
||||
run detect_firmware_bad
|
||||
assert_failure
|
||||
}
|
||||
|
||||
@test "detect_firmware_bad: returns false for empty version" {
|
||||
detect_firmware_version() { echo "unknown"; }
|
||||
run detect_firmware_bad
|
||||
assert_failure
|
||||
}
|
||||
|
||||
# ── Stack detection ──────────────────────────────────────
|
||||
|
||||
@test "detect_stack_ollama: reports missing when not installed" {
|
||||
is_cmd() { return 1; }
|
||||
run detect_stack_ollama
|
||||
assert_output "missing"
|
||||
}
|
||||
|
||||
@test "detect_stack_ollama: reports installed when available" {
|
||||
is_cmd() { [[ "$1" == "ollama" ]] && return 0 || return 1; }
|
||||
run detect_stack_ollama
|
||||
assert_output "installed"
|
||||
}
|
||||
|
||||
# ── detect_system_ram_kb ─────────────────────────────────
|
||||
|
||||
@test "detect_system_ram_kb: returns 0 on missing /proc/meminfo" {
|
||||
# Temporarily override to read from nonexistent file
|
||||
detect_system_ram_kb() {
|
||||
local kb
|
||||
kb="$(grep MemTotal /nonexistent/meminfo 2>/dev/null | awk '{print $2}')"
|
||||
echo "${kb:-0}"
|
||||
}
|
||||
run detect_system_ram_kb
|
||||
assert_output "0"
|
||||
}
|
||||
104
tests/format.bats
Normal file
104
tests/format.bats
Normal file
@@ -0,0 +1,104 @@
|
||||
#!/usr/bin/env bats
|
||||
# Tests for lib/format.sh — pure formatting functions
|
||||
|
||||
load test_helper.sh
|
||||
|
||||
setup() {
|
||||
source_lib common.sh
|
||||
source_lib format.sh
|
||||
}
|
||||
|
||||
# ── human_bytes ──────────────────────────────────────────
|
||||
|
||||
@test "human_bytes: 0 bytes" {
|
||||
run human_bytes 0
|
||||
assert_output "0 B"
|
||||
}
|
||||
|
||||
@test "human_bytes: small value (500 bytes)" {
|
||||
run human_bytes 500
|
||||
assert_output "500 B"
|
||||
}
|
||||
|
||||
@test "human_bytes: exactly 1 KiB" {
|
||||
run human_bytes 1024
|
||||
assert_output "1 KiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: exactly 1 MiB" {
|
||||
run human_bytes 1048576
|
||||
assert_output "1 MiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: exactly 1 GiB" {
|
||||
run human_bytes 1073741824
|
||||
assert_output "1.0 GiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: 32 GiB (typical VRAM)" {
|
||||
run human_bytes 34359738368
|
||||
assert_output "32.0 GiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: 512 MiB (optimal VRAM)" {
|
||||
run human_bytes 536870912
|
||||
assert_output "512 MiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: sub-GiB value formats with leading zero (0.5 GiB)" {
|
||||
# 0.5 GiB = 536870912 bytes — should NOT be "0.5 GiB" since < 1 GiB uses MiB
|
||||
run human_bytes 536870912
|
||||
assert_output "512 MiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: 1.5 GiB has correct decimal" {
|
||||
run human_bytes 1610612736
|
||||
assert_output "1.5 GiB"
|
||||
}
|
||||
|
||||
@test "human_bytes: empty/missing argument defaults to 0" {
|
||||
run human_bytes ""
|
||||
assert_output "0 B"
|
||||
}
|
||||
|
||||
@test "human_bytes: 64 GiB" {
|
||||
run human_bytes 68719476736
|
||||
assert_output "64.0 GiB"
|
||||
}
|
||||
|
||||
# ── human_mib ────────────────────────────────────────────
|
||||
|
||||
@test "human_mib: 0 MiB" {
|
||||
run human_mib 0
|
||||
assert_output "0 MiB"
|
||||
}
|
||||
|
||||
@test "human_mib: 512 MiB (stays as MiB)" {
|
||||
run human_mib 512
|
||||
assert_output "512 MiB"
|
||||
}
|
||||
|
||||
@test "human_mib: 1023 MiB (just under GiB threshold)" {
|
||||
run human_mib 1023
|
||||
assert_output "1023 MiB"
|
||||
}
|
||||
|
||||
@test "human_mib: 1024 MiB = 1.0 GiB" {
|
||||
run human_mib 1024
|
||||
assert_output "1.0 GiB"
|
||||
}
|
||||
|
||||
@test "human_mib: 60416 MiB (recommended GTT for 64GB system)" {
|
||||
run human_mib 60416
|
||||
assert_output "59.0 GiB"
|
||||
}
|
||||
|
||||
@test "human_mib: 61440 MiB = 60.0 GiB" {
|
||||
run human_mib 61440
|
||||
assert_output "60.0 GiB"
|
||||
}
|
||||
|
||||
@test "human_mib: empty argument defaults to 0" {
|
||||
run human_mib ""
|
||||
assert_output "0 MiB"
|
||||
}
|
||||
71
tests/log_metrics.bats
Normal file
71
tests/log_metrics.bats
Normal file
@@ -0,0 +1,71 @@
|
||||
#!/usr/bin/env bats
|
||||
# Tests for scripts/monitor/log-metrics.sh — metric collection
|
||||
|
||||
load test_helper.sh
|
||||
|
||||
setup() {
|
||||
source_lib common.sh
|
||||
setup_mock_sysfs
|
||||
OUTPUT_FILE="$(mktemp)"
|
||||
}
|
||||
|
||||
teardown() {
|
||||
teardown_mock_sysfs
|
||||
rm -f "$OUTPUT_FILE"
|
||||
}
|
||||
|
||||
@test "log-metrics: produces CSV with correct header" {
|
||||
run bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--duration 2 --interval 1 --output "$OUTPUT_FILE"
|
||||
assert_success
|
||||
head -1 "$OUTPUT_FILE" | grep -q "timestamp,gpu_busy_pct,vram_used_mib,gtt_used_mib,gpu_temp_c,gpu_power_w,cpu_pct,ram_used_mib"
|
||||
}
|
||||
|
||||
@test "log-metrics: produces at least 1 data row in 3 seconds" {
|
||||
bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--duration 3 --interval 1 --output "$OUTPUT_FILE" 2>/dev/null
|
||||
local lines
|
||||
lines=$(wc -l < "$OUTPUT_FILE")
|
||||
(( lines >= 2 )) # header + at least 1 data row
|
||||
}
|
||||
|
||||
@test "log-metrics: data rows have 8 comma-separated fields" {
|
||||
bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--duration 3 --interval 1 --output "$OUTPUT_FILE" 2>/dev/null
|
||||
# Check second line (first data row)
|
||||
local row
|
||||
row=$(sed -n '2p' "$OUTPUT_FILE")
|
||||
local field_count
|
||||
field_count=$(echo "$row" | awk -F, '{print NF}')
|
||||
[ "$field_count" -eq 8 ]
|
||||
}
|
||||
|
||||
@test "log-metrics: rejects non-numeric interval" {
|
||||
run bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--interval abc --duration 1 --output "$OUTPUT_FILE"
|
||||
assert_failure
|
||||
assert_output --partial "positive integer"
|
||||
}
|
||||
|
||||
@test "log-metrics: rejects non-numeric duration" {
|
||||
run bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--duration abc --interval 1 --output "$OUTPUT_FILE"
|
||||
assert_failure
|
||||
assert_output --partial "positive integer"
|
||||
}
|
||||
|
||||
@test "log-metrics: creates output file in specified path" {
|
||||
local custom_output
|
||||
custom_output="$(mktemp -d)/custom-metrics.csv"
|
||||
bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--duration 2 --interval 1 --output "$custom_output" 2>/dev/null
|
||||
[ -f "$custom_output" ]
|
||||
rm -f "$custom_output"
|
||||
rmdir "$(dirname "$custom_output")"
|
||||
}
|
||||
|
||||
@test "log-metrics: warns on unknown argument" {
|
||||
run bash "$PROJECT_ROOT/scripts/monitor/log-metrics.sh" \
|
||||
--bogus-flag --duration 1 --interval 1 --output "$OUTPUT_FILE"
|
||||
assert_output --partial "Unknown argument"
|
||||
}
|
||||
53
tests/test_helper.sh
Normal file
53
tests/test_helper.sh
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env bash
# Shared test helper — sets up mocked environment for BATS tests.
#
# Locations of the bats helper libraries may be overridden via the
# BATS_SUPPORT / BATS_ASSERT / BATS_FILE environment variables.

BATS_SUPPORT="${BATS_SUPPORT:-$HOME/.local/share/bats-support}"
BATS_ASSERT="${BATS_ASSERT:-$HOME/.local/share/bats-assert}"
BATS_FILE="${BATS_FILE:-$HOME/.local/share/bats-file}"

load "$BATS_SUPPORT/load.bash"
load "$BATS_ASSERT/load.bash"

PROJECT_ROOT="$(cd "$(dirname "${BATS_TEST_FILENAME}")/.." && pwd)"
FIXTURES_DIR="$PROJECT_ROOT/tests/fixtures"

# Create a temporary mock sysfs tree for GPU detection tests.
# Individual values can be overridden by setting MOCK_* variables before
# the call. Sets MOCK_SYSFS (tree root) and exports GPU_SYSFS (device dir).
setup_mock_sysfs() {
  MOCK_SYSFS="$(mktemp -d)"
  local gpu_dir="$MOCK_SYSFS/class/drm/card0/device"
  mkdir -p "$gpu_dir/hwmon/hwmon0"

  echo "0x1002" > "$gpu_dir/vendor"
  echo "0x1586" > "$gpu_dir/device"
  echo "${MOCK_VRAM_TOTAL:-536870912}" > "$gpu_dir/mem_info_vram_total"
  echo "${MOCK_VRAM_USED:-104857600}" > "$gpu_dir/mem_info_vram_used"
  echo "${MOCK_GTT_TOTAL:-64424509440}" > "$gpu_dir/mem_info_gtt_total"
  echo "${MOCK_GTT_USED:-209715200}" > "$gpu_dir/mem_info_gtt_used"
  echo "${MOCK_GPU_BUSY:-5}" > "$gpu_dir/gpu_busy_percent"
  echo "${MOCK_GPU_TEMP:-50000}" > "$gpu_dir/hwmon/hwmon0/temp1_input"
  echo "${MOCK_GPU_POWER:-25000000}" > "$gpu_dir/hwmon/hwmon0/power1_average"

  export GPU_SYSFS="$gpu_dir"
}

# Remove the mock sysfs tree. Safe to call even when setup never ran:
# uses an explicit `if` so the function returns 0 when MOCK_SYSFS is
# unset (a bare `[[ ... ]] && rm` would return 1 and fail bats teardown).
teardown_mock_sysfs() {
  if [[ -n "${MOCK_SYSFS:-}" ]]; then
    rm -rf -- "$MOCK_SYSFS"
  fi
}

# Create a mock /proc/cmdline (content overridable via MOCK_CMDLINE).
setup_mock_cmdline() {
  MOCK_PROC="$(mktemp -d)"
  echo "${MOCK_CMDLINE:-BOOT_IMAGE=/boot/vmlinuz root=UUID=abc ro quiet}" > "$MOCK_PROC/cmdline"
}

# Remove the mock /proc tree; returns 0 when MOCK_PROC was never set.
teardown_mock_proc() {
  if [[ -n "${MOCK_PROC:-}" ]]; then
    rm -rf -- "$MOCK_PROC"
  fi
}

# Source a lib file with PROJECT_ROOT set correctly
source_lib() {
  local lib="$1"
  # Disable color for predictable test output
  export TERM=dumb
  source "$PROJECT_ROOT/lib/$lib"
}
|
||||
Reference in New Issue
Block a user