feat(test): add downstream acceptance and fault harness artifacts

This commit packages the standalone task-14 acceptance and task-15 fault-suite execution toolchain for downstream validation.

It includes all runnable harness scripts, helper utilities, and generated evidence captures so downstream behavior can be reproduced and reviewed independently from docs and core implementation.

Bundling these assets separately allows QA/automation workflows to validate runtime changes without dragging operational notes or release-gate documentation into the same review unit.

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
2026-03-05 20:32:12 +08:00
parent 56e874ab6d
commit 991f7ded34
6 changed files with 1894 additions and 0 deletions
+384
View File
@@ -0,0 +1,384 @@
#!/usr/bin/env bash
# Task-14 acceptance harness: executes the protocol/codec streaming matrix
# and captures evidence (per-row logs, a TSV manifest, and a JSON summary).
#
# NOTE: -e is deliberately absent — the harness inspects the exit codes of
# the processes it drives instead of aborting on the first failure.
set -u -o pipefail

# Directory layout derived from this script's location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
STREAMER_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
WORKTREE_ROOT="$(cd "${STREAMER_ROOT}/../.." && pwd)"
BUILD_DIR="${STREAMER_ROOT}/build"
EVIDENCE_ROOT="${WORKTREE_ROOT}/.sisyphus/evidence"
TASK_EVIDENCE_DIR="${EVIDENCE_ROOT}/task-14-acceptance"
SUMMARY_HELPER="${SCRIPT_DIR}/acceptance_summary_helper.py"

# Populated by allocate_run_dir(). Previously MANIFEST_TSV and SUMMARY_JSON
# were expanded from the still-empty RUN_DIR here, which yielded the bogus
# root paths "/rows.tsv" and "/summary.json" until reassignment — keep them
# empty until a run directory actually exists.
RUN_ID=""
RUN_DIR=""
MANIFEST_TSV=""
SUMMARY_JSON=""

LATEST_SUMMARY_JSON="${EVIDENCE_ROOT}/task-14-acceptance-summary.json"
EVIDENCE_TEXT="${EVIDENCE_ROOT}/task-14-acceptance.txt"
STARTED_AT_UTC="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
mkdir -p "${TASK_EVIDENCE_DIR}"
# Claim a unique run directory under TASK_EVIDENCE_DIR.
# mkdir (without -p) is atomic, so a successful call proves no concurrent
# run owns the same id; on success RUN_ID, RUN_DIR, MANIFEST_TSV and
# SUMMARY_JSON are set. Returns 1 after 50 failed attempts.
allocate_run_dir() {
  local tries
  for (( tries = 0; tries < 50; tries++ )); do
    local id dir
    # Timestamp + nanoseconds + pid + $RANDOM keeps collisions unlikely.
    id="$(date +"%Y%m%dT%H%M%S")-$(date +"%N")-p$$-$RANDOM"
    dir="${TASK_EVIDENCE_DIR}/${id}"
    if mkdir "${dir}" 2>/dev/null; then
      RUN_ID="${id}"
      RUN_DIR="${dir}"
      MANIFEST_TSV="${RUN_DIR}/rows.tsv"
      SUMMARY_JSON="${RUN_DIR}/summary.json"
      return 0
    fi
    sleep 0.01
  done
  echo "failed to allocate unique acceptance run directory" >&2
  return 1
}
# Claim the run directory before any evidence is written; abort otherwise.
allocate_run_dir || exit 1
# Write the manifest header. printf replaces `echo -e`, whose escape
# handling is shell/flag dependent; the tab-separated column list itself is
# unchanged.
printf 'order\trow_id\tname\tprotocol\tcodec\trtmp_mode\tstatus\treason\tduration_ms\tsim_rc\tstreamer_rc\ttester_rc\tsim_log\tstreamer_log\ttester_log\tsdp_path\n' > "${MANIFEST_TSV}"
# PIDs of background helpers (testers, simulators) awaiting teardown.
cleanup_pids=()

# Best-effort teardown: send TERM to every tracked child that is still
# alive. Registered on EXIT so aborted runs do not leak processes.
cleanup_all() {
  local p
  for p in "${cleanup_pids[@]:-}"; do
    [[ -z "${p}" ]] && continue
    if kill -0 "${p}" 2>/dev/null; then
      kill "${p}" 2>/dev/null || true
    fi
  done
}
trap cleanup_all EXIT
# Succeed when $1 names an existing executable file.
binary_exists() {
  [[ -x "$1" ]]
}
# Wait for a child PID with a coarse (1s granularity) timeout.
# $1 - pid of a child of this shell; $2 - timeout in seconds.
# Returns the child's exit status, or 124 after killing it on timeout.
wait_pid() {
  local target="$1"
  local limit="$2"
  local waited=0
  until ! kill -0 "${target}" 2>/dev/null; do
    if (( waited >= limit )); then
      kill "${target}" 2>/dev/null || true
      wait "${target}" 2>/dev/null || true
      return 124
    fi
    sleep 1
    waited=$((waited + 1))
  done
  # Child is gone; recover its stored exit status from the shell.
  wait "${target}" 2>/dev/null
}
# Append one tab-separated result row to MANIFEST_TSV.
# printf '%s' replaces `echo -e`, which re-interpreted backslash sequences
# inside field content (e.g. a literal "\t" in a reason string or log path
# silently corrupted the TSV). Fields are written verbatim.
# Arguments (16, in manifest column order):
#   $1 order  $2 row_id  $3 name  $4 protocol  $5 codec  $6 rtmp_mode
#   $7 status  $8 reason  $9 duration_ms  $10 sim_rc  $11 streamer_rc
#   $12 tester_rc  $13 sim_log  $14 streamer_log  $15 tester_log  $16 sdp_path
append_manifest_row() {
  local order="$1"
  local row_id="$2"
  local name="$3"
  local protocol="$4"
  local codec="$5"
  local rtmp_mode="$6"
  local status="$7"
  local reason="$8"
  local duration_ms="$9"
  local sim_rc="${10}"
  local streamer_rc="${11}"
  local tester_rc="${12}"
  local sim_log="${13}"
  local streamer_log="${14}"
  local tester_log="${15}"
  local sdp_path="${16}"
  printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \
    "${order}" "${row_id}" "${name}" "${protocol}" "${codec}" "${rtmp_mode}" \
    "${status}" "${reason}" "${duration_ms}" "${sim_rc}" "${streamer_rc}" \
    "${tester_rc}" "${sim_log}" "${streamer_log}" "${tester_log}" \
    "${sdp_path}" >> "${MANIFEST_TSV}"
}
# Execute one acceptance-matrix row end to end and append its result to the
# manifest.
#
# Launch order: protocol tester (background) -> frame simulator (background)
# -> streamer (foreground). The fixed 1s sleeps are crude readiness waits
# for the background listeners before the next process connects.
#
# Arguments:
#   $1 order     - 1-based matrix position (names the per-row directory)
#   $2 row_id    - stable row identifier; also selects RTMP port/mode below
#   $3 name      - human-readable description (manifest only)
#   $4 protocol  - "rtp" selects the RTP path; anything else is RTMP
#   $5 codec     - "h264" or "h265" (selects the RTP port/payload type)
#   $6 rtmp_mode - passed to --rtmp-mode for RTMP rows; unused for RTP rows
#
# Globals read: RUN_DIR, RUN_ID, BUILD_DIR. Globals written: cleanup_pids
# (background PIDs) and MANIFEST_TSV (via append_manifest_row).
run_matrix_row() {
local order="$1"
local row_id="$2"
local name="$3"
local protocol="$4"
local codec="$5"
local rtmp_mode="$6"
# Per-row workspace for the three process logs and (for RTP) the SDP file.
local row_dir="${RUN_DIR}/${order}-${row_id}"
mkdir -p "${row_dir}"
local sim_log="${row_dir}/sim.log"
local streamer_log="${row_dir}/streamer.log"
local tester_log="${row_dir}/tester.log"
local sdp_path=""
# IPC names embed row_id and RUN_ID so concurrent runs cannot collide.
local shm_name="cvmmap_accept_${row_id}_${RUN_ID}"
local zmq_endpoint="ipc:///tmp/cvmmap_accept_${row_id}_${RUN_ID}.ipc"
local sim_label="acc_${order}_${protocol}_${codec}"
# Frame source: publishes 320 frames of 640x360 at 200 fps over shm + zmq.
local sim_cmd=(
"${BUILD_DIR}/cvmmap_sim"
--shm-name "${shm_name}"
--zmq-endpoint "${zmq_endpoint}"
--label "${sim_label}"
--frames 320
--fps 200
--width 640
--height 360
)
# System under test: ingests at most 120 frames, or stops after 6s of
# ingest idle. Protocol-specific output flags are appended below.
local streamer_cmd=(
"${BUILD_DIR}/cvmmap_streamer"
--run-mode pipeline
--codec "${codec}"
--shm-name "${shm_name}"
--zmq-endpoint "${zmq_endpoint}"
--queue-size 1
--gop 30
--b-frames 0
--ingest-max-frames 120
--ingest-idle-timeout-ms 6000
)
local tester_cmd=()
if [[ "${protocol}" == "rtp" ]]; then
# RTP path: fixed per-codec port and dynamic payload type
# (96 for H.264, 98 for H.265 in this harness).
local rtp_port
local payload_type
if [[ "${codec}" == "h264" ]]; then
rtp_port=51040
payload_type=96
else
rtp_port=51042
payload_type=98
fi
sdp_path="${row_dir}/stream.sdp"
streamer_cmd+=(
--rtp
--rtp-endpoint "127.0.0.1:${rtp_port}"
--rtp-payload-type "${payload_type}"
--rtp-sdp "${sdp_path}"
)
# Receiver passes once it sees at least one packet of the expected
# payload type within 10s.
tester_cmd=(
"${BUILD_DIR}/rtp_receiver_tester"
--port "${rtp_port}"
--expect-pt "${payload_type}"
--packet-threshold 1
--timeout-ms 10000
)
else
# RTMP path: each known row id gets its own stub port and packaging
# mode; unknown ids fall back to port 19366 in plain h264 mode.
local rtmp_port
local tester_mode
case "${row_id}" in
rtmp_h264)
rtmp_port=19360
tester_mode="h264"
;;
rtmp_h265_enhanced)
rtmp_port=19362
tester_mode="h265-enhanced"
;;
rtmp_h265_domestic)
rtmp_port=19364
tester_mode="h265-domestic"
;;
*)
rtmp_port=19366
tester_mode="h264"
;;
esac
streamer_cmd+=(
--rtmp
--rtmp-url "rtmp://127.0.0.1:${rtmp_port}/live/${row_id}"
--rtmp-mode "${rtmp_mode}"
)
# Stub server passes once it sees at least one video message within 10s.
tester_cmd=(
"${BUILD_DIR}/rtmp_stub_tester"
--mode "${tester_mode}"
--listen-host 127.0.0.1
--listen-port "${rtmp_port}"
--video-threshold 1
--timeout-ms 10000
)
fi
local row_start_ms row_end_ms duration_ms
# NOTE(review): %3N (milliseconds) is a GNU date extension — confirm target
# hosts use GNU coreutils; BSD/macOS date emits the literal characters.
row_start_ms="$(date +%s%3N)"
# Tester first so its socket is listening before the streamer connects.
"${tester_cmd[@]}" > "${tester_log}" 2>&1 &
local tester_pid=$!
cleanup_pids+=("${tester_pid}")
sleep 1
"${sim_cmd[@]}" > "${sim_log}" 2>&1 &
local sim_pid=$!
cleanup_pids+=("${sim_pid}")
sleep 1
# Streamer runs in the foreground; its exit bounds the row's lifetime.
"${streamer_cmd[@]}" > "${streamer_log}" 2>&1
local streamer_rc=$?
# Reap the background helpers; wait_pid kills them (rc 124) after 15s.
wait_pid "${tester_pid}" 15
local tester_rc=$?
wait_pid "${sim_pid}" 15
local sim_rc=$?
row_end_ms="$(date +%s%3N)"
duration_ms=$((row_end_ms - row_start_ms))
# A row passes only when all three processes exited 0; otherwise the
# reason records every return code for triage.
local status="PASS"
local reason="all-processes-ok"
if (( sim_rc != 0 || streamer_rc != 0 || tester_rc != 0 )); then
status="FAIL"
reason="sim_rc=${sim_rc},streamer_rc=${streamer_rc},tester_rc=${tester_rc}"
fi
append_manifest_row \
"${order}" \
"${row_id}" \
"${name}" \
"${protocol}" \
"${codec}" \
"${rtmp_mode}" \
"${status}" \
"${reason}" \
"${duration_ms}" \
"${sim_rc}" \
"${streamer_rc}" \
"${tester_rc}" \
"${sim_log}" \
"${streamer_log}" \
"${tester_log}" \
"${sdp_path}"
printf "[%s] %s => %s (%s)\n" "${row_id}" "${name}" "${status}" "${reason}"
}
# Orchestrate the acceptance run: preflight the build artifacts, execute the
# five-row protocol/codec matrix, summarise via the python helper, publish
# evidence pointers, and gate on a clean 5/5 pass.
# Returns 0 only when total=5, pass=5, fail=0, skip=0.
main() {
  local required=(
    "${BUILD_DIR}/cvmmap_sim"
    "${BUILD_DIR}/cvmmap_streamer"
    "${BUILD_DIR}/rtp_receiver_tester"
    "${BUILD_DIR}/rtmp_stub_tester"
  )
  local missing=()
  local bin
  for bin in "${required[@]}"; do
    if ! binary_exists "${bin}"; then
      missing+=("${bin}")
    fi
  done
  if (( ${#missing[@]} > 0 )); then
    # Preflight failure: still emit five SKIP rows so the summary helper
    # always sees a full matrix.
    local idx
    for idx in 1 2 3 4 5; do
      append_manifest_row \
        "${idx}" \
        "preflight_${idx}" \
        "preflight missing binary" \
        "preflight" \
        "n/a" \
        "" \
        "SKIP" \
        "missing binaries: ${missing[*]}" \
        "0" \
        "-1" \
        "-1" \
        "-1" \
        "" \
        "" \
        "" \
        ""
    done
  else
    # The acceptance matrix: order, row id, label, protocol, codec, mode.
    run_matrix_row 1 "rtp_h264" "RTP + H.264" "rtp" "h264" ""
    run_matrix_row 2 "rtp_h265" "RTP + H.265" "rtp" "h265" ""
    run_matrix_row 3 "rtmp_h264" "RTMP + H.264" "rtmp" "h264" "enhanced"
    run_matrix_row 4 "rtmp_h265_enhanced" "RTMP + H.265 enhanced" "rtmp" "h265" "enhanced"
    run_matrix_row 5 "rtmp_h265_domestic" "RTMP + H.265 domestic" "rtmp" "h265" "domestic"
  fi
  local finished_at_utc
  finished_at_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  # Build summary.json from the manifest. The rc is checked only after the
  # evidence pointer file is written, so a pointer exists even on failure.
  python3 "${SUMMARY_HELPER}" \
    --manifest "${MANIFEST_TSV}" \
    --output "${SUMMARY_JSON}" \
    --run-id "${RUN_ID}" \
    --run-dir "${RUN_DIR}" \
    --started-at "${STARTED_AT_UTC}" \
    --finished-at "${finished_at_utc}"
  local summary_rc=$?
  cp -f "${SUMMARY_JSON}" "${LATEST_SUMMARY_JSON}" 2>/dev/null || true
  {
    echo "task=14"
    echo "run_id=${RUN_ID}"
    echo "run_dir=${RUN_DIR}"
    echo "manifest=${MANIFEST_TSV}"
    echo "summary_json=${SUMMARY_JSON}"
    echo "latest_summary_json=${LATEST_SUMMARY_JSON}"
    echo "started_at=${STARTED_AT_UTC}"
    echo "finished_at=${finished_at_utc}"
  } > "${EVIDENCE_TEXT}"
  if (( summary_rc != 0 )); then
    echo "summary helper failed with rc=${summary_rc}" >&2
    return 1
  fi
  # One helper invocation extracts all four counters (previously four
  # near-identical python3 heredocs, one per counter). If it fails, the
  # counters stay empty and the gate below returns 1, as before.
  local counts_line
  counts_line="$(python3 - "${SUMMARY_JSON}" <<'PY'
import json
import sys
data = json.load(open(sys.argv[1], "r", encoding="utf-8"))
counts = data.get("counts", {})
print(counts.get("pass", 0), counts.get("fail", 0), counts.get("skip", 0), counts.get("total", 0))
PY
)"
  local pass_count fail_count skip_count total_count
  read -r pass_count fail_count skip_count total_count <<< "${counts_line}"
  echo "summary: total=${total_count} pass=${pass_count} fail=${fail_count} skip=${skip_count}"
  echo "json: ${SUMMARY_JSON}"
  # Release gate: every matrix row must have passed outright.
  if [[ "${total_count}" == "5" && "${pass_count}" == "5" && "${fail_count}" == "0" && "${skip_count}" == "0" ]]; then
    return 0
  fi
  return 1
}
main "$@"