refactor(streamer): remove gstreamer and legacy rtmp paths

This commit is contained in:
2026-03-11 16:43:29 +08:00
parent ed3f32ff6e
commit 782af9481c
22 changed files with 817 additions and 3339 deletions
+22 -73
View File
@@ -14,10 +14,6 @@
#include <utility>
#include <vector>
#ifndef CVMMAP_STREAMER_HAS_GSTREAMER
#define CVMMAP_STREAMER_HAS_GSTREAMER 0
#endif
namespace cvmmap_streamer {
namespace {
@@ -36,10 +32,16 @@ std::string trim_copy(std::string value) {
}
std::string normalize_cli_error(std::string raw_message) {
if (raw_message.find("The following argument was not expected:") != std::string::npos) {
if (
raw_message.find("The following argument was not expected:") != std::string::npos ||
raw_message.find("The following arguments were not expected:") != std::string::npos) {
const auto pos = raw_message.find(':');
if (pos != std::string::npos && pos + 1 < raw_message.size()) {
return "unknown argument: " + trim_copy(raw_message.substr(pos + 1));
const auto argument = trim_copy(raw_message.substr(pos + 1));
if (argument.rfind("--rtmp-mode", 0) == 0) {
return "unknown argument: --rtmp-mode (removed; RTMP always uses enhanced mode)";
}
return "unknown argument: " + argument;
}
return "unknown argument";
}
@@ -113,16 +115,6 @@ std::expected<RunMode, std::string> parse_run_mode(std::string_view raw) {
return std::unexpected("invalid run mode: '" + std::string(raw) + "' (expected: pipeline|ingest)");
}
std::expected<RtmpMode, std::string> parse_rtmp_mode(std::string_view raw) {
if (raw == "enhanced") {
return RtmpMode::Enhanced;
}
if (raw == "domestic") {
return RtmpMode::Domestic;
}
return std::unexpected("invalid rtmp mode: '" + std::string(raw) + "' (expected: enhanced|domestic)");
}
std::expected<RtmpTransportType, std::string> parse_rtmp_transport(std::string_view raw) {
if (raw == "libavformat") {
return RtmpTransportType::Libavformat;
@@ -131,10 +123,10 @@ std::expected<RtmpTransportType, std::string> parse_rtmp_transport(std::string_v
return RtmpTransportType::FfmpegProcess;
}
if (raw == "legacy_custom" || raw == "legacy-custom") {
return RtmpTransportType::LegacyCustom;
return std::unexpected(
"invalid rtmp transport: '" + std::string(raw) + "' was removed; use libavformat or ffmpeg_process");
}
return std::unexpected(
"invalid rtmp transport: '" + std::string(raw) + "' (expected: libavformat|ffmpeg_process|legacy_custom)");
return std::unexpected("invalid rtmp transport: '" + std::string(raw) + "' (expected: libavformat|ffmpeg_process)");
}
std::expected<EncoderBackendType, std::string> parse_encoder_backend(std::string_view raw) {
@@ -145,9 +137,9 @@ std::expected<EncoderBackendType, std::string> parse_encoder_backend(std::string
return EncoderBackendType::FFmpeg;
}
if (raw == "gstreamer_legacy" || raw == "gstreamer-legacy") {
return EncoderBackendType::GStreamerLegacy;
return std::unexpected("invalid encoder backend: '" + std::string(raw) + "' was removed; use ffmpeg");
}
return std::unexpected("invalid encoder backend: '" + std::string(raw) + "' (expected: auto|ffmpeg|gstreamer_legacy)");
return std::unexpected("invalid encoder backend: '" + std::string(raw) + "' (expected: auto|ffmpeg)");
}
std::expected<EncoderDeviceType, std::string> parse_encoder_device(std::string_view raw) {
@@ -352,13 +344,9 @@ std::expected<void, std::string> apply_toml_file(RuntimeConfig &config, const st
if (auto value = toml_value<std::string>(table, "outputs.rtmp.ffmpeg_path")) {
config.outputs.rtmp.ffmpeg_path = *value;
}
if (auto value = toml_value<std::string>(table, "outputs.rtmp.mode")) {
auto parsed = parse_rtmp_mode(*value);
if (!parsed) {
return std::unexpected(parsed.error());
if (auto value = toml_value<std::string>(table, "outputs.rtmp.mode")) {
return std::unexpected("invalid RTMP config: outputs.rtmp.mode was removed; RTMP always uses enhanced mode");
}
config.outputs.rtmp.mode = *parsed;
}
if (auto value = toml_value<bool>(table, "record.mcap.enabled")) {
config.record.mcap.enabled = *value;
}
@@ -502,8 +490,6 @@ std::string_view to_string(RtmpMode mode) {
switch (mode) {
case RtmpMode::Enhanced:
return "enhanced";
case RtmpMode::Domestic:
return "domestic";
}
return "unknown";
}
@@ -514,8 +500,6 @@ std::string_view to_string(RtmpTransportType transport) {
return "libavformat";
case RtmpTransportType::FfmpegProcess:
return "ffmpeg_process";
case RtmpTransportType::LegacyCustom:
return "legacy_custom";
}
return "unknown";
}
@@ -526,8 +510,6 @@ std::string_view to_string(EncoderBackendType backend) {
return "auto";
case EncoderBackendType::FFmpeg:
return "ffmpeg";
case EncoderBackendType::GStreamerLegacy:
return "gstreamer_legacy";
}
return "unknown";
}
@@ -565,7 +547,6 @@ std::expected<RuntimeConfig, std::string> parse_runtime_config(int argc, char **
std::string codec_raw{};
std::string encoder_backend_raw{};
std::string encoder_device_raw{};
std::string rtmp_mode_raw{};
std::string rtmp_transport_raw{};
std::string rtmp_ffmpeg_path_raw{};
std::vector<std::string> rtmp_urls_raw{};
@@ -605,7 +586,6 @@ std::expected<RuntimeConfig, std::string> parse_runtime_config(int argc, char **
app.add_option("--rtmp-url", rtmp_urls_raw);
app.add_option("--rtmp-transport", rtmp_transport_raw);
app.add_option("--rtmp-ffmpeg", rtmp_ffmpeg_path_raw);
app.add_option("--rtmp-mode", rtmp_mode_raw);
app.add_flag("--rtp", rtp_enabled);
app.add_option("--rtp-endpoint", rtp_endpoint_raw);
app.add_option("--rtp-payload-type", rtp_payload_type_raw);
@@ -692,14 +672,6 @@ std::expected<RuntimeConfig, std::string> parse_runtime_config(int argc, char **
if (!rtmp_ffmpeg_path_raw.empty()) {
config.outputs.rtmp.ffmpeg_path = rtmp_ffmpeg_path_raw;
}
if (!rtmp_mode_raw.empty()) {
auto parsed = parse_rtmp_mode(rtmp_mode_raw);
if (!parsed) {
return std::unexpected(parsed.error());
}
config.outputs.rtmp.mode = *parsed;
}
config.outputs.rtp.enabled = config.outputs.rtp.enabled || rtp_enabled;
if (!rtp_endpoint_raw.empty()) {
config.outputs.rtp.enabled = true;
@@ -831,27 +803,14 @@ std::expected<void, std::string> validate_runtime_config(const RuntimeConfig &co
return std::unexpected("invalid RTMP config: URL must not be empty");
}
}
if (config.encoder.backend == EncoderBackendType::GStreamerLegacy && config.record.mcap.enabled) {
return std::unexpected("invalid backend/output matrix: MCAP recording requires the ffmpeg encoded access-unit path");
}
if (config.outputs.rtmp.enabled) {
if (config.outputs.rtmp.transport == RtmpTransportType::LegacyCustom) {
if (config.outputs.rtmp.mode == RtmpMode::Domestic && config.encoder.codec != CodecType::H265) {
return std::unexpected("invalid mode matrix: domestic RTMP mode requires codec h265");
}
if (config.encoder.backend != EncoderBackendType::GStreamerLegacy) {
return std::unexpected("invalid backend/output matrix: legacy_custom RTMP requires encoder.backend=gstreamer_legacy");
}
} else {
if (config.outputs.rtmp.mode != RtmpMode::Enhanced) {
return std::unexpected("invalid RTMP config: non-legacy RTMP transports only support rtmp.mode=enhanced");
}
if (config.encoder.backend != EncoderBackendType::FFmpeg) {
return std::unexpected("invalid backend/output matrix: RTMP transports libavformat and ffmpeg_process require encoder.backend=ffmpeg");
}
if (config.outputs.rtmp.transport == RtmpTransportType::FfmpegProcess && config.outputs.rtmp.ffmpeg_path.empty()) {
return std::unexpected("invalid RTMP config: ffmpeg_process transport requires a non-empty ffmpeg path");
}
if (config.encoder.backend == EncoderBackendType::Auto) {
// auto resolves to FFmpeg; nothing else is supported.
} else if (config.encoder.backend != EncoderBackendType::FFmpeg) {
return std::unexpected("invalid backend/output matrix: RTMP requires encoder.backend=ffmpeg or auto");
}
if (config.outputs.rtmp.transport == RtmpTransportType::FfmpegProcess && config.outputs.rtmp.ffmpeg_path.empty()) {
return std::unexpected("invalid RTMP config: ffmpeg_process transport requires a non-empty ffmpeg path");
}
}
@@ -891,15 +850,6 @@ std::expected<void, std::string> validate_runtime_config(const RuntimeConfig &co
return std::unexpected("invalid ingest config: ingest_idle_timeout_ms must be >= 1");
}
#if !CVMMAP_STREAMER_HAS_GSTREAMER
if (config.encoder.backend == EncoderBackendType::GStreamerLegacy) {
return std::unexpected("invalid backend config: gstreamer_legacy backend requested but GStreamer support is not compiled");
}
if (config.outputs.rtmp.enabled && config.outputs.rtmp.transport == RtmpTransportType::LegacyCustom) {
return std::unexpected("invalid output config: legacy_custom RTMP requires GStreamer support, which is not compiled");
}
#endif
return {};
}
@@ -914,7 +864,6 @@ std::string summarize_runtime_config(const RuntimeConfig &config) {
ss << ", encoder.b_frames=" << config.encoder.b_frames;
ss << ", rtmp.enabled=" << (config.outputs.rtmp.enabled ? "true" : "false");
ss << ", rtmp.transport=" << to_string(config.outputs.rtmp.transport);
ss << ", rtmp.mode=" << to_string(config.outputs.rtmp.mode);
ss << ", rtmp.urls=" << config.outputs.rtmp.urls.size();
ss << ", rtp.enabled=" << (config.outputs.rtp.enabled ? "true" : "false");
ss << ", rtp.endpoint=" << (config.outputs.rtp.endpoint ? *config.outputs.rtp.endpoint : "<unset>");
-18
View File
@@ -3,29 +3,11 @@
namespace cvmmap_streamer::encode {
EncoderBackend make_ffmpeg_backend();
EncoderBackend make_gstreamer_legacy_backend();
Result<EncoderBackend> make_encoder_backend(const RuntimeConfig &config) {
switch (config.encoder.backend) {
case EncoderBackendType::FFmpeg:
return make_ffmpeg_backend();
case EncoderBackendType::GStreamerLegacy: {
auto backend = make_gstreamer_legacy_backend();
if (!backend) {
return unexpected_error(ERR_BACKEND_UNAVAILABLE, "legacy GStreamer backend is not compiled in this build");
}
return backend;
}
case EncoderBackendType::Auto:
if (config.outputs.rtmp.enabled && config.outputs.rtmp.transport == RtmpTransportType::LegacyCustom) {
auto backend = make_gstreamer_legacy_backend();
if (!backend) {
return unexpected_error(
ERR_BACKEND_UNAVAILABLE,
"legacy_custom RTMP requires the GStreamer backend, but it is not compiled");
}
return backend;
}
return make_ffmpeg_backend();
}
-457
View File
@@ -1,457 +0,0 @@
#include "cvmmap_streamer/encode/encoder_backend.hpp"
#include <array>
#include <cstring>
#include <mutex>
#include <optional>
#include <string>
#include <string_view>
#include <utility>
#include <vector>
#include <spdlog/spdlog.h>
#if __has_include(<gst/app/gstappsink.h>) && __has_include(<gst/app/gstappsrc.h>) && __has_include(<gst/gst.h>)
#define CVMMAP_STREAMER_HAS_GSTREAMER 1
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/gst.h>
#else
#define CVMMAP_STREAMER_HAS_GSTREAMER 0
#endif
namespace cvmmap_streamer::encode {
namespace {
#if CVMMAP_STREAMER_HAS_GSTREAMER
[[nodiscard]]
Result<const char *> pixel_format_to_caps(ipc::PixelFormat format) {
switch (format) {
case ipc::PixelFormat::BGR:
return "BGR";
case ipc::PixelFormat::RGB:
return "RGB";
case ipc::PixelFormat::BGRA:
return "BGRA";
case ipc::PixelFormat::RGBA:
return "RGBA";
case ipc::PixelFormat::GRAY:
return "GRAY8";
default:
return unexpected_error(ERR_UNSUPPORTED, "unsupported raw pixel format for legacy GStreamer backend");
}
}
void ensure_gst_initialized() {
static std::once_flag gst_init_flag;
std::call_once(gst_init_flag, []() {
gst_init(nullptr, nullptr);
spdlog::info("GStreamer initialized: {}", gst_version_string());
});
}
[[nodiscard]]
std::string selected_parser_name(CodecType codec) {
return codec == CodecType::H265 ? "h265parse" : "h264parse";
}
struct EncoderChoice {
std::string encoder_name;
std::string parser_name;
bool is_nvenc{false};
};
[[nodiscard]]
std::vector<std::string_view> encoder_candidates(CodecType codec, bool prefer_nvenc) {
if (codec == CodecType::H265) {
if (prefer_nvenc) {
return {"nvh265enc", "x265enc", "avenc_libx265"};
}
return {"x265enc", "avenc_libx265", "nvh265enc"};
}
if (prefer_nvenc) {
return {"nvh264enc", "x264enc", "openh264enc", "avenc_h264"};
}
return {"x264enc", "openh264enc", "avenc_h264", "nvh264enc"};
}
[[nodiscard]]
Result<EncoderChoice> pick_encoder_choice(CodecType codec, bool prefer_nvenc) {
const std::string parser_name = selected_parser_name(codec);
if (gst_element_factory_find(parser_name.c_str()) == nullptr) {
return unexpected_error(
ERR_BACKEND_UNAVAILABLE,
"required GStreamer parser element '" + parser_name + "' is unavailable");
}
for (const auto candidate : encoder_candidates(codec, prefer_nvenc)) {
if (gst_element_factory_find(candidate.data()) == nullptr) {
continue;
}
EncoderChoice choice{};
choice.encoder_name = std::string(candidate);
choice.parser_name = parser_name;
choice.is_nvenc = choice.encoder_name.starts_with("nvh");
return choice;
}
return unexpected_error(ERR_BACKEND_UNAVAILABLE, "no usable GStreamer encoder available");
}
[[nodiscard]]
std::string encoder_input_format(const std::string &encoder_name) {
if (encoder_name == "x265enc" || encoder_name == "openh264enc") {
return "I420";
}
return "NV12";
}
[[nodiscard]]
bool has_property(GObject *object, const char *name) {
if (object == nullptr || name == nullptr) {
return false;
}
return g_object_class_find_property(G_OBJECT_GET_CLASS(object), name) != nullptr;
}
[[nodiscard]]
bool set_property_arg_if_exists(GObject *object, const char *name, const std::string &value) {
if (!has_property(object, name)) {
return false;
}
gst_util_set_object_arg(object, name, value.c_str());
return true;
}
class GstreamerLegacyBackend {
public:
GstreamerLegacyBackend() = default;
~GstreamerLegacyBackend() {
shutdown();
}
[[nodiscard]]
std::string_view backend_name() const {
return "gstreamer_legacy";
}
[[nodiscard]]
bool using_hardware() const {
return using_hardware_;
}
[[nodiscard]]
Status init(const RuntimeConfig &config, const ipc::FrameInfo &frame_info) {
shutdown();
config_ = &config;
frame_info_ = frame_info;
ensure_gst_initialized();
bool prefer_nvenc = config.encoder.device != EncoderDeviceType::Software;
auto encoder_choice = pick_encoder_choice(config.encoder.codec, prefer_nvenc);
if (!encoder_choice && prefer_nvenc && config.encoder.device == EncoderDeviceType::Auto) {
encoder_choice = pick_encoder_choice(config.encoder.codec, false);
}
if (!encoder_choice) {
return std::unexpected(encoder_choice.error());
}
using_hardware_ = encoder_choice->is_nvenc;
active_encoder_name_ = encoder_choice->encoder_name;
active_parser_name_ = encoder_choice->parser_name;
auto pixel_format = pixel_format_to_caps(frame_info.pixel_format);
if (!pixel_format) {
return std::unexpected(pixel_format.error());
}
const std::string codec_caps =
config.encoder.codec == CodecType::H265
? "video/x-h265,stream-format=byte-stream,alignment=au"
: "video/x-h264,stream-format=byte-stream,alignment=au";
const std::string pipeline_desc =
std::string("appsrc name=ingest_src is-live=true format=time do-timestamp=true block=false ") +
"! queue leaky=downstream max-size-buffers=1 max-size-bytes=0 max-size-time=0 " +
"! videoconvert " +
"! video/x-raw,format=" + encoder_input_format(active_encoder_name_) + " " +
"! " + active_encoder_name_ + " name=encoder " +
"! " + active_parser_name_ + " name=parser config-interval=-1 disable-passthrough=true " +
"! " + codec_caps + " " +
"! appsink name=encoded_sink emit-signals=false sync=false drop=true max-buffers=1";
GError *error = nullptr;
pipeline_ = gst_parse_launch(pipeline_desc.c_str(), &error);
if (error != nullptr) {
const std::string message = "failed to create GStreamer pipeline: " + std::string(error->message);
g_error_free(error);
return unexpected_error(ERR_EXTERNAL_LIBRARY, message);
}
if (pipeline_ == nullptr) {
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to create GStreamer pipeline");
}
appsrc_ = gst_bin_get_by_name(GST_BIN(pipeline_), "ingest_src");
appsink_ = gst_bin_get_by_name(GST_BIN(pipeline_), "encoded_sink");
encoder_ = gst_bin_get_by_name(GST_BIN(pipeline_), "encoder");
if (appsrc_ == nullptr || appsink_ == nullptr || encoder_ == nullptr) {
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to locate GStreamer pipeline elements");
}
const auto caps_string =
"video/x-raw,format=(string)" +
std::string(*pixel_format) +
",width=(int)" +
std::to_string(frame_info.width) +
",height=(int)" +
std::to_string(frame_info.height) +
",framerate=(fraction)30/1";
GstCaps *caps = gst_caps_from_string(caps_string.c_str());
if (caps == nullptr) {
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to create GStreamer caps: " + caps_string);
}
gst_app_src_set_caps(GST_APP_SRC(appsrc_), caps);
gst_caps_unref(caps);
gst_app_src_set_stream_type(GST_APP_SRC(appsrc_), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_max_buffers(GST_APP_SRC(appsrc_), 1);
(void)set_property_arg_if_exists(G_OBJECT(appsrc_), "leaky-type", "downstream");
(void)set_property_arg_if_exists(G_OBJECT(appsrc_), "block", "false");
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "bframes", std::to_string(config.encoder.b_frames));
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "rc-lookahead", "0");
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "lookahead", "0");
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "zerolatency", "true");
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "gop-size", std::to_string(config.encoder.gop));
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "iframeinterval", std::to_string(config.encoder.gop));
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "preset", "llhq");
(void)set_property_arg_if_exists(G_OBJECT(encoder_), "tune", "zerolatency");
bus_ = gst_element_get_bus(pipeline_);
if (gst_element_set_state(pipeline_, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to set GStreamer pipeline to PLAYING");
}
spdlog::info(
"ENCODER_PATH codec={} mode={} encoder={} backend=gstreamer_legacy",
to_string(config.encoder.codec),
using_hardware_ ? "hardware" : "software",
active_encoder_name_);
return {};
}
[[nodiscard]]
Result<EncodedStreamInfo> stream_info() const {
if (config_ == nullptr) {
return unexpected_error(ERR_NOT_READY, "legacy GStreamer backend stream info is unavailable before initialization");
}
EncodedStreamInfo info{};
info.codec = config_->encoder.codec;
info.width = frame_info_.width;
info.height = frame_info_.height;
return info;
}
[[nodiscard]]
Status poll() {
if (bus_ == nullptr) {
return {};
}
while (auto *message = gst_bus_pop_filtered(
bus_,
static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_WARNING))) {
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_WARNING) {
GError *warning = nullptr;
gchar *debug = nullptr;
gst_message_parse_warning(message, &warning, &debug);
spdlog::warn(
"legacy backend warning: {} ({})",
warning != nullptr ? warning->message : "unknown",
debug != nullptr ? debug : "no-debug");
if (warning != nullptr) {
g_error_free(warning);
}
if (debug != nullptr) {
g_free(debug);
}
gst_message_unref(message);
continue;
}
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
gst_message_unref(message);
return unexpected_error(ERR_END_OF_STREAM, "legacy backend reached EOS");
}
GError *error = nullptr;
gchar *debug = nullptr;
gst_message_parse_error(message, &error, &debug);
const std::string message_text =
"legacy backend error: " +
std::string(error != nullptr ? error->message : "unknown") +
" (" +
std::string(debug != nullptr ? debug : "no-debug") +
")";
if (error != nullptr) {
g_error_free(error);
}
if (debug != nullptr) {
g_free(debug);
}
gst_message_unref(message);
return unexpected_error(ERR_EXTERNAL_LIBRARY, message_text);
}
return {};
}
[[nodiscard]]
Status push_frame(const RawVideoFrame &frame) {
if (appsrc_ == nullptr) {
return unexpected_error(ERR_NOT_READY, "legacy backend appsrc is null");
}
auto *buffer = gst_buffer_new_allocate(nullptr, frame.bytes.size(), nullptr);
if (buffer == nullptr) {
return unexpected_error(ERR_ALLOCATION_FAILED, "failed to allocate GStreamer buffer");
}
GstMapInfo map{};
if (!gst_buffer_map(buffer, &map, GST_MAP_WRITE)) {
gst_buffer_unref(buffer);
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to map GStreamer buffer");
}
std::memcpy(map.data, frame.bytes.data(), frame.bytes.size());
gst_buffer_unmap(buffer, &map);
if (!first_source_timestamp_ns_) {
first_source_timestamp_ns_ = frame.source_timestamp_ns;
}
const auto pts_ns =
frame.source_timestamp_ns >= *first_source_timestamp_ns_
? frame.source_timestamp_ns - *first_source_timestamp_ns_
: 0ull;
GST_BUFFER_PTS(buffer) = static_cast<GstClockTime>(pts_ns);
GST_BUFFER_DTS(buffer) = static_cast<GstClockTime>(pts_ns);
const auto flow = gst_app_src_push_buffer(GST_APP_SRC(appsrc_), buffer);
if (flow != GST_FLOW_OK) {
return unexpected_error(
ERR_EXTERNAL_LIBRARY,
"legacy backend push failed with flow=" + std::to_string(static_cast<int>(flow)));
}
return {};
}
[[nodiscard]]
Result<std::vector<EncodedAccessUnit>> drain() {
return pull_samples();
}
[[nodiscard]]
Result<std::vector<EncodedAccessUnit>> flush() {
if (appsrc_ != nullptr) {
(void)gst_app_src_end_of_stream(GST_APP_SRC(appsrc_));
}
return pull_samples();
}
void shutdown() {
if (pipeline_ != nullptr) {
gst_element_set_state(pipeline_, GST_STATE_NULL);
}
if (bus_ != nullptr) {
gst_object_unref(bus_);
bus_ = nullptr;
}
if (appsrc_ != nullptr) {
gst_object_unref(appsrc_);
appsrc_ = nullptr;
}
if (appsink_ != nullptr) {
gst_object_unref(appsink_);
appsink_ = nullptr;
}
if (encoder_ != nullptr) {
gst_object_unref(encoder_);
encoder_ = nullptr;
}
if (pipeline_ != nullptr) {
gst_object_unref(pipeline_);
pipeline_ = nullptr;
}
active_encoder_name_.clear();
active_parser_name_.clear();
frame_info_ = ipc::FrameInfo{};
first_source_timestamp_ns_.reset();
using_hardware_ = false;
}
private:
[[nodiscard]]
Result<std::vector<EncodedAccessUnit>> pull_samples() {
std::vector<EncodedAccessUnit> access_units{};
if (appsink_ == nullptr || config_ == nullptr) {
return access_units;
}
while (auto *sample = gst_app_sink_try_pull_sample(GST_APP_SINK(appsink_), 0)) {
auto *buffer = gst_sample_get_buffer(sample);
if (buffer == nullptr) {
gst_sample_unref(sample);
continue;
}
GstMapInfo map{};
if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
gst_sample_unref(sample);
return unexpected_error(ERR_EXTERNAL_LIBRARY, "failed to map legacy encoded buffer");
}
EncodedAccessUnit access_unit{};
access_unit.codec = config_->encoder.codec;
const auto pts = GST_BUFFER_PTS(buffer);
if (pts != GST_CLOCK_TIME_NONE) {
access_unit.stream_pts_ns = static_cast<std::uint64_t>(pts);
}
access_unit.source_timestamp_ns = first_source_timestamp_ns_.value_or(0) + access_unit.stream_pts_ns;
access_unit.keyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
access_unit.annexb_bytes.assign(map.data, map.data + map.size);
access_units.push_back(std::move(access_unit));
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
}
return access_units;
}
const RuntimeConfig *config_{nullptr};
ipc::FrameInfo frame_info_{};
GstElement *pipeline_{nullptr};
GstElement *appsrc_{nullptr};
GstElement *appsink_{nullptr};
GstElement *encoder_{nullptr};
GstBus *bus_{nullptr};
std::optional<std::uint64_t> first_source_timestamp_ns_{};
bool using_hardware_{false};
std::string active_encoder_name_{};
std::string active_parser_name_{};
};
#endif
}
EncoderBackend make_gstreamer_legacy_backend() {
#if CVMMAP_STREAMER_HAS_GSTREAMER
return pro::make_proxy<EncoderBackendFacade, GstreamerLegacyBackend>();
#else
return {};
#endif
}
}
+4 -5
View File
@@ -19,7 +19,7 @@ constexpr std::array<std::string_view, 32> kHelpLines{
" --input-uri <uri>\tcvmmap source URI (example: cvmmap://default)",
" --run-mode <mode>\tpipeline|ingest",
" --codec <codec>\th264|h265",
" --encoder-backend <backend>\tauto|ffmpeg|gstreamer_legacy",
" --encoder-backend <backend>\tauto|ffmpeg",
" --encoder-device <device>\tauto|nvidia|software",
" --gop <frames>\tencoder GOP length",
" --b-frames <count>\tencoder B-frame count",
@@ -29,10 +29,9 @@ constexpr std::array<std::string_view, 32> kHelpLines{
" --rtp-sdp <path>\twrite optional SDP sidecar",
" --rtmp\t\tenable RTMP output",
" --rtmp-url <url>\tadd RTMP destination (repeatable)",
" --rtmp-transport <mode>\tlibavformat|ffmpeg_process|legacy_custom",
" --rtmp-ffmpeg <path>\tffmpeg binary for ffmpeg_process transport",
" --rtmp-mode <mode>\tenhanced|domestic",
" --mcap\t\tenable MCAP recording",
" --rtmp-transport <mode>\tlibavformat|ffmpeg_process",
" --rtmp-ffmpeg <path>\tffmpeg binary for ffmpeg_process transport",
" --mcap\t\tenable MCAP recording",
" --mcap-path <path>\tMCAP output file",
" --mcap-topic <topic>\tMCAP topic name",
" --mcap-frame-id <id>\tFoxglove CompressedVideo frame_id",
-36
View File
@@ -1,7 +1,5 @@
#include "cvmmap_streamer/protocol/rtmp_output.hpp"
#include "cvmmap_streamer/protocol/rtmp_publisher.hpp"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
@@ -226,33 +224,6 @@ Status write_all(int fd, std::span<const std::uint8_t> bytes) {
return {};
}
class LegacyCustomRtmpOutput {
public:
explicit LegacyCustomRtmpOutput(RtmpPublisher &&publisher)
: publisher_(std::move(publisher)) {}
[[nodiscard]]
std::string_view backend_name() const {
return "legacy_custom";
}
[[nodiscard]]
Status publish_access_unit(const encode::EncodedAccessUnit &access_unit) {
auto publish = publisher_.publish_access_unit(access_unit.annexb_bytes, access_unit.stream_pts_ns);
if (!publish) {
return unexpected_error(ERR_PROTOCOL, publish.error());
}
return {};
}
void log_metrics() const {
publisher_.log_metrics();
}
private:
RtmpPublisher publisher_{};
};
class LibavformatRtmpOutput {
public:
struct Session {
@@ -681,13 +652,6 @@ Result<RtmpOutput> make_rtmp_output(
return LibavformatRtmpOutput::create(config, stream_info);
case RtmpTransportType::FfmpegProcess:
return FfmpegProcessRtmpOutput::create(config, stream_info);
case RtmpTransportType::LegacyCustom: {
auto publisher = RtmpPublisher::create(config);
if (!publisher) {
return unexpected_error(ERR_PROTOCOL, publisher.error());
}
return pro::make_proxy<RtmpOutputFacade, LegacyCustomRtmpOutput>(std::move(*publisher));
}
}
return unexpected_error(ERR_INTERNAL, "unknown RTMP transport");
File diff suppressed because it is too large Load Diff
+1 -8
View File
@@ -55,7 +55,7 @@ std::expected<Config, int> parse_args(int argc, char **argv) {
CLI::App app{"rtmp_output_tester - publish synthetic encoded video to RTMP using the configured sink"};
app.add_option("--rtmp-url", config.rtmp_url, "RTMP destination URL")->required();
app.add_option("--transport", config.transport, "RTMP transport backend (libavformat|ffmpeg_process)")
->check(CLI::IsMember({"libavformat", "ffmpeg_process", "legacy_custom"}));
->check(CLI::IsMember({"libavformat", "ffmpeg_process"}));
app.add_option("--codec", config.codec, "Video codec (h264|h265)")
->check(CLI::IsMember({"h264", "h265"}));
app.add_option("--encoder-device", config.encoder_device, "Encoder device (auto|nvidia|software)")
@@ -95,9 +95,6 @@ std::expected<cvmmap_streamer::RtmpTransportType, std::string> parse_transport(s
if (raw == "ffmpeg_process") {
return cvmmap_streamer::RtmpTransportType::FfmpegProcess;
}
if (raw == "legacy_custom") {
return cvmmap_streamer::RtmpTransportType::LegacyCustom;
}
return std::unexpected("unsupported transport");
}
@@ -163,10 +160,6 @@ int main(int argc, char **argv) {
config.outputs.rtmp.transport = *transport;
config.outputs.rtmp.ffmpeg_path = args->ffmpeg_path;
if (config.outputs.rtmp.transport == cvmmap_streamer::RtmpTransportType::LegacyCustom) {
config.encoder.backend = cvmmap_streamer::EncoderBackendType::GStreamerLegacy;
}
cvmmap_streamer::ipc::FrameInfo frame_info{
.width = static_cast<std::uint16_t>(args->width),
.height = static_cast<std::uint16_t>(args->height),
+3 -50
View File
@@ -61,14 +61,12 @@ enum class ExitCode : int {
enum class ExpectMode {
H264,
H265Enhanced,
H265Domestic,
};
enum class VideoSignal {
Unknown,
H264,
H265Enhanced,
H265Domestic,
};
struct Config {
@@ -97,7 +95,6 @@ struct Stats {
std::uint32_t h264_video_messages{0};
std::uint32_t h265_enhanced_video_messages{0};
std::uint32_t h265_domestic_video_messages{0};
std::uint32_t unknown_video_messages{0};
bool mode_mismatch{false};
@@ -184,8 +181,6 @@ std::string_view to_string(ExpectMode mode) {
return "h264";
case ExpectMode::H265Enhanced:
return "h265-enhanced";
case ExpectMode::H265Domestic:
return "h265-domestic";
default:
return "unknown";
}
@@ -198,8 +193,6 @@ std::string_view to_string(VideoSignal signal) {
return "h264";
case VideoSignal::H265Enhanced:
return "h265-enhanced";
case VideoSignal::H265Domestic:
return "h265-domestic";
case VideoSignal::Unknown:
default:
return "unknown";
@@ -214,11 +207,8 @@ std::expected<ExpectMode, std::string> parse_mode(std::string_view raw) {
if (raw == "h265-enhanced") {
return ExpectMode::H265Enhanced;
}
if (raw == "h265-domestic") {
return ExpectMode::H265Domestic;
}
return std::unexpected(std::format(
"invalid mode '{}'; expected: h264 | h265-enhanced | h265-domestic",
"invalid mode '{}'; expected: h264 | h265-enhanced",
raw));
}
@@ -227,7 +217,7 @@ std::expected<Config, std::string> parse_args(int argc, char **argv) {
Config config;
std::string mode_raw;
std::string self_test_send_mode_raw;
const std::vector<std::string> accepted_modes{"h264", "h265-enhanced", "h265-domestic"};
const std::vector<std::string> accepted_modes{"h264", "h265-enhanced"};
CLI::App app{"rtmp_stub_tester - standalone RTMP ingest validator"};
app.allow_extras(false);
@@ -1176,9 +1166,6 @@ VideoSignal classify_video_packet(std::span<const std::uint8_t> payload) {
if (codec_id == 7) {
return VideoSignal::H264;
}
if (codec_id == 12) {
return VideoSignal::H265Domestic;
}
if ((first & 0x80) != 0 && payload.size() >= 5) {
const std::array<std::uint8_t, 4> hvc1{'h', 'v', 'c', '1'};
@@ -1206,9 +1193,6 @@ void update_mode_stats(
case VideoSignal::H265Enhanced:
stats.h265_enhanced_video_messages++;
break;
case VideoSignal::H265Domestic:
stats.h265_domestic_video_messages++;
break;
case VideoSignal::Unknown:
default:
stats.unknown_video_messages++;
@@ -1226,9 +1210,6 @@ void update_mode_stats(
if (expected == ExpectMode::H265Enhanced && actual != VideoSignal::H265Enhanced) {
mismatch = true;
}
if (expected == ExpectMode::H265Domestic && actual != VideoSignal::H265Domestic) {
mismatch = true;
}
if (!mismatch) {
return;
@@ -1252,8 +1233,6 @@ std::uint32_t matching_count(const Stats &stats, ExpectMode mode) {
return stats.h264_video_messages;
case ExpectMode::H265Enhanced:
return stats.h265_enhanced_video_messages;
case ExpectMode::H265Domestic:
return stats.h265_domestic_video_messages;
default:
return 0;
}
@@ -1663,31 +1642,6 @@ send_client_video_mode_packets(int fd, std::uint32_t chunk_size, std::uint32_t s
};
break;
}
case ExpectMode::H265Domestic: {
config_payload = {
0x1c,
0x00,
0x00,
0x00,
0x00,
0x01,
0x01,
0x60,
};
frame_payload = {
0x2c,
0x01,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x01,
0x26,
};
break;
}
default:
return std::unexpected("unsupported self-test mode");
}
@@ -1778,10 +1732,9 @@ void print_summary(const Config &config, const Stats &stats) {
stats.total_video_messages,
stats.total_data_messages,
stats.set_chunk_size_messages);
spdlog::info("Video signaling counts: h264={}, h265-enhanced={}, h265-domestic={}, unknown={}",
spdlog::info("Video signaling counts: h264={}, h265-enhanced={}, unknown={}",
stats.h264_video_messages,
stats.h265_enhanced_video_messages,
stats.h265_domestic_video_messages,
stats.unknown_video_messages);
spdlog::info("Matching count for expected mode: {} (threshold={})",
matching_count(stats, config.expect_mode),
+214
View File
@@ -0,0 +1,214 @@
#include "cvmmap_streamer/config/runtime_config.hpp"
#include "cvmmap_streamer/encode/encoder_backend.hpp"
#include "cvmmap_streamer/ipc/contracts.hpp"
#include "cvmmap_streamer/protocol/rtp_publisher.hpp"
#include <CLI/CLI.hpp>
#include <chrono>
#include <cstdint>
#include <expected>
#include <string>
#include <thread>
#include <vector>
#include <spdlog/spdlog.h>
namespace {
// Process exit codes for the tester, one per failure stage, so a wrapper
// script can distinguish argument problems from runtime failures.
enum class TesterExitCode : int {
  Success = 0,
  InvalidArgument = 2,        // bad --codec / --encoder-device value
  BackendSelectionError = 3,  // no encoder backend matched the config
  BackendInitError = 4,       // backend rejected the configuration
  PublisherInitError = 5,     // RTP publisher could not be created
  PushError = 6,              // encoder rejected a raw frame
  DrainError = 7,             // per-frame drain failed
  FlushError = 8,             // final flush failed
};

// Converts a TesterExitCode into the plain int that main() must return.
[[nodiscard]]
constexpr int exit_code(TesterExitCode code) noexcept {
  return static_cast<int>(code);
}
// CLI-configurable settings for the tester. Defaults describe a small,
// low-rate local test stream; see parse_args for the accepted ranges.
struct Config {
  std::string host{"127.0.0.1"};  // RTP destination host (required on the CLI)
  std::uint16_t port{5004};  // RTP destination port (required on the CLI)
  std::uint32_t payload_type{96};  // RTP dynamic payload type (96-127)
  std::string codec{"h264"};  // "h264" or "h265"
  std::string encoder_device{"software"};  // "auto", "nvidia", or "software"
  std::string sdp_path{};  // optional SDP output path; empty disables SDP output
  std::uint32_t frames{48};  // number of synthetic frames to publish
  std::uint32_t width{320};  // frame width in pixels
  std::uint32_t height{240};  // frame height in pixels
  std::uint32_t frame_interval_ms{33};  // delay between frames (~30 fps default)
};
// Parses the command line into a Config.
//
// Returns the populated Config on success, or — wrapped in std::unexpected —
// the process exit code chosen by CLI11 (0 for --help, non-zero for errors).
//
// --width/--height are bounded to [1, 65535]: main() narrows both into the
// std::uint16_t fields of ipc::FrameInfo while sizing the frame buffer from
// the full 32-bit values, so any larger value would silently truncate the
// dimensions but not the buffer.
[[nodiscard]]
std::expected<Config, int> parse_args(int argc, char **argv) {
  Config config{};
  CLI::App app{"rtp_output_tester - publish synthetic encoded video to RTP using the FFmpeg encoder path"};
  app.add_option("--host", config.host, "RTP destination host")->required();
  app.add_option("--port", config.port, "RTP destination port")->required()->check(CLI::Range(1, 65535));
  app.add_option("--payload-type", config.payload_type, "RTP payload type (96-127)")->check(CLI::Range(96, 127));
  app.add_option("--codec", config.codec, "Video codec (h264|h265)")
      ->check(CLI::IsMember({"h264", "h265"}));
  app.add_option("--encoder-device", config.encoder_device, "Encoder device (auto|nvidia|software)")
      ->check(CLI::IsMember({"auto", "nvidia", "software"}));
  app.add_option("--sdp-path", config.sdp_path, "Optional SDP output path");
  app.add_option("--frames", config.frames, "Number of frames to publish")->check(CLI::PositiveNumber);
  // Range, not PositiveNumber: FrameInfo stores dimensions as uint16_t, so
  // reject anything that would truncate when narrowed in main().
  app.add_option("--width", config.width, "Frame width")->check(CLI::Range(1, 65535));
  app.add_option("--height", config.height, "Frame height")->check(CLI::Range(1, 65535));
  app.add_option("--frame-interval-ms", config.frame_interval_ms, "Frame interval in milliseconds")->check(CLI::PositiveNumber);
  try {
    app.parse(argc, argv);
  } catch (const CLI::ParseError &e) {
    // Delegates help/error printing to CLI11 and forwards its exit code.
    return std::unexpected(app.exit(e));
  }
  return config;
}
// Maps the CLI codec string onto the streamer's CodecType enum.
// Exactly "h264" and "h265" are accepted; anything else is rejected.
[[nodiscard]]
std::expected<cvmmap_streamer::CodecType, std::string> parse_codec(std::string_view raw) {
  using cvmmap_streamer::CodecType;
  const bool is_h264 = (raw == "h264");
  const bool is_h265 = (raw == "h265");
  if (!is_h264 && !is_h265) {
    return std::unexpected("unsupported codec");
  }
  return is_h264 ? CodecType::H264 : CodecType::H265;
}
// Maps the CLI encoder-device string onto EncoderDeviceType.
// Accepted spellings: "auto", "nvidia", "software"; anything else is rejected.
[[nodiscard]]
std::expected<cvmmap_streamer::EncoderDeviceType, std::string> parse_encoder_device(std::string_view raw) {
  using cvmmap_streamer::EncoderDeviceType;
  // The checks are mutually exclusive, so their order does not matter.
  if (raw == "software") {
    return EncoderDeviceType::Software;
  }
  if (raw == "nvidia") {
    return EncoderDeviceType::Nvidia;
  }
  if (raw == "auto") {
    return EncoderDeviceType::Auto;
  }
  return std::unexpected("unsupported encoder device");
}
// Fills `buffer` with a deterministic BGR test pattern for frame
// `frame_index`. Each channel formula shifts with the frame index so
// consecutive frames differ, keeping the encoder producing real output.
//
// The buffer is grown to width*height*3 bytes when it is smaller — previously
// an undersized buffer was written out of bounds (UB). Callers that already
// pre-size the buffer see identical behavior; extra capacity is left untouched.
void fill_pattern(std::vector<std::uint8_t> &buffer, std::uint32_t width, std::uint32_t height, std::uint32_t frame_index) {
  const std::size_t required =
      static_cast<std::size_t>(width) * static_cast<std::size_t>(height) * 3u;
  if (buffer.size() < required) {
    buffer.resize(required);
  }
  for (std::uint32_t y = 0; y < height; ++y) {
    // Hoist the row base offset out of the inner loop.
    const std::size_t row = static_cast<std::size_t>(y) * width * 3;
    for (std::uint32_t x = 0; x < width; ++x) {
      const std::size_t pixel = row + static_cast<std::size_t>(x) * 3;
      buffer[pixel + 0] = static_cast<std::uint8_t>((x + frame_index * 3) & 0xffu);
      buffer[pixel + 1] = static_cast<std::uint8_t>((y * 2 + frame_index * 5) & 0xffu);
      buffer[pixel + 2] = static_cast<std::uint8_t>(((x + y) / 2 + frame_index * 7) & 0xffu);
    }
  }
}
}
// Entry point for rtp_output_tester.
//
// Flow: parse CLI args -> translate codec/device strings to enums -> assemble
// a RuntimeConfig with the FFmpeg encoder backend and RTP output -> encode
// `frames` synthetic BGR frames, publishing every drained access unit over
// RTP -> flush, log metrics, shut down. Each failure stage returns its own
// TesterExitCode.
int main(int argc, char **argv) {
  auto args = parse_args(argc, argv);
  if (!args) {
    // CLI11 has already printed its diagnostic; propagate its exit code
    // (0 for --help, non-zero for parse errors).
    return args.error();
  }
  // Translate the validated CLI strings into the streamer's enums.
  auto codec = parse_codec(args->codec);
  if (!codec) {
    spdlog::error("{}", codec.error());
    return exit_code(TesterExitCode::InvalidArgument);
  }
  auto encoder_device = parse_encoder_device(args->encoder_device);
  if (!encoder_device) {
    spdlog::error("{}", encoder_device.error());
    return exit_code(TesterExitCode::InvalidArgument);
  }
  // Start from project defaults and force the FFmpeg encode path; only the
  // RTP output is enabled. gop=15 / b_frames=0 are fixed test values.
  cvmmap_streamer::RuntimeConfig config = cvmmap_streamer::RuntimeConfig::defaults();
  config.encoder.backend = cvmmap_streamer::EncoderBackendType::FFmpeg;
  config.encoder.device = *encoder_device;
  config.encoder.codec = *codec;
  config.encoder.gop = 15;
  config.encoder.b_frames = 0;
  config.outputs.rtp.enabled = true;
  config.outputs.rtp.endpoint = args->host + ":" + std::to_string(args->port);
  config.outputs.rtp.host = args->host;
  config.outputs.rtp.port = args->port;
  config.outputs.rtp.payload_type = static_cast<std::uint8_t>(args->payload_type);
  if (!args->sdp_path.empty()) {
    config.outputs.rtp.sdp_path = args->sdp_path;
  }
  // Describe the synthetic raw frames: packed 8-bit BGR.
  // NOTE(review): width/height are narrowed to uint16_t here while
  // buffer_size is computed from the 32-bit values — confirm parse_args
  // bounds them to <= 65535, otherwise the two disagree after truncation.
  cvmmap_streamer::ipc::FrameInfo frame_info{
      .width = static_cast<std::uint16_t>(args->width),
      .height = static_cast<std::uint16_t>(args->height),
      .channels = 3,
      .depth = cvmmap_streamer::ipc::Depth::U8,
      .pixel_format = cvmmap_streamer::ipc::PixelFormat::BGR,
      .buffer_size = args->width * args->height * 3,
  };
  auto backend = cvmmap_streamer::encode::make_encoder_backend(config);
  if (!backend) {
    spdlog::error("failed to select encoder backend: {}", cvmmap_streamer::format_error(backend.error()));
    return exit_code(TesterExitCode::BackendSelectionError);
  }
  auto init = (*backend)->init(config, frame_info);
  if (!init) {
    spdlog::error("failed to initialize encoder backend: {}", cvmmap_streamer::format_error(init.error()));
    return exit_code(TesterExitCode::BackendInitError);
  }
  auto publisher = cvmmap_streamer::protocol::UdpRtpPublisher::create(config);
  if (!publisher) {
    spdlog::error("failed to initialize RTP publisher: {}", publisher.error());
    return exit_code(TesterExitCode::PublisherInitError);
  }
  // One reusable frame buffer; fill_pattern rewrites it each iteration.
  std::vector<std::uint8_t> frame_bytes(frame_info.buffer_size, 0);
  const auto frame_interval = std::chrono::milliseconds(args->frame_interval_ms);
  std::uint64_t timestamp_ns{0};
  for (std::uint32_t frame_index = 0; frame_index < args->frames; ++frame_index) {
    fill_pattern(frame_bytes, args->width, args->height, frame_index);
    auto push = (*backend)->push_frame(cvmmap_streamer::encode::RawVideoFrame{
        .info = frame_info,
        .source_timestamp_ns = timestamp_ns,
        .bytes = std::span<const std::uint8_t>(frame_bytes.data(), frame_bytes.size()),
    });
    if (!push) {
      spdlog::error("encoder push failed at frame {}: {}", frame_index, cvmmap_streamer::format_error(push.error()));
      return exit_code(TesterExitCode::PushError);
    }
    // Drain after every push and publish whatever the encoder produced.
    auto drained = (*backend)->drain();
    if (!drained) {
      spdlog::error("encoder drain failed at frame {}: {}", frame_index, cvmmap_streamer::format_error(drained.error()));
      return exit_code(TesterExitCode::DrainError);
    }
    for (const auto &access_unit : *drained) {
      // NOTE(review): publish_access_unit's result (if any) is discarded —
      // confirm the publisher reports send failures internally.
      publisher->publish_access_unit(access_unit.annexb_bytes, access_unit.stream_pts_ns);
    }
    // Pace the synthetic source and advance its timestamps (ms -> ns).
    std::this_thread::sleep_for(frame_interval);
    timestamp_ns += static_cast<std::uint64_t>(args->frame_interval_ms) * 1'000'000ull;
  }
  // Flush anything the encoder still buffers, then publish that as well.
  auto flushed = (*backend)->flush();
  if (!flushed) {
    spdlog::error("encoder flush failed: {}", cvmmap_streamer::format_error(flushed.error()));
    return exit_code(TesterExitCode::FlushError);
  }
  for (const auto &access_unit : *flushed) {
    publisher->publish_access_unit(access_unit.annexb_bytes, access_unit.stream_pts_ns);
  }
  publisher->log_metrics();
  (*backend)->shutdown();
  return exit_code(TesterExitCode::Success);
}