[camera] Use common configs to create a GStreamer pipeline by yufengwangca · Pull Request #39009 · project-chip/connectedhomeip

Merged · 2 commits · May 16, 2025
6 changes: 3 additions & 3 deletions examples/camera-app/linux/include/camera-device.h
@@ -44,13 +44,13 @@ static constexpr uint32_t kMaxEncodedPixelRate = 27648000; // 720p at 30fps
 static constexpr uint8_t kMicrophoneMinLevel        = 1;
 static constexpr uint8_t kMicrophoneMaxLevel        = 254; // Spec constraint
 static constexpr uint8_t kMicrophoneMaxChannelCount = 8;   // Spec Constraint in AudioStreamAllocate
-static constexpr uint16_t kMinResolutionWidth  = 256; // Low SD resolution
-static constexpr uint16_t kMinResolutionHeight = 144; // Low SD resolution
+static constexpr uint16_t kMinResolutionWidth  = 640; // Low SD resolution
+static constexpr uint16_t kMinResolutionHeight = 360; // Low SD resolution
 static constexpr uint16_t kMaxResolutionWidth  = 1920; // 1080p resolution
 static constexpr uint16_t kMaxResolutionHeight = 1080; // 1080p resolution
 static constexpr uint16_t kSnapshotStreamFrameRate = 30;
 static constexpr uint16_t kMaxVideoFrameRate = 120;
-static constexpr uint16_t kMinVideoFrameRate = 15;
+static constexpr uint16_t kMinVideoFrameRate = 30;
 static constexpr uint32_t kMinBitRateBps  = 10000;   // 10 kbps
 static constexpr uint32_t kMaxBitRateBps  = 2000000; // 2 mbps
 static constexpr uint32_t kMinFragLenMsec = 1000;    // 1 sec
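For context, kMaxEncodedPixelRate = 27648000 is exactly 1280 × 720 × 30 pixels per second (720p at 30 fps), and the new 640 × 360 @ 30 fps floor needs only 640 × 360 × 30 = 6912000 pixels per second, comfortably below it. A hypothetical compile-time sanity check over these constants (not part of the PR, assuming it sits in the same header) could look like:

// Hypothetical sanity checks for the constants above; not part of this change.
static_assert(kMaxEncodedPixelRate == 1280u * 720u * 30u, "kMaxEncodedPixelRate is 720p at 30 fps");
static_assert(kMinResolutionWidth <= kMaxResolutionWidth && kMinResolutionHeight <= kMaxResolutionHeight,
              "min resolution must fit within max resolution");
static_assert(kMinVideoFrameRate <= kMaxVideoFrameRate, "min frame rate must not exceed max frame rate");
static_assert(kMinBitRateBps <= kMaxBitRateBps, "min bit rate must not exceed max bit rate");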
211 changes: 92 additions & 119 deletions examples/camera-app/linux/src/camera-device.cpp
@@ -127,93 +127,78 @@ GstElement * CameraDevice::CreateSnapshotPipeline(const std::string & device, int width,
     if (!pipeline || !source || !jpeg_caps || !videorate || !videorate_caps || !queue || !filesink)
     {
         ChipLogError(Camera, "Not all elements could be created.");
-        gst_object_unref(pipeline);
+
+        if (pipeline)
+            gst_object_unref(pipeline);
+        if (source)
+            gst_object_unref(source);
+        if (jpeg_caps)
+            gst_object_unref(jpeg_caps);
+        if (videorate)
+            gst_object_unref(videorate);
+        if (videorate_caps)
+            gst_object_unref(videorate_caps);
+        if (queue)
+            gst_object_unref(queue);
+        if (filesink)
+            gst_object_unref(filesink);
+
         error = CameraError::ERROR_INIT_FAILED;
         return nullptr;
     }

+    // Set the source device and caps
+    g_object_set(source, "device", device.c_str(), "do-timestamp", TRUE, nullptr);
+
+    GstCaps * caps = gst_caps_new_simple("image/jpeg", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate",
+                                         GST_TYPE_FRACTION, framerate, 1, "quality", G_TYPE_INT, quality, nullptr);
+    g_object_set(jpeg_caps, "caps", caps, nullptr);
+    gst_caps_unref(caps);
+
+    // Set the output file location
+    g_object_set(filesink, "location", filename.c_str(), nullptr);
+
     // Add elements to the pipeline
-    gst_bin_add_many(GST_BIN(pipeline), source, jpeg_caps, videorate, videorate_caps, queue, filesink, NULL);
+    gst_bin_add_many(GST_BIN(pipeline), source, jpeg_caps, videorate, videorate_caps, queue, filesink, nullptr);

     // Link the elements
-    if (gst_element_link_many(source, jpeg_caps, videorate, videorate_caps, queue, filesink, NULL) != TRUE)
+    if (gst_element_link_many(source, jpeg_caps, videorate, videorate_caps, queue, filesink, nullptr) != TRUE)
     {
         ChipLogError(Camera, "Elements could not be linked.");

+        // The pipeline will unref all added elements automatically when you unref the pipeline.
         gst_object_unref(pipeline);
         error = CameraError::ERROR_INIT_FAILED;
         return nullptr;
     }

-    // Set the source device and caps
-    g_object_set(source, "device", device.c_str(), "do-timestamp", TRUE, NULL);
-
-    GstCaps * caps = gst_caps_new_simple("image/jpeg", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate",
-                                         GST_TYPE_FRACTION, framerate, 1, "quality", G_TYPE_INT, quality, NULL);
-    g_object_set(jpeg_caps, "caps", caps, NULL);
-    gst_caps_unref(caps);
-
-    // Set the output file location
-    g_object_set(filesink, "location", filename.c_str(), NULL);
-
     return pipeline;
 }

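With this change the snapshot pipeline configures its elements (capture device, JPEG caps, output file) before gst_bin_add_many()/gst_element_link_many(), so the link-failure path only needs to unref the pipeline. A minimal standalone sketch of the equivalent pipeline, assuming /dev/video0, the new 640 × 360 @ 30 fps minimums, and a placeholder output path capture.jpg:

// Sketch only: gst_parse_launch() version of the snapshot chain above.
#include <gst/gst.h>

int main(int argc, char ** argv)
{
    gst_init(&argc, &argv);

    GError * err          = nullptr;
    GstElement * pipeline = gst_parse_launch("v4l2src device=/dev/video0 do-timestamp=true "
                                             "! image/jpeg,width=640,height=360,framerate=30/1 "
                                             "! videorate ! image/jpeg,framerate=30/1 "
                                             "! queue ! filesink location=capture.jpg",
                                             &err);
    if (pipeline == nullptr)
    {
        g_printerr("Failed to build pipeline: %s\n", err ? err->message : "unknown error");
        g_clear_error(&err);
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_usleep(G_USEC_PER_SEC); // capture for roughly one second, then tear down
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}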
-// Helper function to create a GStreamer pipeline
+// Helper function to create a GStreamer pipeline that ingests MJPEG frames coming
+// from the camera, converts them to H.264, and sends them out over RTP/UDP.
 GstElement * CameraDevice::CreateVideoPipeline(const std::string & device, int width, int height, int framerate,
                                                CameraError & error)
 {
-    GstElement * pipeline     = nullptr;
+    GstElement * pipeline     = gst_pipeline_new("video-pipeline");
+    GstElement * capsfilter   = gst_element_factory_make("capsfilter", "mjpeg_caps");
+    GstElement * jpegdec      = gst_element_factory_make("jpegdec", "jpegdec");
+    GstElement * videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
+    GstElement * x264enc      = gst_element_factory_make("x264enc", "encoder");
+    GstElement * rtph264pay   = gst_element_factory_make("rtph264pay", "rtph264");
+    GstElement * udpsink      = gst_element_factory_make("udpsink", "udpsink");
     GstElement * source       = nullptr;
-    GstElement * capsfilter   = nullptr;
-    GstElement * videoconvert = nullptr;
-    GstElement * x264enc      = nullptr;
-    GstElement * rtph264pay   = nullptr;
-    GstElement * udpsink      = nullptr;

-    // Create the pipeline elements
-    pipeline = gst_pipeline_new("video-pipeline");
-
     // Create elements
 #ifdef AV_STREAM_GST_USE_TEST_SRC
     source = gst_element_factory_make("videotestsrc", "source");
 #else
     source = gst_element_factory_make("v4l2src", "source");
 #endif
-    capsfilter   = gst_element_factory_make("capsfilter", "filter");
-    videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
-    x264enc      = gst_element_factory_make("x264enc", "encoder");
-    rtph264pay   = gst_element_factory_make("rtph264pay", "rtph264");
-    udpsink      = gst_element_factory_make("udpsink", "udpsink");

-    if (pipeline == nullptr || source == nullptr || capsfilter == nullptr || videoconvert == nullptr || x264enc == nullptr ||
-        rtph264pay == nullptr || udpsink == nullptr)
+    if (!pipeline || !source || !capsfilter || !jpegdec || !videoconvert || !x264enc || !rtph264pay || !udpsink)
     {
         ChipLogError(Camera, "Not all elements could be created.");
-        if (pipeline)
-            gst_object_unref(pipeline);
-        if (source)
-            gst_object_unref(source);
-        if (capsfilter)
-            gst_object_unref(capsfilter);
-        if (videoconvert)
-            gst_object_unref(videoconvert);
-        if (x264enc)
-            gst_object_unref(x264enc);
-        if (rtph264pay)
-            gst_object_unref(rtph264pay);
-        if (udpsink)
-            gst_object_unref(udpsink);
-        error = CameraError::ERROR_INIT_FAILED;
-        return nullptr;
-    }
-
-    // Add elements to the pipeline
-    gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, videoconvert, x264enc, rtph264pay, udpsink, NULL);
-
-    // Link the elements
-    if (!gst_element_link_many(source, capsfilter, videoconvert, x264enc, rtph264pay, udpsink, NULL))
-    {
-        ChipLogError(Camera, "Elements could not be linked.");
         if (pipeline)
             gst_object_unref(pipeline);
         if (source)
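The asymmetric cleanup follows GStreamer's ownership rules: a freshly created element carries a floating reference that the caller owns, but adding it to a bin transfers ownership to the bin, so after gst_bin_add_many() it is enough to unref the pipeline. A small illustration of this (hypothetical, not from the PR):

// Before gst_bin_add(): we own the element's floating reference and must unref it on error.
GstElement * queue = gst_element_factory_make("queue", nullptr);

// After gst_bin_add(): the bin has taken ownership of the element...
gst_bin_add(GST_BIN(pipeline), queue);

// ...so dropping the pipeline also frees every element that was added to it.
gst_object_unref(pipeline);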
@@ -222,68 +207,76 @@ GstElement * CameraDevice::CreateVideoPipeline(const std::string & device, int width,
             gst_object_unref(capsfilter);
         if (videoconvert)
             gst_object_unref(videoconvert);
+        if (jpegdec)
+            gst_object_unref(jpegdec);
         if (x264enc)
             gst_object_unref(x264enc);
         if (rtph264pay)
             gst_object_unref(rtph264pay);
         if (udpsink)
             gst_object_unref(udpsink);
+
         error = CameraError::ERROR_INIT_FAILED;
         return nullptr;
     }

-    // Create GstCaps for the video source
-    GstCaps * caps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "format",
-                                         G_TYPE_STRING, "NV12", // Adjust format as needed
-                                         "framerate", GST_TYPE_FRACTION, framerate, 1, NULL);
+    // Caps asking the camera for MJPEG at the requested resolution / rate
+    GstCaps * caps = gst_caps_new_simple("image/jpeg", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate",
+                                         GST_TYPE_FRACTION, framerate, 1, nullptr);
+    g_object_set(capsfilter, "caps", caps, nullptr);
+    gst_caps_unref(caps);

-    // Set video test src pattern
+    // Configure source device
 #ifdef AV_STREAM_GST_USE_TEST_SRC
-    g_object_set(source, "pattern", kBallAnimationPattern, NULL);
+    g_object_set(source, "pattern", kBallAnimationPattern, nullptr);
+#else
+    g_object_set(source, "device", device.c_str(), nullptr);
 #endif

-    // Set the caps on the capsfilter element
-    g_object_set(capsfilter, "caps", caps, NULL);
+    // Configure encoder / sink for low-latency RTP
+    gst_util_set_object_arg(G_OBJECT(x264enc), "tune", "zerolatency");
+    g_object_set(udpsink, "host", STREAM_GST_DEST_IP, "port", VIDEO_STREAM_GST_DEST_PORT, "sync", FALSE, "async", FALSE, nullptr);

-    // Set udpsink properties
-    g_object_set(udpsink, "host", STREAM_GST_DEST_IP, "port", VIDEO_STREAM_GST_DEST_PORT, NULL);
+    // Build pipeline: v4l2src → capsfilter → jpegdec → videoconvert → x264enc → rtph264pay → udpsink
+    gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, jpegdec, videoconvert, x264enc, rtph264pay, udpsink, nullptr);

-    // Unref the caps to free memory
-    gst_caps_unref(caps);
+    // Link the elements
+    if (!gst_element_link_many(source, capsfilter, jpegdec, videoconvert, x264enc, rtph264pay, udpsink, nullptr))
+    {
+        ChipLogError(Camera, "CreateVideoPipeline: link failed");
+
+        // The bin (pipeline) will unref all added elements automatically when you unref the bin.
+        gst_object_unref(pipeline);
+        error = CameraError::ERROR_INIT_FAILED;
+        return nullptr;
+    }

     return pipeline;
 }
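The rewritten chain asks the camera for MJPEG (which UVC webcams typically deliver at higher resolutions and frame rates than raw formats), decodes it with jpegdec, and re-encodes with x264enc tuned for zero latency. To eyeball the resulting stream on the destination host, a receiver sketch along these lines should work; the port must match VIDEO_STREAM_GST_DEST_PORT, whose value is defined elsewhere, so 5000 below is only a placeholder, and avdec_h264 assumes gst-libav is installed:

// Hypothetical RTP/H.264 receiver for the stream produced above.
#include <gst/gst.h>

int main(int argc, char ** argv)
{
    gst_init(&argc, &argv);

    GError * err          = nullptr;
    GstElement * pipeline = gst_parse_launch(
        "udpsrc port=5000 caps=\"application/x-rtp,media=video,clock-rate=90000,encoding-name=H264\" "
        "! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false",
        &err);
    if (pipeline == nullptr)
    {
        g_printerr("Failed to build receiver: %s\n", err ? err->message : "unknown error");
        g_clear_error(&err);
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(g_main_loop_new(nullptr, FALSE)); // run until interrupted
    return 0;
}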

 // Helper function to create a GStreamer pipeline
 GstElement * CameraDevice::CreateAudioPipeline(const std::string & device, int channels, int sampleRate, CameraError & error)
 {
-    GstElement * pipeline     = nullptr;
-    GstElement * source       = nullptr;
-    GstElement * capsfilter   = nullptr;
-    GstElement * audioconvert = nullptr;
-    GstElement * opusenc      = nullptr;
-    GstElement * rtpopuspay   = nullptr;
-    GstElement * udpsink      = nullptr;
+    GstElement * pipeline = gst_pipeline_new("audio-pipeline");

-    // Create the pipeline elements
-    pipeline = gst_pipeline_new("audio-pipeline");
+    GstElement * capsfilter   = gst_element_factory_make("capsfilter", "filter");
+    GstElement * audioconvert = gst_element_factory_make("audioconvert", "audio-convert");
+    GstElement * opusenc      = gst_element_factory_make("opusenc", "opus-encoder");
+    GstElement * rtpopuspay   = gst_element_factory_make("rtpopuspay", "rtpopuspay");
+    GstElement * udpsink      = gst_element_factory_make("udpsink", "udpsink");

+    GstElement * source = nullptr;
     // Create elements
 #ifdef AV_STREAM_GST_USE_TEST_SRC
     source = gst_element_factory_make("audiotestsrc", "source");
 #else
     source = gst_element_factory_make("pulsesrc", "source");
 #endif
-    capsfilter   = gst_element_factory_make("capsfilter", "filter");
-    audioconvert = gst_element_factory_make("audioconvert", "audio-convert");
-    opusenc      = gst_element_factory_make("opusenc", "opus-encoder");
-    rtpopuspay   = gst_element_factory_make("rtpopuspay", "rtpopuspay");
-    udpsink      = gst_element_factory_make("udpsink", "udpsink");

-    if (pipeline == nullptr || source == nullptr || capsfilter == nullptr || audioconvert == nullptr || opusenc == nullptr ||
-        rtpopuspay == nullptr || udpsink == nullptr)
+    if (!pipeline || !source || !capsfilter || !audioconvert || !opusenc || !rtpopuspay || !udpsink)
     {
         ChipLogError(Camera, "Not all elements could be created.");
+
         if (pipeline)
             gst_object_unref(pipeline);
         if (source)
@@ -298,43 +291,29 @@ GstElement * CameraDevice::CreateAudioPipeline(const std::string & device, int channels,
             gst_object_unref(rtpopuspay);
         if (udpsink)
             gst_object_unref(udpsink);
+
         error = CameraError::ERROR_INIT_FAILED;
         return nullptr;
     }

-    // Create GstCaps for the audio source
-    GstCaps * caps = gst_caps_new_simple("audio/x-raw", "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, sampleRate, NULL);
-
-    // Set the caps on the capsfilter element
-    g_object_set(capsfilter, "caps", caps, NULL);
+    GstCaps * caps = gst_caps_new_simple("audio/x-raw", "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, sampleRate, nullptr);
+    g_object_set(capsfilter, "caps", caps, nullptr);
+    gst_caps_unref(caps);

-    // Set udpsink properties
-    g_object_set(udpsink, "host", STREAM_GST_DEST_IP, "port", AUDIO_STREAM_GST_DEST_PORT, NULL);
-
-    // Unref the caps to free memory
-    gst_caps_unref(caps);
+    g_object_set(udpsink, "host", STREAM_GST_DEST_IP, "port", AUDIO_STREAM_GST_DEST_PORT, nullptr);

     // Add elements to the pipeline
-    gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, audioconvert, opusenc, rtpopuspay, udpsink, NULL);
+    gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, audioconvert, opusenc, rtpopuspay, udpsink, nullptr);

     // Link elements
-    if (!gst_element_link_many(source, capsfilter, audioconvert, opusenc, rtpopuspay, udpsink, NULL))
+    if (!gst_element_link_many(source, capsfilter, audioconvert, opusenc, rtpopuspay, udpsink, nullptr))
     {
         ChipLogError(Camera, "Elements could not be linked.");
-        if (pipeline)
-            gst_object_unref(pipeline);
-        if (source)
-            gst_object_unref(source);
-        if (capsfilter)
-            gst_object_unref(capsfilter);
-        if (audioconvert)
-            gst_object_unref(audioconvert);
-        if (opusenc)
-            gst_object_unref(opusenc);
-        if (rtpopuspay)
-            gst_object_unref(rtpopuspay);
-        if (udpsink)
-            gst_object_unref(udpsink);

+        // The pipeline will unref all added elements automatically when you unref the pipeline.
+        gst_object_unref(pipeline);
         error = CameraError::ERROR_INIT_FAILED;
         return nullptr;
     }
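The Opus/RTP audio stream can be checked the same way. A hedged receiver sketch follows, using the same main() scaffolding as the video receiver above; the port must match AUDIO_STREAM_GST_DEST_PORT (5001 below is a placeholder), and Opus over RTP always advertises a 48 kHz clock regardless of the capture rate:

// Hypothetical RTP/Opus receiver for the audio pipeline above.
#include <gst/gst.h>

int main(int argc, char ** argv)
{
    gst_init(&argc, &argv);

    GError * err          = nullptr;
    GstElement * pipeline = gst_parse_launch(
        "udpsrc port=5001 caps=\"application/x-rtp,media=audio,clock-rate=48000,encoding-name=OPUS\" "
        "! rtpopusdepay ! opusdec ! audioconvert ! autoaudiosink",
        &err);
    if (pipeline == nullptr)
    {
        g_printerr("Failed to build receiver: %s\n", err ? err->message : "unknown error");
        g_clear_error(&err);
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(g_main_loop_new(nullptr, FALSE)); // run until interrupted
    return 0;
}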
@@ -428,18 +407,12 @@ CameraError CameraDevice::StartVideoStream(uint16_t streamID)
         return CameraError::ERROR_VIDEO_STREAM_START_FAILED;
     }

-    // Start the pipeline
-    GstStateChangeReturn result = gst_element_set_state(videoPipeline, GST_STATE_PLAYING);
-    if (result == GST_STATE_CHANGE_FAILURE)
-    {
-        ChipLogError(Camera, "Failed to start video pipeline.");
-        gst_object_unref(videoPipeline);
-        it->videoContext = nullptr;
-        return CameraError::ERROR_VIDEO_STREAM_START_FAILED;
-    }
+    ChipLogProgress(Camera, "Starting video stream (id=%u): %u×%u @ %ufps", streamID, it->videoStreamParams.minResolution.width,
+                    it->videoStreamParams.minResolution.height, it->videoStreamParams.minFrameRate);

-    // Start the pipeline
-    result = gst_element_set_state(videoPipeline, GST_STATE_PLAYING);
+    ChipLogProgress(Camera, "Requesting PLAYING …");
+    GstStateChangeReturn result = gst_element_set_state(videoPipeline, GST_STATE_PLAYING);
     if (result == GST_STATE_CHANGE_FAILURE)
     {
         ChipLogError(Camera, "Failed to start video pipeline.");
@@ -450,7 +423,7 @@ CameraError CameraDevice::StartVideoStream(uint16_t streamID)

     // Wait for the pipeline to reach the PLAYING state
     GstState state;
-    gst_element_get_state(videoPipeline, &state, nullptr, GST_CLOCK_TIME_NONE);
+    gst_element_get_state(videoPipeline, &state, nullptr, 5 * GST_SECOND);
     if (state != GST_STATE_PLAYING)
     {
         ChipLogError(Camera, "Video pipeline did not reach PLAYING state.");
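Two fixes land in StartVideoStream: the pipeline is now started once instead of twice, and the blocking wait is bounded. gst_element_set_state() commonly returns GST_STATE_CHANGE_ASYNC for pipelines with live sources, so the code must confirm the transition with gst_element_get_state(); with GST_CLOCK_TIME_NONE that call could block forever if the device never produces a frame, whereas a 5-second budget turns a wedged camera into a clean error. A condensed sketch of the pattern, assuming `pipeline` was just asked to go to PLAYING:

// Bounded wait for PLAYING; GST_STATE_CHANGE_ASYNC with a non-PLAYING state means the budget expired.
GstState state           = GST_STATE_NULL;
GstStateChangeReturn ret = gst_element_get_state(pipeline, &state, nullptr, 5 * GST_SECOND);
if (ret == GST_STATE_CHANGE_FAILURE || state != GST_STATE_PLAYING)
{
    // Either the state change failed outright or the 5 s timeout elapsed: tear down and report.
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    // ... surface the error to the caller ...
}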