Add -snapshot.height, -stream.height and -video.height

Kamil Trzciński
2022-10-29 14:30:43 +02:00
committed by Kamil Trzcinski
parent c18561c9f8
commit ca08adb62f
29 changed files with 553 additions and 514 deletions

View File

@@ -10,8 +10,6 @@
#include "device/device.h"
#include "util/ffmpeg/remuxer.h"
buffer_lock_t *http_h264_buffer_for_res(http_worker_t *worker);
static const char *const VIDEO_HEADER =
"HTTP/1.0 200 OK\r\n"
"Access-Control-Allow-Origin: *\r\n"
@@ -135,7 +133,7 @@ static void http_ffmpeg_video(http_worker_t *worker, FILE *stream, const char *c
#endif
int n = buffer_lock_write_loop(
http_h264_buffer_for_res(worker),
&video_lock,
0,
0,
(buffer_write_fn)http_ffmpeg_video_buf_part,
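
For context, buffer_lock_write_loop() takes the lock to read from, two numeric arguments (their exact semantics are not visible in this diff; below they are assumed to be a minimum frame count and a timeout), a buffer_write_fn callback, and an opaque pointer handed through to that callback. A minimal sketch of a consumer, modelled on http_snapshot_buf_part() further down; the buffer_t payload field names are assumptions, and it presumes <stdio.h> plus the project's http/buffer_lock headers:

/* Hypothetical consumer callback; the signature mirrors http_snapshot_buf_part()
 * in this commit. Returning a value <= 0 is assumed to stop the loop. */
static int example_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE *stream)
{
  /* buf->start / buf->used are assumed field names for the frame payload */
  if (fwrite(buf->start, 1, buf->used, stream) != buf->used)
    return -1;
  return 1;
}

static void example_consume(FILE *stream)
{
  /* 0, 0: the numeric arguments used for the continuous video endpoint above */
  int n = buffer_lock_write_loop(&video_lock, 0, 0,
      (buffer_write_fn)example_buf_part, stream);
  if (n <= 0)
    http_500(stream, NULL); /* same failure path the HTTP handlers use */
}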

View File

@@ -16,14 +16,6 @@ static const char *const VIDEO_HEADER =
"Content-Type: application/octet-stream\r\n"
"\r\n";
buffer_lock_t *http_h264_buffer_for_res(http_worker_t *worker)
{
if (strstr(worker->request_params, HTTP_LOW_RES_PARAM) && http_h264_lowres.buf_list)
return &http_h264_lowres;
else
return &http_h264;
}
typedef struct {
FILE *stream;
bool wrote_header;
@@ -60,7 +52,7 @@ void http_h264_video(http_worker_t *worker, FILE *stream)
{
http_video_status_t status = { stream };
int n = buffer_lock_write_loop(http_h264_buffer_for_res(worker), 0, 0, (buffer_write_fn)http_video_buf_part, &status);
int n = buffer_lock_write_loop(&video_lock, 0, 0, (buffer_write_fn)http_video_buf_part, &status);
if (status.wrote_header) {
return;

View File

@@ -20,14 +20,6 @@ static const char *const STREAM_PART = "Content-Type: " CONTENT_TYPE "\r\n" CONT
static const char *const STREAM_BOUNDARY = "\r\n"
"--" PART_BOUNDARY "\r\n";
buffer_lock_t *http_jpeg_buffer_for_res(http_worker_t *worker)
{
if (strstr(worker->request_params, HTTP_LOW_RES_PARAM) && http_jpeg_lowres.buf_list)
return &http_jpeg_lowres;
else
return &http_jpeg;
}
int http_snapshot_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE *stream)
{
fprintf(stream, "HTTP/1.1 200 OK\r\n");
@@ -40,7 +32,7 @@ int http_snapshot_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FI
void http_snapshot(http_worker_t *worker, FILE *stream)
{
int n = buffer_lock_write_loop(http_jpeg_buffer_for_res(worker), 1, 0, (buffer_write_fn)http_snapshot_buf_part, stream);
int n = buffer_lock_write_loop(&snapshot_lock, 1, 0, (buffer_write_fn)http_snapshot_buf_part, stream);
if (n <= 0) {
http_500(stream, NULL);
@@ -68,7 +60,7 @@ int http_stream_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE
void http_stream(http_worker_t *worker, FILE *stream)
{
int n = buffer_lock_write_loop(http_jpeg_buffer_for_res(worker), 0, 0, (buffer_write_fn)http_stream_buf_part, stream);
int n = buffer_lock_write_loop(&stream_lock, 0, 0, (buffer_write_fn)http_stream_buf_part, stream);
if (n == 0) {
http_500(stream, NULL);
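
Both JPEG endpoints now read from fixed, purpose-named locks; the remaining difference between them is the first numeric argument to buffer_lock_write_loop(), assumed to mean "stop after one frame" when set to 1. A hedged side-by-side sketch (request-parameter handling omitted):

/* One-shot: presumably returns once a single JPEG has been written. */
void example_snapshot(FILE *stream)
{
  if (buffer_lock_write_loop(&snapshot_lock, 1, 0,
      (buffer_write_fn)http_snapshot_buf_part, stream) <= 0)
    http_500(stream, NULL);
}

/* Continuous: keeps writing multipart JPEG parts until the client disconnects. */
void example_stream(FILE *stream)
{
  if (buffer_lock_write_loop(&stream_lock, 0, 0,
      (buffer_write_fn)http_stream_buf_part, stream) == 0)
    http_500(stream, NULL);
}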

View File

@@ -1,7 +1,5 @@
#include "device/buffer_lock.h"
DEFINE_BUFFER_LOCK(http_h264, 0);
DEFINE_BUFFER_LOCK(http_h264_lowres, 0);
DEFINE_BUFFER_LOCK(http_jpeg, 1000);
DEFINE_BUFFER_LOCK(http_jpeg_lowres, 1000);
DEFINE_BUFFER_LOCK(snapshot_lock, 1000);
DEFINE_BUFFER_LOCK(stream_lock, 0);
DEFINE_BUFFER_LOCK(video_lock, 0);
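
The three purpose-named locks replace the resolution-specific http_h264/http_jpeg pairs. DEFINE_BUFFER_LOCK itself is not shown in this commit; a plausible sketch of what it provides, assuming the second argument is a per-lock timeout or frame interval in milliseconds and that buffer_lock_s carries the fields referenced elsewhere in this diff (buf_list, the registered callbacks):

/* Hypothetical expansion of DEFINE_BUFFER_LOCK(name, ms); the .name and
 * .timeout_ms fields are assumptions and do not come from this commit. */
#define DEFINE_BUFFER_LOCK(lock_name, ms) \
  buffer_lock_t lock_name = { \
    .name = #lock_name, \
    .timeout_ms = (ms), \
  }

/* The matching declarations live in buffer_locks.h, e.g.:
 * extern struct buffer_lock_s snapshot_lock; */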

View File

@@ -5,11 +5,9 @@
struct http_worker_s;
struct buffer_s;
extern struct buffer_lock_s http_h264;
extern struct buffer_lock_s http_h264_lowres;
extern struct buffer_lock_s http_jpeg;
extern struct buffer_lock_s http_jpeg_lowres;
extern struct buffer_lock_s snapshot_lock;
extern struct buffer_lock_s stream_lock;
extern struct buffer_lock_s video_lock;
// M-JPEG
void http_snapshot(struct http_worker_s *worker, FILE *stream);

View File

@@ -26,13 +26,12 @@ static pthread_mutex_t rtsp_lock = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
static class DynamicH264Stream *rtsp_streams;
static const char *stream_name = "stream.h264";
static const char *stream_low_res_name = "stream_low_res.h264";
class DynamicH264Stream : public FramedSource
{
public:
DynamicH264Stream(UsageEnvironment& env, Boolean lowResMode)
: FramedSource(env), fHaveStartedReading(False), fLowResMode(lowResMode)
DynamicH264Stream(UsageEnvironment& env)
: FramedSource(env), fHaveStartedReading(False)
{
}
@@ -65,16 +64,12 @@
pthread_mutex_unlock(&rtsp_lock);
}
void receiveData(buffer_t *buf, bool lowResMode)
void receiveData(buffer_t *buf)
{
if (!isCurrentlyAwaitingData()) {
return; // we're not ready for the data yet
}
if (fLowResMode != lowResMode) {
return;
}
if (buf->flags.is_keyframe) {
fHadKeyFrame = true;
}
@@ -109,7 +104,6 @@
Boolean fHaveStartedReading;
Boolean fHadKeyFrame;
Boolean fRequestedKeyFrame;
Boolean fLowResMode;
DynamicH264Stream *pNextStream;
};
@@ -117,23 +111,21 @@
class DynamicH264VideoFileServerMediaSubsession : public OnDemandServerMediaSubsession
{
public:
DynamicH264VideoFileServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource, Boolean lowResMode)
: OnDemandServerMediaSubsession(env, reuseFirstSource), fLowResMode(lowResMode)
DynamicH264VideoFileServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
: OnDemandServerMediaSubsession(env, reuseFirstSource)
{
}
virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
estBitrate = 500; // kbps, estimate
return H264VideoStreamFramer::createNew(envir(), new DynamicH264Stream(envir(), fLowResMode));
return H264VideoStreamFramer::createNew(envir(), new DynamicH264Stream(envir()));
}
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
{
return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
Boolean fLowResMode;
};
class DynamicRTSPServer: public RTSPServerSupportingHTTPStreaming
@@ -164,13 +156,8 @@
protected: // redefined virtual functions
virtual ServerMediaSession* lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession)
{
bool lowResMode = false;
if (strcmp(streamName, stream_name) == 0) {
LOG_INFO(NULL, "Requesting %s stream...", streamName);
} else if (strcmp(streamName, stream_low_res_name) == 0) {
LOG_INFO(NULL, "Requesting %s stream (low resolution mode)...", streamName);
lowResMode = true;
} else {
LOG_INFO(NULL, "No stream available: '%s'", streamName);
return NULL;
@@ -188,7 +175,7 @@
sms = ServerMediaSession::createNew(envir(), streamName, streamName, "streamed by the LIVE555 Media Server");;
OutPacketBuffer::maxSize = 2000000; // allow for some possibly large H.264 frames
auto subsession = new DynamicH264VideoFileServerMediaSubsession(envir(), false, lowResMode);
auto subsession = new DynamicH264VideoFileServerMediaSubsession(envir(), false);
sms->addSubsession(subsession);
addServerMediaSession(sms);
return sms;
@@ -208,8 +195,7 @@ static bool rtsp_h264_needs_buffer(buffer_lock_t *buf_lock)
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
if (!stream->fLowResMode)
needsBuffer = true;
needsBuffer = true;
}
pthread_mutex_unlock(&rtsp_lock);
return needsBuffer;
@@ -219,33 +205,7 @@
{
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
stream->receiveData(buf, false);
if (!http_h264_lowres.buf_list) {
stream->receiveData(buf, true);
}
}
pthread_mutex_unlock(&rtsp_lock);
}
static bool rtsp_h264_low_res_needs_buffer(buffer_lock_t *buf_lock)
{
bool needsBuffer = false;
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
if (stream->fLowResMode)
needsBuffer = true;
}
pthread_mutex_unlock(&rtsp_lock);
return needsBuffer;
}
static void rtsp_h264_low_res_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
stream->receiveData(buf, true);
stream->receiveData(buf);
}
pthread_mutex_unlock(&rtsp_lock);
}
@@ -280,10 +240,8 @@ extern "C" int rtsp_server(rtsp_options_t *options)
// LOG_INFO(NULL, "The RTSP-over-HTTP is not available.");
// }
buffer_lock_register_check_streaming(&http_h264, rtsp_h264_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264, rtsp_h264_capture);
buffer_lock_register_check_streaming(&http_h264_lowres, rtsp_h264_low_res_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264_lowres, rtsp_h264_low_res_capture);
buffer_lock_register_check_streaming(&video_lock, rtsp_h264_needs_buffer);
buffer_lock_register_notify_buffer(&video_lock, rtsp_h264_capture);
pthread_create(&rtsp_thread, NULL, rtsp_server_thread, env);
return 0;
@@ -299,4 +257,4 @@ extern "C" int rtsp_server(rtsp_options_t *options)
return 0;
}
#endif // USE_RTSP
#endif // USE_RTSP
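
Both the RTSP and WebRTC servers now attach to the single video_lock using the same two-callback pattern: a predicate the capture pipeline can poll to decide whether H264 buffers are needed at all, and a notification that fans each buffer out to connected consumers. A minimal sketch of that pattern, with hypothetical helper names (example_*):

/* Predicate polled by the capture side; returns true while at least one
 * consumer is connected and able to accept frames (example_* are hypothetical). */
static bool example_needs_buffer(buffer_lock_t *buf_lock)
{
  return example_active_consumers() > 0;
}

/* Called for every captured H264 buffer; fans it out to all consumers. */
static void example_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
  example_push_to_consumers(buf);
}

void example_attach_to_video_lock(void)
{
  buffer_lock_register_check_streaming(&video_lock, example_needs_buffer);
  buffer_lock_register_notify_buffer(&video_lock, example_capture);
}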

View File

@@ -76,7 +76,7 @@ class Client
{
public:
Client(std::shared_ptr<rtc::PeerConnection> pc_)
: pc(pc_), use_low_res(false)
: pc(pc_)
{
id.resize(20);
for (auto & c : id) {
@@ -91,18 +91,16 @@ public:
free(name);
}
bool wantsFrame(bool low_res) const
bool wantsFrame() const
{
if (!pc || !video)
return false;
if (pc->state() != rtc::PeerConnection::State::Connected)
return false;
if (use_low_res != low_res)
return false;
return video->wantsFrame();
}
void pushFrame(buffer_t *buf, bool low_res)
void pushFrame(buffer_t *buf)
{
auto self = this;
@@ -110,10 +108,6 @@ public:
return;
}
if (use_low_res != low_res) {
return;
}
if (!had_key_frame) {
if (!buf->flags.is_keyframe) {
device_video_force_key(buf->buf_list->dev);
@@ -136,7 +130,6 @@ public:
std::mutex lock;
std::condition_variable wait_for_complete;
bool had_key_frame;
bool use_low_res;
};
std::shared_ptr<Client> findClient(std::string id)
@@ -232,9 +225,7 @@ static bool webrtc_h264_needs_buffer(buffer_lock_t *buf_lock)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(false))
return true;
if (!http_h264_lowres.buf_list && client->wantsFrame(true))
if (client->wantsFrame())
return true;
}
@@ -245,31 +236,8 @@ static void webrtc_h264_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(false))
client->pushFrame(buf, false);
if (!http_h264_lowres.buf_list && client->wantsFrame(true))
client->pushFrame(buf, true);
}
}
static bool webrtc_h264_low_res_needs_buffer(buffer_lock_t *buf_lock)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(true))
return true;
}
return false;
}
static void webrtc_h264_low_res_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(true)) {
client->pushFrame(buf, true);
}
if (client->wantsFrame())
client->pushFrame(buf);
}
}
@@ -279,9 +247,6 @@ static void http_webrtc_request(http_worker_t *worker, FILE *stream, const nlohm
LOG_INFO(client.get(), "Stream requested.");
client->video = addVideo(client->pc, webrtc_client_video_payload_type, rand(), "video", "");
if (message.contains("res")) {
client->use_low_res = (message["res"] == "low");
}
try {
{
@@ -418,10 +383,8 @@ extern "C" void http_webrtc_offer(http_worker_t *worker, FILE *stream)
extern "C" void webrtc_server()
{
buffer_lock_register_check_streaming(&http_h264, webrtc_h264_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264, webrtc_h264_capture);
buffer_lock_register_check_streaming(&http_h264_lowres, webrtc_h264_low_res_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264_lowres, webrtc_h264_low_res_capture);
buffer_lock_register_check_streaming(&video_lock, webrtc_h264_needs_buffer);
buffer_lock_register_notify_buffer(&video_lock, webrtc_h264_capture);
}
#else // USE_LIBDATACHANNEL
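
One detail worth noting in pushFrame() above: a client that has not yet received a key frame drops incoming buffers and asks the device for a fresh key frame, so a newly connected viewer never starts mid-GOP. A hedged C sketch of that gating logic; everything except buf->flags.is_keyframe and device_video_force_key() is an assumption:

/* Returns true when the buffer may be forwarded to this consumer.
 * had_key_frame is per-consumer state, as in the Client class above. */
static bool example_keyframe_gate(buffer_t *buf, bool *had_key_frame)
{
  if (!*had_key_frame) {
    if (!buf->flags.is_keyframe) {
      /* request a key frame from the capture device and skip this buffer */
      device_video_force_key(buf->buf_list->dev);
      return false;
    }
    *had_key_frame = true;
  }
  return true;
}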